1.0.0.dev1

This commit is contained in:
Bedir Tuğra Karaabalı 2025-05-18 03:22:51 +03:00
parent 30f6c8641a
commit 16976ea64c
1121 changed files with 199110 additions and 0 deletions

0
__init__.py Normal file
View File

Binary file not shown.

Binary file not shown.

Binary file not shown.

6
apps.py Normal file
View File

@ -0,0 +1,6 @@
import dataclasses
class SubApp:
    """A single sub-application (e.g. 'account', 'tiles') to be scaffolded.

    Instances are collected by CommandAST while parsing the CLI tokens and
    handed to the project generator.
    """

    def __init__(self, name):
        # name: identifier of the sub-app taken from the CLI token stream.
        self.name = name

    def __repr__(self):
        # Added for debuggability; does not affect existing callers.
        return f"SubApp(name={self.name!r})"

63
basic_conf.py Normal file
View File

@ -0,0 +1,63 @@
import sys
from setup_project import BasicSetup
from apps import SubApp
#### -setup basic
### -setup modules
### -setup hard
###fastorg -setup basic -app account
###fastorg -app tiles
###fastorg -include account
class BasicConfig:
    """Default project-level names used by the generator when the user
    does not override them on the command line."""

    def __init__(self):
        # Defaults: sub-app name, entry-point file, and docker compose file.
        self.subappname, self.mainfile, self.docker_file = (
            "account_manager",
            "main.py",
            "docker-compose.yml",
        )
# Supported CLI keywords mapped to the argument values each one accepts.
# Mirrors the usage examples in the comments above.
commands = {
    'setup' : ['basic', 'modules', 'hard'],
    'app' : ['account', 'tiles'],
    'include' : ['account', 'docker'],
}
### fastorg setup basic app multiverse include docker
class CommandAST:
    """Parses fastorg CLI tokens (e.g. ['setup', 'basic', 'app', 'tiles'])
    into a simple command structure and triggers project generation."""

    def __init__(self):
        self.setuptype = None    # value following the 'setup' keyword
        self.subappname = None   # NOTE(review): never assigned by read_command — confirm intent
        self.includetype = None  # value following the 'include' keyword
        self.subapps = []        # SubApp instances collected from 'app' keywords

    def read_command(self, commands):
        """Consume the token list pairwise: each known keyword takes exactly
        one value token. Unknown keywords are reported and skipped.

        Fix over the original: a keyword given without its value no longer
        silently stores None (or appends SubApp(None)) — it warns and skips.
        """
        tokens = iter(commands)
        for cmd in tokens:
            if cmd in ('setup', 'app', 'include'):
                value = next(tokens, None)
                if value is None:
                    print(f"Missing value for command: {cmd}")
                    continue
                if cmd == 'setup':
                    self.setuptype = value
                elif cmd == 'app':
                    self.subapps.append(SubApp(value))
                else:  # 'include'
                    self.includetype = value
            else:
                print(f"Unknown command: {cmd}")

    def __str__(self):
        return (f"CommandAST(setuptype={self.setuptype}, "
                f"subappname={self.subappname}, "
                f"includetype={self.includetype})")

    def basic_setup(self):
        """Build the project skeleton from the parsed commands."""
        # NOTE(review): mainfile is hard-coded to 'main2.py' here while
        # BasicConfig defaults to 'main.py' — confirm which is intended.
        setup = BasicSetup(mainfile="main2.py", docker_file="docker-compose.yml",
                           db_name='db_name',
                           db_type='sqlite')
        setup.subapps = self.subapps
        setup.create()

67
config.py Normal file
View File

@ -0,0 +1,67 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, DeclarativeBase
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy import Table, Column, Integer, String, Float, Boolean, ForeignKey
from passlib.context import CryptContext
from dotenv import load_dotenv
import os
# Load environment variables from the .env file before reading settings.
load_dotenv()

# Password hashing context (bcrypt) used across the app.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

# JWT / token settings read from the environment.
SECRET_KEY = os.getenv("SECRET_KEY")
ALGORITHM = os.getenv("ALGORITHM")
ACCESS_TOKEN_EXPIRE_MINUTES = int(os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", 30))
DATABASE_URL = os.getenv("DATABASE_URL")

# Create the SQLAlchemy engine.
engine = create_engine(DATABASE_URL, echo=False)

# Create the session factory.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Base = declarative_base()  # legacy SQLAlchemy style: models subclassed this as 'class DBUser(Base)'
class Base(DeclarativeBase):
    """Declarative base for all ORM models (SQLAlchemy 2.0 style)."""
    pass  # in the new SQLAlchemy version we use this class instead of declarative_base()
# don't forget to import this in the models module
def init_db():
    """Create all tables registered on Base. Called once at app startup."""
    # Base.metadata.drop_all(engine)  # would wipe the database on every start — be careful!
    Base.metadata.create_all(bind=engine)  # creates the database tables
# Session dependency (FastAPI için)
def get_session_db() -> 'Generator[Session, None, None]':
    """FastAPI dependency: yield a DB session and always close it afterwards."""
    db = SessionLocal()
    try:
        yield db
    finally:
        # Ensure the session is returned to the pool even if the request fails.
        db.close()
# Origins allowed to make cross-origin (CORS) requests to this API.
origins = [
    "http://localhost",
    "http://localhost:8080",
    "http://localhost:3000",
    "http://localhost:8000",
]

# The FastAPI application instance shared by the whole project.
app = FastAPI()
# NOTE(review): on_event is deprecated in newer FastAPI; consider lifespan handlers.
@app.on_event("startup")
def startup_event():
    # Create the database tables when the application starts.
    init_db()
# Allow the listed frontend origins to call the API with credentials.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

BIN
dist/fastorg-1.0.0.dev1-py3-none-any.whl vendored Normal file

Binary file not shown.

BIN
dist/fastorg-1.0.0.dev1.tar.gz vendored Normal file

Binary file not shown.

0
docker-compose.yml Normal file
View File

14
fastorg.egg-info/PKG-INFO Normal file
View File

@ -0,0 +1,14 @@
Metadata-Version: 2.4
Name: fastorg
Version: 1.0.0.dev1
Summary: basic organizer for fast api project
Author-email: Bedir Karaabali <bedirkaraabali@gmail.com>
Project-URL: HomePage, http://git.bedirkaraabali.me/bdrtr/FastOrganizer
Requires-Python: >=3.12
Description-Content-Type: text/markdown
License-File: LICENSE
Dynamic: license-file
# FastOrganizer
the basic conf for fastapi with scalable project and small agile teams.

View File

@ -0,0 +1,7 @@
LICENSE
README.md
pyproject.toml
fastorg.egg-info/PKG-INFO
fastorg.egg-info/SOURCES.txt
fastorg.egg-info/dependency_links.txt
fastorg.egg-info/top_level.txt

View File

@ -0,0 +1 @@

View File

@ -0,0 +1 @@
myenv

11
main.py Normal file
View File

@ -0,0 +1,11 @@
"""fastorg CLI entry point: parse argv and generate the project skeleton."""
import basic_conf
import setup_project
import sys

cmd = sys.argv[1:]
if not cmd:
    # No commands given: print usage instead of silently generating an
    # empty project (the original ran basic_setup() regardless).
    print("usage: fastorg setup <basic|modules|hard> [app <name>] [include <name>]")
    sys.exit(1)

cmd_ast = basic_conf.CommandAST()
cmd_ast.read_command(cmd)
cmd_ast.basic_setup()

3
main2.py Normal file
View File

@ -0,0 +1,3 @@
# Generated application entry point: wire the sub-app routers onto the shared app.
from .config import app
from .merhaba.router import router as merhaba_router

app.include_router(merhaba_router)

247
myenv/bin/Activate.ps1 Normal file
View File

@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"

70
myenv/bin/activate Normal file
View File

@ -0,0 +1,70 @@
# This file must be used with "source bin/activate" *from bash*
# You cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
# on Windows, a path can contain colons and backslashes and has to be converted:
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
# transform D:\path\to\venv to /d/path/to/venv on MSYS
# and to /cygdrive/d/path/to/venv on Cygwin
export VIRTUAL_ENV=$(cygpath /home/bedir/Documents/VSCODE/FastOrganizer/myenv)
else
# use the path as-is
export VIRTUAL_ENV=/home/bedir/Documents/VSCODE/FastOrganizer/myenv
fi
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/"bin":$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1='(myenv) '"${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT='(myenv) '
export VIRTUAL_ENV_PROMPT
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null

27
myenv/bin/activate.csh Normal file
View File

@ -0,0 +1,27 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV /home/bedir/Documents/VSCODE/FastOrganizer/myenv
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = '(myenv) '"$prompt"
setenv VIRTUAL_ENV_PROMPT '(myenv) '
endif
alias pydoc python -m pydoc
rehash

69
myenv/bin/activate.fish Normal file
View File

@ -0,0 +1,69 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/). You cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
set -e _OLD_FISH_PROMPT_OVERRIDE
# prevents error when using nested fish instances (Issue #93858)
if functions -q _old_fish_prompt
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
end
set -e VIRTUAL_ENV
set -e VIRTUAL_ENV_PROMPT
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV /home/bedir/Documents/VSCODE/FastOrganizer/myenv
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status
# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) '(myenv) ' (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
set -gx VIRTUAL_ENV_PROMPT '(myenv) '
end

8
myenv/bin/pip Executable file
View File

@ -0,0 +1,8 @@
#!/home/bedir/Documents/VSCODE/FastOrganizer/myenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
myenv/bin/pip3 Executable file
View File

@ -0,0 +1,8 @@
#!/home/bedir/Documents/VSCODE/FastOrganizer/myenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
myenv/bin/pip3.12 Executable file
View File

@ -0,0 +1,8 @@
#!/home/bedir/Documents/VSCODE/FastOrganizer/myenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
myenv/bin/pyproject-build Executable file
View File

@ -0,0 +1,8 @@
#!/home/bedir/Documents/VSCODE/FastOrganizer/myenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from build.__main__ import entrypoint
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(entrypoint())

1
myenv/bin/python Symbolic link
View File

@ -0,0 +1 @@
python3

1
myenv/bin/python3 Symbolic link
View File

@ -0,0 +1 @@
/usr/bin/python3

1
myenv/bin/python3.12 Symbolic link
View File

@ -0,0 +1 @@
python3

View File

@ -0,0 +1,20 @@
Copyright © 2019 Filipe Laíns <filipe.lains@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice (including the next
paragraph) shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,158 @@
Metadata-Version: 2.1
Name: build
Version: 1.2.2.post1
Summary: A simple, correct Python build frontend
Author-email: Filipe Laíns <lains@riseup.net>, Bernát Gábor <gaborjbernat@gmail.com>, layday <layday@protonmail.com>, Henry Schreiner <henryschreineriii@gmail.com>
Requires-Python: >= 3.8
Description-Content-Type: text/markdown
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Dist: packaging >= 19.1
Requires-Dist: pyproject_hooks
Requires-Dist: colorama; os_name == "nt"
Requires-Dist: importlib-metadata >= 4.6; python_full_version < "3.10.2"
Requires-Dist: tomli >= 1.1.0; python_version < "3.11"
Requires-Dist: furo >= 2023.08.17 ; extra == "docs"
Requires-Dist: sphinx ~= 7.0 ; extra == "docs"
Requires-Dist: sphinx-argparse-cli >= 1.5 ; extra == "docs"
Requires-Dist: sphinx-autodoc-typehints >= 1.10 ; extra == "docs"
Requires-Dist: sphinx-issues >= 3.0.0 ; extra == "docs"
Requires-Dist: build[uv, virtualenv] ; extra == "test"
Requires-Dist: filelock >= 3 ; extra == "test"
Requires-Dist: pytest >= 6.2.4 ; extra == "test"
Requires-Dist: pytest-cov >= 2.12 ; extra == "test"
Requires-Dist: pytest-mock >= 2 ; extra == "test"
Requires-Dist: pytest-rerunfailures >= 9.1 ; extra == "test"
Requires-Dist: pytest-xdist >= 1.34 ; extra == "test"
Requires-Dist: wheel >= 0.36.0 ; extra == "test"
Requires-Dist: setuptools >= 42.0.0 ; extra == "test" and ( python_version < "3.10")
Requires-Dist: setuptools >= 56.0.0 ; extra == "test" and ( python_version == "3.10")
Requires-Dist: setuptools >= 56.0.0 ; extra == "test" and ( python_version == "3.11")
Requires-Dist: setuptools >= 67.8.0 ; extra == "test" and ( python_version >= "3.12")
Requires-Dist: build[uv] ; extra == "typing"
Requires-Dist: importlib-metadata >= 5.1 ; extra == "typing"
Requires-Dist: mypy ~= 1.9.0 ; extra == "typing"
Requires-Dist: tomli ; extra == "typing"
Requires-Dist: typing-extensions >= 3.7.4.3 ; extra == "typing"
Requires-Dist: uv >= 0.1.18 ; extra == "uv"
Requires-Dist: virtualenv >= 20.0.35 ; extra == "virtualenv"
Project-URL: changelog, https://build.pypa.io/en/stable/changelog.html
Project-URL: homepage, https://build.pypa.io
Project-URL: issues, https://github.com/pypa/build/issues
Project-URL: source, https://github.com/pypa/build
Provides-Extra: docs
Provides-Extra: test
Provides-Extra: typing
Provides-Extra: uv
Provides-Extra: virtualenv
# build
[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/pypa/build/main.svg)](https://results.pre-commit.ci/latest/github/pypa/build/main)
[![CI test](https://github.com/pypa/build/actions/workflows/test.yml/badge.svg)](https://github.com/pypa/build/actions/workflows/test.yml)
[![codecov](https://codecov.io/gh/pypa/build/branch/main/graph/badge.svg)](https://codecov.io/gh/pypa/build)
[![Documentation Status](https://readthedocs.org/projects/pypa-build/badge/?version=latest)](https://build.pypa.io/en/latest/?badge=latest)
[![PyPI version](https://badge.fury.io/py/build.svg)](https://pypi.org/project/build/)
[![Discord](https://img.shields.io/discord/803025117553754132?label=Discord%20chat%20%23build)](https://discord.gg/pypa)
A simple, correct Python build frontend.
See the [documentation](https://build.pypa.io) for more information.
### Installation
`build` can be installed via `pip` or an equivalent via:
```console
$ pip install build
```
### Usage
```console
$ python -m build
```
This will build the package in an isolated environment, generating a
source-distribution and wheel in the directory `dist/`.
See the [documentation](https://build.pypa.io) for full information.
### Common arguments
- `--sdist` (`-s`): Produce just an SDist
- `--wheel` (`-w`): Produce just a wheel
- `-C<option>=<value>`: A Config-setting, the PEP 517 way of passing options to a backend. Can be passed multiple times. Matching options will make a list. Note that setuptools has very limited support.
- `--installer`: Pick an installer for the isolated build (`pip` or `uv`).
- `--no-isolation` (`-n`): Disable build isolation.
- `--skip-dependency-check` (`-x`): Disable dependency checking when not isolated; this should be done if some requirements or version ranges are not required for non-isolated builds.
- `--outdir` (`-o`): The output directory (defaults to `dist`)
Some common combinations of arguments:
- `--sdist --wheel` (`-sw`): Produce an SDist and a wheel, both from the source distribution. The default (if no flag is passed) is to build an SDist and then build a wheel _from_ the SDist.
- `-nx`: Disable build isolation and dependency checking. Identical to pip and uv's `--no-build-isolation` flag.
### Integration with other tools
#### pipx
If you use [pipx][], such as in GitHub Actions, the following command will download
and run build in one step:
```console
$ pipx run build
```
#### uv
If you want to use [uv][] to speed up the virtual environment creation, you can use
`--installer=uv`. You can get a Python wheel for `uv` with the `[uv]` extra.
Combining both suggestions yields the following:
```console
$ pipx run build[uv] --installer=uv
```
#### cibuildwheel
If you are using [cibuildwheel][], build is integrated and can be use with either (in your `pyproject.toml`):
```toml
[tool.cibuildwheel]
build-frontend = "build"
```
or
```toml
[tool.cibuildwheel]
build-frontend = "build[uv]"
```
(Be sure to pre-install uv before running cibuildwheel for this one!)
#### Conda-forge
On conda-forge, this package is called [python-build][].
### Code of Conduct
Everyone interacting in the build's codebase, issue trackers, chat rooms, and mailing lists is expected to follow
the [PSF Code of Conduct].
[psf code of conduct]: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
[pipx]: https://pipx.pypa.io
[uv]: https://docs.astral.sh/uv/
[cibuildwheel]: https://cibuildwheel.pypa.io
[python-build]: https://github.com/conda-forge/python-build-feedstock

View File

@ -0,0 +1,35 @@
../../../bin/pyproject-build,sha256=7iWgDYNfB36cx3kNSW_O36kriheNTS8Hd_nOw7dmaZI,269
build-1.2.2.post1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
build-1.2.2.post1.dist-info/LICENSE,sha256=qvminKWQeXHM8H3gJTdds0U5qNXu684gtGCZgFciEG8,1113
build-1.2.2.post1.dist-info/METADATA,sha256=QweVLVd0qUBgEKOJsE6JtIRbqb2ozkOe4hyIAfeXOTc,6450
build-1.2.2.post1.dist-info/RECORD,,
build-1.2.2.post1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
build-1.2.2.post1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
build-1.2.2.post1.dist-info/entry_points.txt,sha256=MYbbM6JGYubgIjTgcLpNIZpc0PWbi9mRAW7VhtczlS8,105
build/__init__.py,sha256=4oxVWC_M_hRTRSyDrU4d0Q-3hPV6J0ThGGzqQWp2YWU,843
build/__main__.py,sha256=pvzlY6-r8VXHSD3h7SgANITl5vDV1ZMdEPx8n_LysuA,14227
build/__pycache__/__init__.cpython-312.pyc,,
build/__pycache__/__main__.cpython-312.pyc,,
build/__pycache__/_builder.cpython-312.pyc,,
build/__pycache__/_ctx.cpython-312.pyc,,
build/__pycache__/_exceptions.cpython-312.pyc,,
build/__pycache__/_types.cpython-312.pyc,,
build/__pycache__/_util.cpython-312.pyc,,
build/__pycache__/env.cpython-312.pyc,,
build/__pycache__/util.cpython-312.pyc,,
build/_builder.py,sha256=Z2uDHZxil43HLfhZ6KKc3qjd5ZNiJfK1Zu6WgJLJTA4,13469
build/_compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
build/_compat/__pycache__/__init__.cpython-312.pyc,,
build/_compat/__pycache__/importlib.cpython-312.pyc,,
build/_compat/__pycache__/tarfile.cpython-312.pyc,,
build/_compat/__pycache__/tomllib.cpython-312.pyc,,
build/_compat/importlib.py,sha256=mdUYYn5ciavBQ6zFE-2ghFDotKZC4QufbKMyPg5h3WI,467
build/_compat/tarfile.py,sha256=8-sDnbG-9mZ5OsmSVPArxE6KI6dPe0wX2yf4etDSqxk,816
build/_compat/tomllib.py,sha256=JT-q9fBu_vOt4fPWKh2bIKvfko633z_74F_TP_Y17cs,255
build/_ctx.py,sha256=QEBzQYYT2Buyj0eHkTKIQ3GK3LEV6rEC-2iRiuU4sT8,2880
build/_exceptions.py,sha256=lxEYr42eG-wgfLbcMhlks4cc0u-P3zMM8sbWraINwI4,1625
build/_types.py,sha256=if2-I_0F1HGWhhufZqZiBDvAd_niCMSKaHAmWHUIseg,673
build/_util.py,sha256=D4xFxoA4PT27xalv9a-5flCtZgZHNFaKpiIM2XpdM9s,2325
build/env.py,sha256=lDe3HfqNr4BI9mMFsGYp4YbF_IJyFBfO74OftF81KmM,13473
build/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
build/util.py,sha256=iBHPpU128o3EbxdFOTSO3a4NktO9DnTWiJoXBedqmu0,1776

View File

@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: flit 3.9.0
Root-Is-Purelib: true
Tag: py3-none-any

View File

@ -0,0 +1,6 @@
[console_scripts]
pyproject-build=build.__main__:entrypoint
[pipx.run]
build=build.__main__:entrypoint

View File

@ -0,0 +1,39 @@
"""
build - A simple, correct Python build frontend
"""
from __future__ import annotations
from ._builder import ProjectBuilder
from ._exceptions import (
BuildBackendException,
BuildException,
BuildSystemTableValidationError,
FailedProcessError,
TypoWarning,
)
from ._types import ConfigSettings as ConfigSettingsType
from ._types import Distribution as DistributionType
from ._types import SubprocessRunner as RunnerType
from ._util import check_dependency
__version__ = '1.2.2.post1'
__all__ = [
'__version__',
'BuildBackendException',
'BuildException',
'BuildSystemTableValidationError',
'check_dependency',
'ConfigSettingsType',
'DistributionType',
'FailedProcessError',
'ProjectBuilder',
'RunnerType',
'TypoWarning',
]
def __dir__() -> list[str]:
return __all__

View File

@ -0,0 +1,455 @@
# SPDX-License-Identifier: MIT
from __future__ import annotations
import argparse
import contextlib
import contextvars
import os
import platform
import shutil
import subprocess
import sys
import tempfile
import textwrap
import traceback
import warnings
from collections.abc import Iterator, Sequence
from functools import partial
from typing import NoReturn, TextIO
import build
from . import ProjectBuilder, _ctx
from . import env as _env
from ._exceptions import BuildBackendException, BuildException, FailedProcessError
from ._types import ConfigSettings, Distribution, StrPath
from .env import DefaultIsolatedEnv
_COLORS = {
'red': '\33[91m',
'green': '\33[92m',
'yellow': '\33[93m',
'bold': '\33[1m',
'dim': '\33[2m',
'underline': '\33[4m',
'reset': '\33[0m',
}
_NO_COLORS = {color: '' for color in _COLORS}
_styles = contextvars.ContextVar('_styles', default=_COLORS)
def _init_colors() -> None:
if 'NO_COLOR' in os.environ:
if 'FORCE_COLOR' in os.environ:
warnings.warn('Both NO_COLOR and FORCE_COLOR environment variables are set, disabling color', stacklevel=2)
_styles.set(_NO_COLORS)
elif 'FORCE_COLOR' in os.environ or sys.stdout.isatty():
return
_styles.set(_NO_COLORS)
def _cprint(fmt: str = '', msg: str = '', file: TextIO | None = None) -> None:
print(fmt.format(msg, **_styles.get()), file=file, flush=True)
def _showwarning(
message: Warning | str,
category: type[Warning],
filename: str,
lineno: int,
file: TextIO | None = None,
line: str | None = None,
) -> None: # pragma: no cover
_cprint('{yellow}WARNING{reset} {}', str(message))
_max_terminal_width = shutil.get_terminal_size().columns - 2
if _max_terminal_width <= 0:
_max_terminal_width = 78
_fill = partial(textwrap.fill, subsequent_indent=' ', width=_max_terminal_width)
def _log(message: str, *, origin: tuple[str, ...] | None = None) -> None:
    # CLI logger installed into _ctx.LOGGER by _setup_cli.
    # Messages with no origin are the frontend's own: the first line is bolded
    # with a '* ' bullet and continuation lines are wrapped and indented.
    # Subprocess-originated messages are dimmed; commands get a '> ' prefix,
    # captured output '< ', and stderr output is routed to sys.stderr.
    if origin is None:
        (first, *rest) = message.splitlines()
        _cprint('{bold}{}{reset}', _fill(first, initial_indent='* '))
        for line in rest:
            print(_fill(line, initial_indent=' '))
    elif origin[0] == 'subprocess':
        initial_indent = '> ' if origin[1] == 'cmd' else '< '
        file = sys.stderr if origin[1] == 'stderr' else None
        for line in message.splitlines():
            _cprint('{dim}{}{reset}', _fill(line, initial_indent=initial_indent), file=file)
def _setup_cli(*, verbosity: int) -> None:
    # Install the CLI-facing warning renderer, color handling, and context
    # logger/verbosity used by the rest of the package.
    warnings.showwarning = _showwarning
    # On Windows, colorama (when available) translates ANSI escapes for the console.
    if platform.system() == 'Windows':
        try:
            import colorama
            colorama.init()
        except ModuleNotFoundError:
            pass
    _init_colors()
    _ctx.LOGGER.set(_log)
    _ctx.VERBOSITY.set(verbosity)
def _error(msg: str, code: int = 1) -> NoReturn: # pragma: no cover
    """
    Print an error message and exit. Will color the output when writing to a TTY.

    :param msg: Error message
    :param code: Error code
    :raises SystemExit: Always, with the given exit code
    """
    _cprint('{red}ERROR{reset} {}', msg)
    raise SystemExit(code)
def _format_dep_chain(dep_chain: Sequence[str]) -> str:
return ' -> '.join(dep.partition(';')[0].strip() for dep in dep_chain)
def _build_in_isolated_env(
    srcdir: StrPath,
    outdir: StrPath,
    distribution: Distribution,
    config_settings: ConfigSettings | None,
    installer: _env.Installer,
) -> str:
    """Build *distribution* inside a freshly created isolated environment.

    :returns: The path of the built artifact
    """
    with DefaultIsolatedEnv(installer=installer) as env:
        builder = ProjectBuilder.from_isolated_env(env, srcdir)
        # first install the build dependencies
        env.install(builder.build_system_requires)
        # then get the extra required dependencies from the backend (which was installed in the call above :P)
        env.install(builder.get_requires_for_build(distribution, config_settings or {}))
        return builder.build(distribution, outdir, config_settings or {})
def _build_in_current_env(
    srcdir: StrPath,
    outdir: StrPath,
    distribution: Distribution,
    config_settings: ConfigSettings | None,
    skip_dependency_check: bool = False,
) -> str:
    """Build *distribution* using the current interpreter's environment.

    Unless *skip_dependency_check* is set, unmet build dependencies abort the
    process with an error listing each missing dependency chain.

    :returns: The path of the built artifact
    """
    builder = ProjectBuilder(srcdir)
    if not skip_dependency_check:
        missing = builder.check_dependencies(distribution, config_settings or {})
        if missing:
            # One line per unmet dependency: its name, then the chain requiring it.
            dependencies = ''.join('\n\t' + dep for deps in missing for dep in (deps[0], _format_dep_chain(deps[1:])) if dep)
            _cprint()
            _error(f'Missing dependencies:{dependencies}')
    return builder.build(distribution, outdir, config_settings or {})
def _build(
    isolation: bool,
    srcdir: StrPath,
    outdir: StrPath,
    distribution: Distribution,
    config_settings: ConfigSettings | None,
    skip_dependency_check: bool,
    installer: _env.Installer,
) -> str:
    """Dispatch the build to the isolated- or current-environment path."""
    if not isolation:
        return _build_in_current_env(srcdir, outdir, distribution, config_settings, skip_dependency_check)
    return _build_in_isolated_env(srcdir, outdir, distribution, config_settings, installer)
@contextlib.contextmanager
def _handle_build_error() -> Iterator[None]:
    # Translate known build failures into user-facing error messages; each
    # _error() call raises SystemExit, terminating the CLI.
    try:
        yield
    except (BuildException, FailedProcessError) as e:
        _error(str(e))
    except BuildBackendException as e:
        if isinstance(e.exception, subprocess.CalledProcessError):
            # The backend subprocess already printed its own output, so no
            # traceback is shown — just a blank line and the error.
            _cprint()
            _error(str(e))
        # Otherwise show the innermost frame of the backend traceback to aid debugging.
        if e.exc_info:
            tb_lines = traceback.format_exception(
                e.exc_info[0],
                e.exc_info[1],
                e.exc_info[2],
                limit=-1,
            )
            tb = ''.join(tb_lines)
        else:
            tb = traceback.format_exc(-1)
        _cprint('\n{dim}{}{reset}\n', tb.strip('\n'))
        _error(str(e))
    except Exception as e: # pragma: no cover
        tb = traceback.format_exc().strip('\n')
        _cprint('\n{dim}{}{reset}\n', tb)
        _error(str(e))
def _natural_language_list(elements: Sequence[str]) -> str:
if len(elements) == 0:
msg = 'no elements'
raise IndexError(msg)
elif len(elements) == 1:
return elements[0]
else:
return '{} and {}'.format(
', '.join(elements[:-1]),
elements[-1],
)
def build_package(
    srcdir: StrPath,
    outdir: StrPath,
    distributions: Sequence[Distribution],
    config_settings: ConfigSettings | None = None,
    isolation: bool = True,
    skip_dependency_check: bool = False,
    installer: _env.Installer = 'pip',
) -> Sequence[str]:
    """
    Run the build process.

    :param srcdir: Source directory
    :param outdir: Output directory
    :param distributions: Distributions to build (sdist and/or wheel)
    :param config_settings: Configuration settings to be passed to the backend
    :param isolation: Isolate the build in a separate environment
    :param skip_dependency_check: Do not perform the dependency check
    :param installer: Python package installer used for isolated builds
    :returns: Basenames of the built artifacts, in build order
    """
    built: list[str] = []
    for distribution in distributions:
        out = _build(isolation, srcdir, outdir, distribution, config_settings, skip_dependency_check, installer)
        built.append(os.path.basename(out))
    return built
def build_package_via_sdist(
    srcdir: StrPath,
    outdir: StrPath,
    distributions: Sequence[Distribution],
    config_settings: ConfigSettings | None = None,
    isolation: bool = True,
    skip_dependency_check: bool = False,
    installer: _env.Installer = 'pip',
) -> Sequence[str]:
    """
    Build a sdist and then the specified distributions from it.

    :param srcdir: Source directory
    :param outdir: Output directory
    :param distributions: Distributions to build (only wheel)
    :param config_settings: Configuration settings to be passed to the backend
    :param isolation: Isolate the build in a separate environment
    :param skip_dependency_check: Do not perform the dependency check
    :param installer: Python package installer used for isolated builds
    :returns: The sdist basename followed by the basenames of the other artifacts
    :raises ValueError: If ``sdist`` is requested in *distributions*
    """
    from ._compat import tarfile
    if 'sdist' in distributions:
        msg = 'Only binary distributions are allowed but sdist was specified'
        raise ValueError(msg)
    sdist = _build(isolation, srcdir, outdir, 'sdist', config_settings, skip_dependency_check, installer)
    sdist_name = os.path.basename(sdist)
    sdist_out = tempfile.mkdtemp(prefix='build-via-sdist-')
    built: list[str] = []
    if distributions:
        # extract sdist
        with tarfile.TarFile.open(sdist) as t:
            t.extractall(sdist_out)
        try:
            _ctx.log(f'Building {_natural_language_list(distributions)} from sdist')
            # The sdist unpacks into a '<name>-<version>' directory matching its filename.
            srcdir = os.path.join(sdist_out, sdist_name[: -len('.tar.gz')])
            for distribution in distributions:
                out = _build(isolation, srcdir, outdir, distribution, config_settings, skip_dependency_check, installer)
                built.append(os.path.basename(out))
        finally:
            # Always remove the temporary extraction directory.
            shutil.rmtree(sdist_out, ignore_errors=True)
    return [sdist_name, *built]
def main_parser() -> argparse.ArgumentParser:
    """
    Construct the main parser.

    :returns: An :class:`argparse.ArgumentParser` configured with the build CLI options
    """
    parser = argparse.ArgumentParser(
        description=textwrap.indent(
            textwrap.dedent(
                """
                A simple, correct Python build frontend.
                By default, a source distribution (sdist) is built from {srcdir}
                and a binary distribution (wheel) is built from the sdist.
                This is recommended as it will ensure the sdist can be used
                to build wheels.
                Pass -s/--sdist and/or -w/--wheel to build a specific distribution.
                If you do this, the default behavior will be disabled, and all
                artifacts will be built from {srcdir} (even if you combine
                -w/--wheel with -s/--sdist, the wheel will be built from {srcdir}).
                """
            ).strip(),
            ' ',
        ),
        # Prevent argparse from taking up the entire width of the terminal window
        # which impedes readability.
        formatter_class=partial(argparse.RawDescriptionHelpFormatter, width=min(_max_terminal_width, 127)),
    )
    parser.add_argument(
        'srcdir',
        type=str,
        nargs='?',
        default=os.getcwd(),
        help='source directory (defaults to current directory)',
    )
    parser.add_argument(
        '--version',
        '-V',
        action='version',
        version=f"build {build.__version__} ({','.join(build.__path__)})",
    )
    parser.add_argument(
        '--verbose',
        '-v',
        dest='verbosity',
        action='count',
        default=0,
        help='increase verbosity',
    )
    # -s and -w append to the same 'distributions' list; when neither is given
    # the list stays None and main() falls back to the sdist-then-wheel default.
    parser.add_argument(
        '--sdist',
        '-s',
        dest='distributions',
        action='append_const',
        const='sdist',
        help='build a source distribution (disables the default behavior)',
    )
    parser.add_argument(
        '--wheel',
        '-w',
        dest='distributions',
        action='append_const',
        const='wheel',
        help='build a wheel (disables the default behavior)',
    )
    parser.add_argument(
        '--outdir',
        '-o',
        type=str,
        help=f'output directory (defaults to {{srcdir}}{os.sep}dist)',
        metavar='PATH',
    )
    parser.add_argument(
        '--skip-dependency-check',
        '-x',
        action='store_true',
        help='do not check that build dependencies are installed',
    )
    # --no-isolation and --installer are mutually exclusive: an installer is
    # only used when an isolated environment is created.
    env_group = parser.add_mutually_exclusive_group()
    env_group.add_argument(
        '--no-isolation',
        '-n',
        action='store_true',
        help='disable building the project in an isolated virtual environment. '
        'Build dependencies must be installed separately when this option is used',
    )
    env_group.add_argument(
        '--installer',
        choices=_env.INSTALLERS,
        help='Python package installer to use (defaults to pip)',
    )
    parser.add_argument(
        '--config-setting',
        '-C',
        dest='config_settings',
        action='append',
        help='settings to pass to the backend.  Multiple settings can be provided. '
        'Settings beginning with a hyphen will erroneously be interpreted as options to build if separated '
        'by a space character; use ``--config-setting=--my-setting -C--my-other-setting``',
        metavar='KEY[=VALUE]',
    )
    return parser
def main(cli_args: Sequence[str], prog: str | None = None) -> None:
    """
    Parse the CLI arguments and invoke the build process.

    :param cli_args: CLI arguments
    :param prog: Program name to show in help text
    """
    parser = main_parser()
    if prog:
        parser.prog = prog
    args = parser.parse_args(cli_args)
    _setup_cli(verbosity=args.verbosity)
    # Fold repeated '-C KEY=VALUE' options into a mapping; a key given more
    # than once collects its values into a list.
    config_settings = {}
    if args.config_settings:
        for arg in args.config_settings:
            setting, _, value = arg.partition('=')
            if setting not in config_settings:
                config_settings[setting] = value
            else:
                if not isinstance(config_settings[setting], list):
                    config_settings[setting] = [config_settings[setting]]
                config_settings[setting].append(value)
    # outdir is relative to srcdir only if omitted.
    outdir = os.path.join(args.srcdir, 'dist') if args.outdir is None else args.outdir
    distributions: list[Distribution] = args.distributions
    # With no explicit -s/-w, the default is a wheel built via an intermediate sdist.
    if distributions:
        build_call = build_package
    else:
        build_call = build_package_via_sdist
        distributions = ['wheel']
    with _handle_build_error():
        built = build_call(
            args.srcdir,
            outdir,
            distributions,
            config_settings,
            not args.no_isolation,
            args.skip_dependency_check,
            args.installer,
        )
        # Style each artifact name individually so the underline does not
        # bleed into the surrounding success message.
        artifact_list = _natural_language_list(
            ['{underline}{}{reset}{bold}{green}'.format(artifact, **_styles.get()) for artifact in built]
        )
        _cprint('{bold}{green}Successfully built {}{reset}', artifact_list)
def entrypoint() -> None:
    # Console-script entry point.
    main(sys.argv[1:])
if __name__ == '__main__': # pragma: no cover
    # Invoked as 'python -m build'; pass the module-style program name for help text.
    main(sys.argv[1:], 'python -m build')
__all__ = [
    'main',
    'main_parser',
]

View File

@ -0,0 +1,355 @@
# SPDX-License-Identifier: MIT
from __future__ import annotations
import contextlib
import difflib
import os
import subprocess
import sys
import warnings
import zipfile
from collections.abc import Iterator
from typing import Any, Mapping, Sequence, TypeVar
import pyproject_hooks
from . import _ctx, env
from ._compat import tomllib
from ._exceptions import (
BuildBackendException,
BuildException,
BuildSystemTableValidationError,
TypoWarning,
)
from ._types import ConfigSettings, Distribution, StrPath, SubprocessRunner
from ._util import check_dependency, parse_wheel_filename
# TypeVar so classmethod constructors on subclasses return the subclass type.
_TProjectBuilder = TypeVar('_TProjectBuilder', bound='ProjectBuilder')
# Backend used when pyproject.toml (PEP 517) or its [build-system] table
# (PEP 518) is absent: the legacy setuptools backend.
_DEFAULT_BACKEND = {
    'build-backend': 'setuptools.build_meta:__legacy__',
    'requires': ['setuptools >= 40.8.0'],
}
def _find_typo(dictionary: Mapping[str, str], expected: str) -> None:
for obj in dictionary:
if difflib.SequenceMatcher(None, expected, obj).ratio() >= 0.8:
warnings.warn(
f"Found '{obj}' in pyproject.toml, did you mean '{expected}'?",
TypoWarning,
stacklevel=2,
)
def _validate_source_directory(source_dir: StrPath) -> None:
if not os.path.isdir(source_dir):
msg = f'Source {source_dir} is not a directory'
raise BuildException(msg)
pyproject_toml = os.path.join(source_dir, 'pyproject.toml')
setup_py = os.path.join(source_dir, 'setup.py')
if not os.path.exists(pyproject_toml) and not os.path.exists(setup_py):
msg = f'Source {source_dir} does not appear to be a Python project: no pyproject.toml or setup.py'
raise BuildException(msg)
def _read_pyproject_toml(path: StrPath) -> Mapping[str, Any]:
    """Parse the TOML file at *path*; an absent file yields an empty mapping.

    :raises BuildException: If the file cannot be read or is not valid TOML
    """
    try:
        with open(path, 'rb') as f:
            return tomllib.loads(f.read().decode())
    except FileNotFoundError:
        # Missing pyproject.toml is legal (PEP 517); defaults apply.
        return {}
    except PermissionError as e:
        msg = f"{e.strerror}: '{e.filename}' "
        raise BuildException(msg) from None
    except tomllib.TOMLDecodeError as e:
        msg = f'Failed to parse {path}: {e} '
        raise BuildException(msg) from None
def _parse_build_system_table(pyproject_toml: Mapping[str, Any]) -> Mapping[str, Any]:
    """Validate and normalise the ``[build-system]`` table.

    :returns: A table that always carries ``requires`` and ``build-backend``
    :raises BuildSystemTableValidationError: On a malformed table
    """
    # If pyproject.toml is missing (per PEP 517) or [build-system] is missing
    # (per PEP 518), use default values
    if 'build-system' not in pyproject_toml:
        _find_typo(pyproject_toml, 'build-system')
        return _DEFAULT_BACKEND
    build_system_table = dict(pyproject_toml['build-system'])
    # If [build-system] is present, it must have a ``requires`` field (per PEP 518)
    if 'requires' not in build_system_table:
        _find_typo(build_system_table, 'requires')
        msg = '`requires` is a required property'
        raise BuildSystemTableValidationError(msg)
    elif not isinstance(build_system_table['requires'], list) or not all(
        isinstance(i, str) for i in build_system_table['requires']
    ):
        msg = '`requires` must be an array of strings'
        raise BuildSystemTableValidationError(msg)
    if 'build-backend' not in build_system_table:
        _find_typo(build_system_table, 'build-backend')
        # If ``build-backend`` is missing, inject the legacy setuptools backend
        # but leave ``requires`` intact to emulate pip
        build_system_table['build-backend'] = _DEFAULT_BACKEND['build-backend']
    elif not isinstance(build_system_table['build-backend'], str):
        msg = '`build-backend` must be a string'
        raise BuildSystemTableValidationError(msg)
    if 'backend-path' in build_system_table and (
        not isinstance(build_system_table['backend-path'], list)
        or not all(isinstance(i, str) for i in build_system_table['backend-path'])
    ):
        msg = '`backend-path` must be an array of strings'
        raise BuildSystemTableValidationError(msg)
    # Reject keys outside the PEP 517/518 vocabulary.
    unknown_props = build_system_table.keys() - {'requires', 'build-backend', 'backend-path'}
    if unknown_props:
        msg = f'Unknown properties: {", ".join(unknown_props)}'
        raise BuildSystemTableValidationError(msg)
    return build_system_table
def _wrap_subprocess_runner(runner: SubprocessRunner, env: env.IsolatedEnv) -> SubprocessRunner:
    # Wrap *runner* so every invocation also carries the isolated environment's
    # extra environment variables; caller-supplied extra_environ wins on conflict.
    def _invoke_wrapped_runner(
        cmd: Sequence[str], cwd: str | None = None, extra_environ: Mapping[str, str] | None = None
    ) -> None:
        runner(cmd, cwd, {**(env.make_extra_environ() or {}), **(extra_environ or {})})
    return _invoke_wrapped_runner
class ProjectBuilder:
    """
    The PEP 517 consumer API.
    """
    def __init__(
        self,
        source_dir: StrPath,
        python_executable: str = sys.executable,
        runner: SubprocessRunner = pyproject_hooks.default_subprocess_runner,
    ) -> None:
        """
        :param source_dir: The source directory
        :param python_executable: The python executable where the backend lives
        :param runner: Runner for backend subprocesses
        The ``runner``, if provided, must accept the following arguments:
        - ``cmd``: a list of strings representing the command and arguments to
          execute, as would be passed to e.g. 'subprocess.check_call'.
        - ``cwd``: a string representing the working directory that must be
          used for the subprocess. Corresponds to the provided source_dir.
        - ``extra_environ``: a dict mapping environment variable names to values
          which must be set for the subprocess execution.
        The default runner simply calls the backend hooks in a subprocess, writing backend output
        to stdout/stderr.
        """
        self._source_dir: str = os.path.abspath(source_dir)
        _validate_source_directory(source_dir)
        self._python_executable = python_executable
        self._runner = runner
        # Resolve the backend from pyproject.toml (or fall back to the
        # legacy setuptools default) and prepare the hook caller.
        pyproject_toml_path = os.path.join(source_dir, 'pyproject.toml')
        self._build_system = _parse_build_system_table(_read_pyproject_toml(pyproject_toml_path))
        self._backend = self._build_system['build-backend']
        self._hook = pyproject_hooks.BuildBackendHookCaller(
            self._source_dir,
            self._backend,
            backend_path=self._build_system.get('backend-path'),
            python_executable=self._python_executable,
            runner=self._runner,
        )
    @classmethod
    def from_isolated_env(
        cls: type[_TProjectBuilder],
        env: env.IsolatedEnv,
        source_dir: StrPath,
        runner: SubprocessRunner = pyproject_hooks.default_subprocess_runner,
    ) -> _TProjectBuilder:
        # Alternate constructor: bind the builder to an isolated environment's
        # interpreter and inject its env vars into every backend subprocess.
        return cls(
            source_dir=source_dir,
            python_executable=env.python_executable,
            runner=_wrap_subprocess_runner(runner, env),
        )
    @property
    def source_dir(self) -> str:
        """Project source directory."""
        return self._source_dir
    @property
    def python_executable(self) -> str:
        """
        The Python executable used to invoke the backend.
        """
        return self._python_executable
    @property
    def build_system_requires(self) -> set[str]:
        """
        The dependencies defined in the ``pyproject.toml``'s
        ``build-system.requires`` field or the default build dependencies
        if ``pyproject.toml`` is missing or ``build-system`` is undefined.
        """
        return set(self._build_system['requires'])
    def get_requires_for_build(
        self,
        distribution: Distribution,
        config_settings: ConfigSettings | None = None,
    ) -> set[str]:
        """
        Return the dependencies defined by the backend in addition to
        :attr:`build_system_requires` for a given distribution.
        :param distribution: Distribution to get the dependencies of
            (``sdist`` or ``wheel``)
        :param config_settings: Config settings for the build backend
        """
        _ctx.log(f'Getting build dependencies for {distribution}...')
        hook_name = f'get_requires_for_build_{distribution}'
        get_requires = getattr(self._hook, hook_name)
        with self._handle_backend(hook_name):
            return set(get_requires(config_settings))
    def check_dependencies(
        self,
        distribution: Distribution,
        config_settings: ConfigSettings | None = None,
    ) -> set[tuple[str, ...]]:
        """
        Return the dependencies which are not satisfied from the combined set of
        :attr:`build_system_requires` and :meth:`get_requires_for_build` for a given
        distribution.
        :param distribution: Distribution to check (``sdist`` or ``wheel``)
        :param config_settings: Config settings for the build backend
        :returns: Set of variable-length unmet dependency tuples
        """
        dependencies = self.get_requires_for_build(distribution, config_settings).union(self.build_system_requires)
        return {u for d in dependencies for u in check_dependency(d)}
    def prepare(
        self,
        distribution: Distribution,
        output_directory: StrPath,
        config_settings: ConfigSettings | None = None,
    ) -> str | None:
        """
        Prepare metadata for a distribution.
        :param distribution: Distribution to build (must be ``wheel``)
        :param output_directory: Directory to put the prepared metadata in
        :param config_settings: Config settings for the build backend
        :returns: The full path to the prepared metadata directory
        """
        _ctx.log(f'Getting metadata for {distribution}...')
        try:
            return self._call_backend(
                f'prepare_metadata_for_build_{distribution}',
                output_directory,
                config_settings,
                _allow_fallback=False,
            )
        except BuildBackendException as exception:
            # The prepare_metadata hook is optional; None signals "unsupported".
            if isinstance(exception.exception, pyproject_hooks.HookMissing):
                return None
            raise
    def build(
        self,
        distribution: Distribution,
        output_directory: StrPath,
        config_settings: ConfigSettings | None = None,
        metadata_directory: str | None = None,
    ) -> str:
        """
        Build a distribution.
        :param distribution: Distribution to build (``sdist`` or ``wheel``)
        :param output_directory: Directory to put the built distribution in
        :param config_settings: Config settings for the build backend
        :param metadata_directory: If provided, should be the return value of a
            previous ``prepare`` call on the same ``distribution`` kind
        :returns: The full path to the built distribution
        """
        _ctx.log(f'Building {distribution}...')
        kwargs = {} if metadata_directory is None else {'metadata_directory': metadata_directory}
        return self._call_backend(f'build_{distribution}', output_directory, config_settings, **kwargs)
    def metadata_path(self, output_directory: StrPath) -> str:
        """
        Generate the metadata directory of a distribution and return its path.
        If the backend does not support the ``prepare_metadata_for_build_wheel``
        hook, a wheel will be built and the metadata will be extracted from it.
        :param output_directory: Directory to put the metadata distribution in
        :returns: The path of the metadata directory
        """
        # prepare_metadata hook
        metadata = self.prepare('wheel', output_directory)
        if metadata is not None:
            return metadata
        # fallback to build_wheel hook
        wheel = self.build('wheel', output_directory)
        match = parse_wheel_filename(os.path.basename(wheel))
        if not match:
            msg = 'Invalid wheel'
            raise ValueError(msg)
        distinfo = f"{match['distribution']}-{match['version']}.dist-info"
        member_prefix = f'{distinfo}/'
        # Extract only the .dist-info members from the freshly built wheel.
        with zipfile.ZipFile(wheel) as w:
            w.extractall(
                output_directory,
                (member for member in w.namelist() if member.startswith(member_prefix)),
            )
        return os.path.join(output_directory, distinfo)
    def _call_backend(
        self, hook_name: str, outdir: StrPath, config_settings: ConfigSettings | None = None, **kwargs: Any
    ) -> str:
        # Invoke a backend hook, ensuring the output directory exists, and
        # return the absolute path of the artifact the hook reports.
        outdir = os.path.abspath(outdir)
        callback = getattr(self._hook, hook_name)
        if os.path.exists(outdir):
            if not os.path.isdir(outdir):
                msg = f"Build path '{outdir}' exists and is not a directory"
                raise BuildException(msg)
        else:
            os.makedirs(outdir)
        with self._handle_backend(hook_name):
            basename: str = callback(outdir, config_settings, **kwargs)
        return os.path.join(outdir, basename)
    @contextlib.contextmanager
    def _handle_backend(self, hook: str) -> Iterator[None]:
        # Normalise hook failures into BuildBackendException so callers only
        # need to handle one exception type.
        try:
            yield
        except pyproject_hooks.BackendUnavailable as exception:
            raise BuildBackendException(
                exception,
                f"Backend '{self._backend}' is not available.",
                sys.exc_info(),
            ) from None
        except subprocess.CalledProcessError as exception:
            raise BuildBackendException(exception, f'Backend subprocess exited when trying to invoke {hook}') from None
        except Exception as exception:
            raise BuildBackendException(exception, exc_info=sys.exc_info()) from None

View File

@ -0,0 +1,22 @@
from __future__ import annotations
import sys
import typing
if typing.TYPE_CHECKING:
    import importlib_metadata as metadata
else:
    # Prefer the stdlib importlib.metadata on 3.10.2+; earlier interpreters
    # use the importlib_metadata backport when it is installed.
    if sys.version_info >= (3, 10, 2):
        from importlib import metadata
    else:
        try:
            import importlib_metadata as metadata
        except ModuleNotFoundError:
            # helps bootstrapping when dependencies aren't installed
            from importlib import metadata
__all__ = [
    'metadata',
]

View File

@ -0,0 +1,32 @@
from __future__ import annotations
import sys
import tarfile
import typing
if typing.TYPE_CHECKING:
    TarFile = tarfile.TarFile
else:
    # Per https://peps.python.org/pep-0706/, the "data" filter will become
    # the default in Python 3.14. The first series of releases with the filter
    # had a broken filter that could not process symlinks correctly.
    if (
        (3, 8, 18) <= sys.version_info < (3, 9)
        or (3, 9, 18) <= sys.version_info < (3, 10)
        or (3, 10, 13) <= sys.version_info < (3, 11)
        or (3, 11, 5) <= sys.version_info < (3, 12)
        or (3, 12) <= sys.version_info < (3, 14)
    ):
        # On releases that ship a working filter, opt in to the safer
        # "data" extraction behaviour ahead of the 3.14 default.
        class TarFile(tarfile.TarFile):
            extraction_filter = staticmethod(tarfile.data_filter)
    else:
        TarFile = tarfile.TarFile
__all__ = [
    'TarFile',
]

View File

@ -0,0 +1,16 @@
from __future__ import annotations
import sys
if sys.version_info >= (3, 11):
    # tomllib is in the standard library from 3.11 onwards.
    from tomllib import TOMLDecodeError, load, loads
else:
    # Older interpreters use the 'tomli' backport.
    from tomli import TOMLDecodeError, load, loads
__all__ = [
    'TOMLDecodeError',
    'load',
    'loads',
]

View File

@ -0,0 +1,98 @@
from __future__ import annotations
import contextvars
import logging
import subprocess
import typing
from collections.abc import Mapping, Sequence
from functools import partial
from ._types import StrPath
class _Logger(typing.Protocol): # pragma: no cover
    """Structural type for logger callables stored in the LOGGER context variable."""
    def __call__(self, message: str, *, origin: tuple[str, ...] | None = None) -> None: ...
# Name of this package, used as the default logger name.
_package_name = __spec__.parent # type: ignore[name-defined]
_default_logger = logging.getLogger(_package_name)
def _log_default(message: str, *, origin: tuple[str, ...] | None = None) -> None:
    # Default logger: forward the frontend's own messages to stdlib logging;
    # subprocess-originated output (origin is not None) is dropped here.
    if origin is None:
        _default_logger.log(logging.INFO, message, stacklevel=2)
# Context-local hooks so a CLI (or embedding application) can override the
# logging behaviour and verbosity without global state.
LOGGER = contextvars.ContextVar('LOGGER', default=_log_default)
VERBOSITY = contextvars.ContextVar('VERBOSITY', default=0)
def log_subprocess_error(error: subprocess.CalledProcessError) -> None:
    """Log the command line and any captured output of a failed subprocess."""
    log = LOGGER.get()
    log(subprocess.list2cmdline(error.cmd), origin=('subprocess', 'cmd'))
    for stream_name in ('stdout', 'stderr'):
        stream = getattr(error, stream_name)
        if stream:
            # CalledProcessError may carry bytes or str depending on capture mode.
            log(stream.decode() if isinstance(stream, bytes) else stream, origin=('subprocess', stream_name))
def run_subprocess(cmd: Sequence[StrPath], env: Mapping[str, str] | None = None) -> None:
    """Run *cmd*, logging its output.

    In verbose mode the output is streamed to the logger as it arrives;
    otherwise it is captured and only logged when the command fails.

    :raises subprocess.CalledProcessError: If the command exits non-zero
    """
    verbosity = VERBOSITY.get()
    if verbosity:
        import concurrent.futures
        log = LOGGER.get()
        def log_stream(stream_name: str, stream: typing.IO[str]) -> None:
            for line in stream:
                log(line, origin=('subprocess', stream_name))
        with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor, subprocess.Popen(
            cmd, encoding='utf-8', env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        ) as process:
            log(subprocess.list2cmdline(cmd), origin=('subprocess', 'cmd'))
            # Logging in sub-thread to more-or-less ensure order of stdout and stderr whilst also
            # being able to distinguish between the two.
            concurrent.futures.wait(
                [executor.submit(partial(log_stream, n, getattr(process, n))) for n in ('stdout', 'stderr')]
            )
            code = process.wait()
        if code:
            raise subprocess.CalledProcessError(code, process.args)
    else:
        try:
            subprocess.run(cmd, capture_output=True, check=True, env=env)
        except subprocess.CalledProcessError as error:
            # Surface the captured output before re-raising.
            log_subprocess_error(error)
            raise
if typing.TYPE_CHECKING:
    log: _Logger
    verbosity: bool
else:
    def __getattr__(name):
        # Expose 'log' and 'verbosity' as dynamic module attributes that
        # always reflect the current context-variable values.
        if name == 'log':
            return LOGGER.get()
        elif name == 'verbosity':
            return VERBOSITY.get()
        raise AttributeError(name) # pragma: no cover
__all__ = [
    'log_subprocess_error',
    'log',
    'run_subprocess',
    'LOGGER',
    'verbosity',
    'VERBOSITY',
]

View File

@ -0,0 +1,65 @@
from __future__ import annotations
import subprocess
import types
class BuildException(Exception):
    """
    Exception raised by :class:`build.ProjectBuilder`.

    Also serves as the base class of :class:`BuildSystemTableValidationError`.
    """
class BuildBackendException(Exception):
    """
    Exception raised when a backend operation fails.
    """
    def __init__(
        self,
        exception: Exception,
        description: str | None = None,
        exc_info: tuple[type[BaseException], BaseException, types.TracebackType] | tuple[None, None, None] = (
            None,
            None,
            None,
        ),
    ) -> None:
        """
        :param exception: The underlying exception raised by the backend
        :param description: Optional human-readable description used by ``__str__``
        :param exc_info: ``sys.exc_info()`` captured where the failure occurred
        """
        super().__init__()
        self.exception = exception
        self.exc_info = exc_info
        self._description = description
    def __str__(self) -> str:
        # Prefer the explicit description; otherwise fall back to the repr
        # of the wrapped exception.
        if self._description:
            return self._description
        return f'Backend operation failed: {self.exception!r}'
class BuildSystemTableValidationError(BuildException):
    """
    Exception raised when the ``[build-system]`` table in pyproject.toml is invalid.
    """
    def __str__(self) -> str:
        # args[0] holds the validation message supplied at raise time.
        return f'Failed to validate `build-system` in pyproject.toml: {self.args[0]}'
class FailedProcessError(Exception):
    """
    Exception raised when a setup or preparation operation fails.
    """
    def __init__(self, exception: subprocess.CalledProcessError, description: str) -> None:
        """
        :param exception: The :class:`subprocess.CalledProcessError` from the failed process
        :param description: Human-readable description used by ``__str__``
        """
        super().__init__()
        self.exception = exception
        self._description = description
    def __str__(self) -> str:
        return self._description
class TypoWarning(Warning):
    """
    Warning raised when a possible typo is found.

    Emitted when a pyproject.toml key closely resembles an expected key.
    """

View File

@ -0,0 +1,23 @@
from __future__ import annotations
import os
import sys
import typing
__all__ = ['ConfigSettings', 'Distribution', 'StrPath', 'SubprocessRunner']
# PEP 517 config_settings mapping passed through to the build backend.
ConfigSettings = typing.Mapping[str, typing.Union[str, typing.Sequence[str]]]
# The distribution kinds a backend can produce.
Distribution = typing.Literal['sdist', 'wheel', 'editable']
if typing.TYPE_CHECKING or sys.version_info > (3, 9):
    # os.PathLike is only subscriptable at runtime on newer interpreters.
    StrPath = typing.Union[str, os.PathLike[str]]
else:
    StrPath = typing.Union[str, os.PathLike]
if typing.TYPE_CHECKING:
    from pyproject_hooks import SubprocessRunner
else:
    # Runtime stand-in for pyproject_hooks.SubprocessRunner:
    # (cmd, cwd, extra_environ) -> None.
    SubprocessRunner = typing.Callable[
        [typing.Sequence[str], typing.Optional[str], typing.Optional[typing.Mapping[str, str]]], None
    ]

View File

@ -0,0 +1,63 @@
from __future__ import annotations
import re
from collections.abc import Iterator, Set
# Wheel filename convention (PEP 427):
# {distribution}-{version}(-{build_tag})?-{python_tag}-{abi_tag}-{platform_tag}.whl
_WHEEL_FILENAME_REGEX = re.compile(
    r'(?P<distribution>.+)-(?P<version>.+)'
    r'(-(?P<build_tag>.+))?-(?P<python_tag>.+)'
    r'-(?P<abi_tag>.+)-(?P<platform_tag>.+)\.whl'
)
def check_dependency(
    req_string: str, ancestral_req_strings: tuple[str, ...] = (), parent_extras: Set[str] = frozenset()
) -> Iterator[tuple[str, ...]]:
    """
    Verify that a dependency and all of its dependencies are met.

    :param req_string: Requirement string
    :param ancestral_req_strings: Chain of requirements that led here (used as a cycle guard)
    :param parent_extras: Extras (eg. "test" in myproject[test])
    :yields: Unmet dependencies
    """
    import packaging.requirements
    from ._compat import importlib
    req = packaging.requirements.Requirement(req_string)
    normalised_req_string = str(req)
    # ``Requirement`` doesn't implement ``__eq__`` so we cannot compare reqs for
    # equality directly but the string representation is stable.
    if normalised_req_string in ancestral_req_strings:
        # cyclical dependency, already checked.
        return
    if req.marker:
        extras = frozenset(('',)).union(parent_extras)
        # a requirement can have multiple extras but ``evaluate`` can
        # only check one at a time.
        if all(not req.marker.evaluate(environment={'extra': e}) for e in extras):
            # if the marker conditions are not met, we pretend that the
            # dependency is satisfied.
            return
    try:
        dist = importlib.metadata.distribution(req.name)
    except importlib.metadata.PackageNotFoundError:
        # dependency is not installed in the environment.
        yield (*ancestral_req_strings, normalised_req_string)
    else:
        if req.specifier and not req.specifier.contains(dist.version, prereleases=True):
            # the installed version is incompatible.
            yield (*ancestral_req_strings, normalised_req_string)
        elif dist.requires:
            for other_req_string in dist.requires:
                # yields transitive dependencies that are not satisfied.
                yield from check_dependency(other_req_string, (*ancestral_req_strings, normalised_req_string), req.extras)
def parse_wheel_filename(filename: str) -> re.Match[str] | None:
    """Match *filename* against the wheel naming convention; ``None`` if it does not conform."""
    return _WHEEL_FILENAME_REGEX.match(filename)

View File

@ -0,0 +1,372 @@
from __future__ import annotations
import abc
import functools
import importlib.util
import os
import platform
import shutil
import subprocess
import sys
import sysconfig
import tempfile
import typing
from collections.abc import Collection, Mapping
from . import _ctx
from ._ctx import run_subprocess
from ._exceptions import FailedProcessError
from ._util import check_dependency
# Package installers supported for populating isolated environments.
Installer = typing.Literal['pip', 'uv']
INSTALLERS = typing.get_args(Installer)
class IsolatedEnv(typing.Protocol):
    """Isolated build environment ABC.

    A Protocol, so implementations only need to match the interface
    structurally; subclassing is not required.
    """
    @property
    @abc.abstractmethod
    def python_executable(self) -> str:
        """The Python executable of the isolated environment."""
    @abc.abstractmethod
    def make_extra_environ(self) -> Mapping[str, str] | None:
        """Generate additional env vars specific to the isolated environment."""
def _has_dependency(name: str, minimum_version_str: str | None = None, /, **distargs: object) -> bool | None:
    """
    Given a path, see if a package is present and return True if the version is
    sufficient for build, False if it is not, None if the package is missing.

    :param name: Distribution name to look up
    :param minimum_version_str: Lower version bound; when None, presence alone suffices
    :param distargs: Extra keyword arguments forwarded to ``distributions()``
    """
    from packaging.version import Version
    from ._compat import importlib
    try:
        distribution = next(iter(importlib.metadata.distributions(name=name, **distargs)))
    except StopIteration:
        # No matching distribution was found.
        return None
    if minimum_version_str is None:
        return True
    return Version(distribution.version) >= Version(minimum_version_str)
class DefaultIsolatedEnv(IsolatedEnv):
    """
    Isolated environment which supports several different underlying implementations.
    """
    def __init__(
        self,
        *,
        installer: Installer = 'pip',
    ) -> None:
        """
        :param installer: Package installer used to populate the environment
        """
        self.installer: Installer = installer
    def __enter__(self) -> DefaultIsolatedEnv:
        # Create the environment in a temporary directory, selecting the
        # backend implementation based on the requested installer.
        try:
            path = tempfile.mkdtemp(prefix='build-env-')
            # Call ``realpath`` to prevent spurious warning from being emitted
            # that the venv location has changed on Windows for the venv impl.
            # The username is DOS-encoded in the output of tempfile - the location is the same
            # but the representation of it is different, which confuses venv.
            # Ref: https://bugs.python.org/issue46171
            path = os.path.realpath(path)
            self._path = path
            self._env_backend: _EnvBackend
            # uv is opt-in only.
            if self.installer == 'uv':
                self._env_backend = _UvBackend()
            else:
                self._env_backend = _PipBackend()
            _ctx.log(f'Creating isolated environment: {self._env_backend.display_name}...')
            self._env_backend.create(self._path)
        except Exception: # cleanup folder if creation fails
            self.__exit__(*sys.exc_info())
            raise
        return self
    def __exit__(self, *args: object) -> None:
        if os.path.exists(self._path): # in case the user already deleted skip remove
            shutil.rmtree(self._path)
    @property
    def path(self) -> str:
        """The location of the isolated build environment."""
        return self._path
    @property
    def python_executable(self) -> str:
        """The python executable of the isolated build environment."""
        return self._env_backend.python_executable
    def make_extra_environ(self) -> dict[str, str]:
        # Prepend the environment's scripts directory to PATH so its
        # executables take precedence in backend subprocesses.
        path = os.environ.get('PATH')
        return {
            'PATH': os.pathsep.join([self._env_backend.scripts_dir, path])
            if path is not None
            else self._env_backend.scripts_dir
        }
    def install(self, requirements: Collection[str]) -> None:
        """
        Install packages from PEP 508 requirements in the isolated build environment.

        :param requirements: PEP 508 requirement specification to install
        :note: Passing non-PEP 508 strings will result in undefined behavior, you *should not* rely on it. It is
            merely an implementation detail, it may change any time without warning.
        """
        if not requirements:
            return
        _ctx.log('Installing packages in isolated environment:\n' + '\n'.join(f'- {r}' for r in sorted(requirements)))
        self._env_backend.install_requirements(requirements)
class _EnvBackend(typing.Protocol):  # pragma: no cover
    """Structural interface implemented by the environment creation backends."""

    # Interpreter executable inside the created environment.
    python_executable: str
    # Directory holding the environment's entry-point scripts.
    scripts_dir: str

    def create(self, path: str) -> None: ...

    def install_requirements(self, requirements: Collection[str]) -> None: ...

    @property
    def display_name(self) -> str: ...
class _PipBackend(_EnvBackend):
    """Backend creating the env with venv/virtualenv and installing with pip."""

    def __init__(self) -> None:
        # Use virtualenv only when we cannot drive an outer pip into the venv
        # and a compatible virtualenv is importable (see the two properties).
        self._create_with_virtualenv = not self._has_valid_outer_pip and self._has_virtualenv

    @functools.cached_property
    def _has_valid_outer_pip(self) -> bool | None:
        """
        This checks for a valid global pip. Returns None if pip is missing, False
        if pip is too old, and True if it can be used.
        """
        # Version to have added the `--python` option.
        return _has_dependency('pip', '22.3')

    @functools.cached_property
    def _has_virtualenv(self) -> bool:
        """
        virtualenv might be incompatible if it was installed separately
        from build. This verifies that virtualenv and all of its
        dependencies are installed as required by build.
        """
        from packaging.requirements import Requirement

        name = 'virtualenv'
        # Usable only if importable AND not listed among the unmet
        # dependencies of the ``build[virtualenv]`` extra.
        return importlib.util.find_spec(name) is not None and not any(
            Requirement(d[1]).name == name for d in check_dependency(f'build[{name}]') if len(d) > 1
        )

    @staticmethod
    def _get_minimum_pip_version_str() -> str:
        """Return the minimum pip version usable on the current platform."""
        if platform.system() == 'Darwin':
            release, _, machine = platform.mac_ver()
            if int(release[: release.find('.')]) >= 11:
                # macOS 11+ name scheme change requires 20.3. Intel macOS 11.0 can be
                # told to report 10.16 for backwards compatibility; but that also fixes
                # earlier versions of pip so this is only needed for 11+.
                is_apple_silicon_python = machine != 'x86_64'
                return '21.0.1' if is_apple_silicon_python else '20.3.0'

        # PEP-517 and manylinux1 was first implemented in 19.1
        return '19.1.0'

    def create(self, path: str) -> None:
        """Create the virtual environment at *path*."""
        if self._create_with_virtualenv:
            import virtualenv

            result = virtualenv.cli_run(
                [
                    path,
                    '--activators',
                    '',
                    '--no-setuptools',
                    '--no-wheel',
                ],
                setup_logging=False,
            )

            # The creator attributes are `pathlib.Path`s.
            self.python_executable = str(result.creator.exe)
            self.scripts_dir = str(result.creator.script_dir)

        else:
            import venv

            # Only bootstrap pip inside the venv when no usable outer pip exists.
            with_pip = not self._has_valid_outer_pip
            try:
                venv.EnvBuilder(symlinks=_fs_supports_symlink(), with_pip=with_pip).create(path)
            except subprocess.CalledProcessError as exc:
                _ctx.log_subprocess_error(exc)
                raise FailedProcessError(exc, 'Failed to create venv. Maybe try installing virtualenv.') from None

            self.python_executable, self.scripts_dir, purelib = _find_executable_and_scripts(path)

            if with_pip:
                minimum_pip_version_str = self._get_minimum_pip_version_str()
                if not _has_dependency(
                    'pip',
                    minimum_pip_version_str,
                    path=[purelib],
                ):
                    # Upgrade the in-venv pip to a version that works on this platform.
                    run_subprocess([self.python_executable, '-Im', 'pip', 'install', f'pip>={minimum_pip_version_str}'])

                # Uninstall setuptools from the build env to prevent depending on it implicitly.
                # Pythons 3.12 and up do not install setuptools, check if it exists first.
                if _has_dependency(
                    'setuptools',
                    path=[purelib],
                ):
                    run_subprocess([self.python_executable, '-Im', 'pip', 'uninstall', '-y', 'setuptools'])

    def install_requirements(self, requirements: Collection[str]) -> None:
        """Install PEP 508 *requirements* into the environment via pip."""
        # pip does not honour environment markers in command line arguments
        # but it does from requirement files.
        with tempfile.NamedTemporaryFile('w', prefix='build-reqs-', suffix='.txt', delete=False, encoding='utf-8') as req_file:
            req_file.write(os.linesep.join(requirements))

        try:
            if self._has_valid_outer_pip:
                # Drive the outer pip, pointed at the venv's interpreter.
                cmd = [sys.executable, '-m', 'pip', '--python', self.python_executable]
            else:
                cmd = [self.python_executable, '-Im', 'pip']

            # Propagate verbosity: each level above 1 adds one 'v'.
            if _ctx.verbosity > 1:
                cmd += [f'-{"v" * (_ctx.verbosity - 1)}']

            cmd += [
                'install',
                '--use-pep517',
                '--no-warn-script-location',
                '--no-compile',
                '-r',
                os.path.abspath(req_file.name),
            ]
            run_subprocess(cmd)
        finally:
            # The file was created with delete=False; clean it up ourselves.
            os.unlink(req_file.name)

    @property
    def display_name(self) -> str:
        return 'virtualenv+pip' if self._create_with_virtualenv else 'venv+pip'
class _UvBackend(_EnvBackend):
    """Backend creating the env with venv and installing with uv."""

    def create(self, path: str) -> None:
        """Create the virtual environment at *path* and locate a uv binary."""
        import venv

        self._env_path = path

        try:
            import uv

            self._uv_bin = uv.find_uv_bin()
        except (ModuleNotFoundError, FileNotFoundError):
            # The uv PyPI package is unavailable (or has no binary); fall
            # back to a uv executable found on PATH.
            uv_bin = shutil.which('uv')
            if uv_bin is None:
                msg = 'uv executable not found'
                raise RuntimeError(msg) from None

            _ctx.log(f'Using external uv from {uv_bin}')
            self._uv_bin = uv_bin

        # uv installs itself; no pip is needed inside the venv.
        venv.EnvBuilder(symlinks=_fs_supports_symlink(), with_pip=False).create(self._env_path)
        self.python_executable, self.scripts_dir, _ = _find_executable_and_scripts(self._env_path)

    def install_requirements(self, requirements: Collection[str]) -> None:
        """Install *requirements* into the environment via ``uv pip``."""
        cmd = [self._uv_bin, 'pip']
        # uv accepts at most -vv; clamp the propagated verbosity.
        if _ctx.verbosity > 1:
            cmd += [f'-{"v" * min(2, _ctx.verbosity - 1)}']
        # Target the venv through the VIRTUAL_ENV environment variable.
        run_subprocess([*cmd, 'install', *requirements], env={**os.environ, 'VIRTUAL_ENV': self._env_path})

    @property
    def display_name(self) -> str:
        return 'venv+uv'
@functools.lru_cache(maxsize=None)
def _fs_supports_symlink() -> bool:
"""Return True if symlinks are supported"""
# Using definition used by venv.main()
if os.name != 'nt':
return True
# Windows may support symlinks (setting in Windows 10)
with tempfile.NamedTemporaryFile(prefix='build-symlink-') as tmp_file:
dest = f'{tmp_file}-b'
try:
os.symlink(tmp_file.name, dest)
os.unlink(dest)
except (OSError, NotImplementedError, AttributeError):
return False
return True
def _find_executable_and_scripts(path: str) -> tuple[str, str, str]:
"""
Detect the Python executable and script folder of a virtual environment.
:param path: The location of the virtual environment
:return: The Python executable, script folder, and purelib folder
"""
config_vars = sysconfig.get_config_vars().copy() # globally cached, copy before altering it
config_vars['base'] = path
scheme_names = sysconfig.get_scheme_names()
if 'venv' in scheme_names:
# Python distributors with custom default installation scheme can set a
# scheme that can't be used to expand the paths in a venv.
# This can happen if build itself is not installed in a venv.
# The distributors are encouraged to set a "venv" scheme to be used for this.
# See https://bugs.python.org/issue45413
# and https://github.com/pypa/virtualenv/issues/2208
paths = sysconfig.get_paths(scheme='venv', vars=config_vars)
elif 'posix_local' in scheme_names:
# The Python that ships on Debian/Ubuntu varies the default scheme to
# install to /usr/local
# But it does not (yet) set the "venv" scheme.
# If we're the Debian "posix_local" scheme is available, but "venv"
# is not, we use "posix_prefix" instead which is venv-compatible there.
paths = sysconfig.get_paths(scheme='posix_prefix', vars=config_vars)
elif 'osx_framework_library' in scheme_names:
# The Python that ships with the macOS developer tools varies the
# default scheme depending on whether the ``sys.prefix`` is part of a framework.
# But it does not (yet) set the "venv" scheme.
# If the Apple-custom "osx_framework_library" scheme is available but "venv"
# is not, we use "posix_prefix" instead which is venv-compatible there.
paths = sysconfig.get_paths(scheme='posix_prefix', vars=config_vars)
else:
paths = sysconfig.get_paths(vars=config_vars)
executable = os.path.join(paths['scripts'], 'python.exe' if os.name == 'nt' else 'python')
if not os.path.exists(executable):
msg = f'Virtual environment creation failed, executable {executable} missing'
raise RuntimeError(msg)
return executable, paths['scripts'], paths['purelib']
# Public API of this module.
__all__ = [
    'IsolatedEnv',
    'DefaultIsolatedEnv',
]

View File

@ -0,0 +1,61 @@
# SPDX-License-Identifier: MIT
from __future__ import annotations
import pathlib
import tempfile
import pyproject_hooks
from . import ProjectBuilder
from ._compat import importlib
from ._types import StrPath, SubprocessRunner
from .env import DefaultIsolatedEnv
def _project_wheel_metadata(builder: ProjectBuilder) -> importlib.metadata.PackageMetadata:
    """Generate the project's wheel metadata into a temp dir and parse it."""
    with tempfile.TemporaryDirectory() as tmpdir:
        path = pathlib.Path(builder.metadata_path(tmpdir))
        # PathDistribution reads the generated ``*.dist-info`` directory;
        # the metadata is fully parsed before the temp dir is removed.
        return importlib.metadata.PathDistribution(path).metadata
def project_wheel_metadata(
    source_dir: StrPath,
    isolated: bool = True,
    *,
    runner: SubprocessRunner = pyproject_hooks.quiet_subprocess_runner,
) -> importlib.metadata.PackageMetadata:
    """
    Return the wheel metadata for a project.

    Uses the ``prepare_metadata_for_build_wheel`` hook if available,
    otherwise ``build_wheel``.

    :param source_dir: Project source directory
    :param isolated: Whether to invoke the backend in an isolated environment
        created for the purpose (the default) or in the current environment.
    :param runner: An alternative runner for backend subprocesses
    """
    # Non-isolated: drive the backend directly in the current environment.
    if not isolated:
        builder = ProjectBuilder(source_dir, runner=runner)
        return _project_wheel_metadata(builder)

    # Isolated: create a throwaway environment and install the build
    # requirements into it before invoking the backend there.
    with DefaultIsolatedEnv() as env:
        builder = ProjectBuilder.from_isolated_env(env, source_dir, runner=runner)
        env.install(builder.build_system_requires)
        env.install(builder.get_requires_for_build('wheel'))
        return _project_wheel_metadata(builder)
# Public API of this module.
__all__ = [
    'project_wheel_metadata',
]

View File

@ -0,0 +1,105 @@
Metadata-Version: 2.4
Name: packaging
Version: 25.0
Summary: Core utilities for Python packages
Author-email: Donald Stufft <donald@stufft.io>
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: License :: OSI Approved :: BSD License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Typing :: Typed
License-File: LICENSE
License-File: LICENSE.APACHE
License-File: LICENSE.BSD
Project-URL: Documentation, https://packaging.pypa.io/
Project-URL: Source, https://github.com/pypa/packaging
packaging
=========
.. start-intro
Reusable core utilities for various Python Packaging
`interoperability specifications <https://packaging.python.org/specifications/>`_.
This library provides utilities that implement the interoperability
specifications which have clearly one correct behaviour (eg: :pep:`440`)
or benefit greatly from having a single shared implementation (eg: :pep:`425`).
.. end-intro
The ``packaging`` project includes the following: version handling, specifiers,
markers, requirements, tags, utilities.
Documentation
-------------
The `documentation`_ provides information and the API for the following:
- Version Handling
- Specifiers
- Markers
- Requirements
- Tags
- Utilities
Installation
------------
Use ``pip`` to install these utilities::
pip install packaging
The ``packaging`` library uses calendar-based versioning (``YY.N``).
Discussion
----------
If you run into bugs, you can file them in our `issue tracker`_.
You can also join ``#pypa`` on Freenode to ask questions or get involved.
.. _`documentation`: https://packaging.pypa.io/
.. _`issue tracker`: https://github.com/pypa/packaging/issues
Code of Conduct
---------------
Everyone interacting in the packaging project's codebases, issue trackers, chat
rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
Contributing
------------
The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
well as how to report a potential security issue. The documentation for this
project also covers information about `project development`_ and `security`_.
.. _`project development`: https://packaging.pypa.io/en/latest/development/
.. _`security`: https://packaging.pypa.io/en/latest/security/
Project History
---------------
Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
recent changes and project history.
.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/

View File

@ -0,0 +1,40 @@
packaging-25.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
packaging-25.0.dist-info/METADATA,sha256=W2EaYJw4_vw9YWv0XSCuyY-31T8kXayp4sMPyFx6woI,3281
packaging-25.0.dist-info/RECORD,,
packaging-25.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
packaging-25.0.dist-info/licenses/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
packaging-25.0.dist-info/licenses/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
packaging-25.0.dist-info/licenses/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
packaging/__init__.py,sha256=_0cDiPVf2S-bNfVmZguxxzmrIYWlyASxpqph4qsJWUc,494
packaging/__pycache__/__init__.cpython-312.pyc,,
packaging/__pycache__/_elffile.cpython-312.pyc,,
packaging/__pycache__/_manylinux.cpython-312.pyc,,
packaging/__pycache__/_musllinux.cpython-312.pyc,,
packaging/__pycache__/_parser.cpython-312.pyc,,
packaging/__pycache__/_structures.cpython-312.pyc,,
packaging/__pycache__/_tokenizer.cpython-312.pyc,,
packaging/__pycache__/markers.cpython-312.pyc,,
packaging/__pycache__/metadata.cpython-312.pyc,,
packaging/__pycache__/requirements.cpython-312.pyc,,
packaging/__pycache__/specifiers.cpython-312.pyc,,
packaging/__pycache__/tags.cpython-312.pyc,,
packaging/__pycache__/utils.cpython-312.pyc,,
packaging/__pycache__/version.cpython-312.pyc,,
packaging/_elffile.py,sha256=UkrbDtW7aeq3qqoAfU16ojyHZ1xsTvGke_WqMTKAKd0,3286
packaging/_manylinux.py,sha256=t4y_-dTOcfr36gLY-ztiOpxxJFGO2ikC11HgfysGxiM,9596
packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
packaging/_parser.py,sha256=gYfnj0pRHflVc4RHZit13KNTyN9iiVcU2RUCGi22BwM,10221
packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
packaging/_tokenizer.py,sha256=OYzt7qKxylOAJ-q0XyK1qAycyPRYLfMPdGQKRXkZWyI,5310
packaging/licenses/__init__.py,sha256=VsK4o27CJXWfTi8r2ybJmsBoCdhpnBWuNrskaCVKP7U,5715
packaging/licenses/__pycache__/__init__.cpython-312.pyc,,
packaging/licenses/__pycache__/_spdx.cpython-312.pyc,,
packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398
packaging/markers.py,sha256=P0we27jm1xUzgGMJxBjtUFCIWeBxTsMeJTOJ6chZmAY,12049
packaging/metadata.py,sha256=8IZErqQQnNm53dZZuYq4FGU4_dpyinMeH1QFBIWIkfE,34739
packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
packaging/specifiers.py,sha256=gtPu5DTc-F9baLq3FTGEK6dPhHGCuwwZetaY0PSV2gs,40055
packaging/tags.py,sha256=41s97W9Zatrq2Ed7Rc3qeBDaHe8pKKvYq2mGjwahfXk,22745
packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050
packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676

View File

@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: flit 3.12.0
Root-Is-Purelib: true
Tag: py3-none-any

View File

@ -0,0 +1,3 @@
This software is made available under the terms of *either* of the licenses
found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
under the terms of *both* these licenses.

View File

@ -0,0 +1,177 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

View File

@ -0,0 +1,23 @@
Copyright (c) Donald Stufft and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,15 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
# Static package metadata; ``__version__`` is the single source of truth for
# the distribution version (calendar-based ``YY.N`` scheme).
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "25.0"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = f"2014 {__author__}"

View File

@ -0,0 +1,109 @@
"""
ELF file parser.
This provides a class ``ELFFile`` that parses an ELF executable in a similar
interface to ``ZipFile``. Only the read interface is implemented.
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
"""
from __future__ import annotations
import enum
import os
import struct
from typing import IO
class ELFInvalid(ValueError):
    """Raised when a file cannot be parsed as a valid ELF image."""

    pass
class EIClass(enum.IntEnum):
    """ELF ``EI_CLASS`` identification values: the file's bitness."""

    C32 = 1  # 32-bit objects (ELFCLASS32).
    C64 = 2  # 64-bit objects (ELFCLASS64).
class EIData(enum.IntEnum):
    """ELF ``EI_DATA`` identification values: the file's byte order."""

    Lsb = 1  # Little-endian (ELFDATA2LSB).
    Msb = 2  # Big-endian (ELFDATA2MSB).
class EMachine(enum.IntEnum):
    """Subset of ELF ``e_machine`` architecture codes used by this module."""

    I386 = 3  # EM_386
    S390 = 22  # EM_S390
    Arm = 40  # EM_ARM
    X8664 = 62  # EM_X86_64
    # EM_AARCH64; the member name omits the 'h' — kept as-is, renaming
    # would break existing references to this public member.
    AArc64 = 183
class ELFFile:
    """
    Representation of an ELF executable.

    Parses the ELF identification and file header eagerly in ``__init__``;
    program headers are read lazily through :attr:`interpreter`. The file
    object must therefore stay open while the instance is in use.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        try:
            # The 16-byte e_ident block at the start of every ELF file.
            ident = self._read("16B")
        except struct.error as e:
            raise ELFInvalid("unable to parse identification") from e
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError as e:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})"
            ) from e

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> tuple[int, ...]:
        # Read exactly the number of bytes described by *fmt* from the
        # current position and unpack them.
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> str | None:
        """
        The path recorded in the ``PT_INTERP`` section header.

        Returns ``None`` when no ``PT_INTERP`` entry is found.
        """
        for index in range(self._e_phnum):
            # Seek to the index-th program header entry.
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            # Read p_filesz bytes at p_offset and strip the NUL terminator.
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None

View File

@ -0,0 +1,262 @@
from __future__ import annotations
import collections
import contextlib
import functools
import os
import re
import sys
import warnings
from typing import Generator, Iterator, NamedTuple, Sequence
from ._elffile import EIClass, EIData, ELFFile, EMachine
EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400
# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
# as the type for `path` until then.
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:
    """Yield an ``ELFFile`` for *path*, or ``None`` if it cannot be parsed.

    The file remains open for the duration of the ``with`` block, since
    ``ELFFile`` reads program headers lazily (e.g. for ``interpreter``).
    """
    try:
        with open(path, "rb") as f:
            yield ELFFile(f)
    except (OSError, TypeError, ValueError):
        # Missing/unreadable file, bad path type, or not a valid ELF image.
        yield None
def _is_linux_armhf(executable: str) -> bool:
    """Report whether *executable* is a 32-bit LSB ARM hard-float ELF binary."""
    # The hard-float ABI can be detected from the ELF header of the running
    # process.
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    with _parse_elf(executable) as elf:
        if elf is None:
            return False
        if elf.capacity != EIClass.C32 or elf.encoding != EIData.Lsb:
            return False
        if elf.machine != EMachine.Arm:
            return False
        if elf.flags & EF_ARM_ABIMASK != EF_ARM_ABI_VER5:
            return False
        return elf.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
def _is_linux_i686(executable: str) -> bool:
    """Report whether *executable* is a 32-bit LSB x86 ELF binary."""
    with _parse_elf(executable) as elf:
        if elf is None:
            return False
        return (
            elf.capacity == EIClass.C32
            and elf.encoding == EIData.Lsb
            and elf.machine == EMachine.I386
        )
def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
if "armv7l" in archs:
return _is_linux_armhf(executable)
if "i686" in archs:
return _is_linux_i686(executable)
allowed_archs = {
"x86_64",
"aarch64",
"ppc64",
"ppc64le",
"s390x",
"loongarch64",
"riscv64",
}
return any(arch in allowed_archs for arch in archs)
# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
# Maps glibc major version -> last known minor version of that series.
_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)
class _GLibCVersion(NamedTuple):
    """A parsed glibc version, e.g. ``_GLibCVersion(2, 17)`` for glibc 2.17."""

    major: int
    minor: int
def _glibc_version_string_confstr() -> str | None:
"""
Primary implementation of glibc_version_string using os.confstr.
"""
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
# to be broken or missing. This strategy is used in the standard library
# platform module.
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
try:
# Should be a string like "glibc 2.17".
version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
assert version_string is not None
_, version = version_string.rsplit()
except (AssertionError, AttributeError, OSError, ValueError):
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
return None
return version
def _glibc_version_string_ctypes() -> str | None:
"""
Fallback implementation of glibc_version_string using ctypes.
"""
try:
import ctypes
except ImportError:
return None
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
#
# We must also handle the special case where the executable is not a
# dynamically linked executable. This can occur when using musl libc,
# for example. In this situation, dlopen() will error, leading to an
# OSError. Interestingly, at least in the case of musl, there is no
# errno set on the OSError. The single string argument used to construct
# OSError comes from libc itself and is therefore not portable to
# hard code here. In any case, failure to call dlopen() means we
# can proceed, so we bail on our attempt.
try:
process_namespace = ctypes.CDLL(None)
except OSError:
return None
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return None
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
version_str: str = gnu_get_libc_version()
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
return version_str
def _glibc_version_string() -> str | None:
    """Returns glibc version string, or None if not using glibc."""
    # Try the fast confstr strategy first, then fall back to ctypes.
    version = _glibc_version_string_confstr()
    if not version:
        version = _glibc_version_string_ctypes()
    return version
def _parse_glibc_version(version_str: str) -> tuple[int, int]:
"""Parse glibc version.
We use a regexp instead of str.split because we want to discard any
random junk that might come after the minor version -- this might happen
in patched/forked versions of glibc (e.g. Linaro's version of glibc
uses version strings like "2.20-2014.11"). See gh-3588.
"""
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
if not m:
warnings.warn(
f"Expected glibc version with 2 components major.minor, got: {version_str}",
RuntimeWarning,
stacklevel=2,
)
return -1, -1
return int(m.group("major")), int(m.group("minor"))
@functools.lru_cache
def _get_glibc_version() -> tuple[int, int]:
    """Return the running glibc version, or ``(-1, -1)`` when not on glibc."""
    version_str = _glibc_version_string()
    return (-1, -1) if version_str is None else _parse_glibc_version(version_str)
# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
    """Return True if the system glibc and any ``_manylinux`` override module
    declare compatibility with the given manylinux *arch*/*version* pair."""
    if _get_glibc_version() < version:
        return False

    # A distro may ship a `_manylinux` module to override the heuristics.
    try:
        import _manylinux
    except ImportError:
        return True

    if hasattr(_manylinux, "manylinux_compatible"):
        # PEP 600 hook takes precedence; None means "no opinion".
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True

    # Legacy per-tag boolean attributes (PEP 513 / 571 / 599).
    legacy_attrs = {
        _GLibCVersion(2, 5): "manylinux1_compatible",
        _GLibCVersion(2, 12): "manylinux2010_compatible",
        _GLibCVersion(2, 17): "manylinux2014_compatible",
    }
    attr = legacy_attrs.get(version)
    if attr is not None and hasattr(_manylinux, attr):
        return bool(getattr(_manylinux, attr))
    return True
# glibc (major, minor) -> legacy manylinux tag alias for that baseline.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate manylinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be manylinux-compatible.
    :returns: An iterator of compatible manylinux tags.
    """
    if not _have_compatible_abi(sys.executable, archs):
        return

    # Oldest glibc supported regardless of architecture is 2.17; on x86/i686
    # it goes back to 2.5.  (Stored one minor below, since range() excludes
    # its stop value.)
    oldest = _GLibCVersion(2, 16)
    if not {"x86_64", "i686"}.isdisjoint(archs):
        oldest = _GLibCVersion(2, 4)

    current_glibc = _GLibCVersion(*_get_glibc_version())

    # We can assume compatibility across glibc major versions
    # (https://sourceware.org/bugzilla/show_bug.cgi?id=24636), so build the
    # list of per-major version ceilings from current_glibc down to major 2,
    # using _LAST_GLIBC_MINOR as the assumed last minor of older majors.
    ceilings = [current_glibc]
    for major in range(current_glibc.major - 1, 1, -1):
        ceilings.append(_GLibCVersion(major, _LAST_GLIBC_MINOR[major]))

    for arch in archs:
        for ceiling in ceilings:
            # Within the oldest supported major we stop at the oldest
            # supported minor; other majors go all the way down to minor 0.
            floor = oldest.minor if ceiling.major == oldest.major else -1
            for minor in range(ceiling.minor, floor, -1):
                glibc_version = _GLibCVersion(ceiling.major, minor)
                if _is_compatible(arch, glibc_version):
                    yield f"manylinux_{glibc_version.major}_{glibc_version.minor}_{arch}"
                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
                legacy_tag = _LEGACY_MANYLINUX_MAP.get(glibc_version)
                if legacy_tag is not None and _is_compatible(arch, glibc_version):
                    yield f"{legacy_tag}_{arch}"

# --- vendored file boundary (web-scrape artifact: "View File" / diff header removed) ---
"""PEP 656 support.
This module implements logic to detect if the currently running Python is
linked against musl, and what musl version is used.
"""
from __future__ import annotations
import functools
import re
import subprocess
import sys
from typing import Iterator, NamedTuple, Sequence
from ._elffile import ELFFile
class _MuslVersion(NamedTuple):
major: int
minor: int
def _parse_musl_version(output: str) -> _MuslVersion | None:
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
if len(lines) < 2 or lines[0][:4] != "musl":
return None
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
if not m:
return None
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
@functools.lru_cache
def _get_musl_version(executable: str) -> _MuslVersion | None:
"""Detect currently-running musl runtime version.
This is done by checking the specified executable's dynamic linking
information, and invoking the loader to parse its output for a version
string. If the loader is musl, the output would be something like::
musl libc (x86_64)
Version 1.2.2
Dynamic Program Loader
"""
try:
with open(executable, "rb") as f:
ld = ELFFile(f).interpreter
except (OSError, TypeError, ValueError):
return None
if ld is None or "musl" not in ld:
return None
proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
return _parse_musl_version(proc.stderr)
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be musllinux-compatible.
    :returns: An iterator of compatible musllinux tags.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:
        # Python is not dynamically linked against musl: nothing to yield.
        return
    minors = range(sys_musl.minor, -1, -1)
    for arch in archs:
        for minor in minors:
            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
if __name__ == "__main__":  # pragma: no cover
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # platform_tags() expects a sequence of architecture strings; passing the
    # bare str would iterate it character by character, yielding one bogus
    # tag per character — wrap the single arch in a list.
    for t in platform_tags([re.sub(r"[.-]", "_", plat.split("-", 1)[-1])]):
        print(t, end="\n ")

# --- vendored file boundary (web-scrape artifact: "View File" / diff header removed) ---
"""Handwritten parser of dependency specifiers.
The docstring for each __parse_* function contains EBNF-inspired grammar representing
the implementation.
"""
from __future__ import annotations
import ast
from typing import NamedTuple, Sequence, Tuple, Union
from ._tokenizer import DEFAULT_RULES, Tokenizer
class Node:
    """Base parse-tree node: a raw string value plus a serialization rule."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}('{self}')>"

    def serialize(self) -> str:
        raise NotImplementedError


class Variable(Node):
    """A marker variable such as ``python_version``; serializes bare."""

    def serialize(self) -> str:
        return str(self)


class Value(Node):
    """A literal string value; serializes wrapped in double quotes."""

    def serialize(self) -> str:
        return f'"{self}"'


class Op(Node):
    """A comparison operator such as ``>=``; serializes bare."""

    def serialize(self) -> str:
        return str(self)


# Type aliases describing the shape of a parsed marker expression.
MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]
MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]]
class ParsedRequirement(NamedTuple):
    """Structured result of parsing a dependency specifier string."""

    name: str
    url: str
    extras: list[str]
    specifier: str
    marker: MarkerList | None
# --------------------------------------------------------------------------------------
# Recursive descent parser for dependency specifier
# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
    """Parse a dependency specifier string into a ParsedRequirement."""
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
    """
    tokenizer.consume("WS")

    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    # Anything left over after the details is a syntax error.
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)
def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> tuple[str, str, MarkerList | None]:
    """
    requirement_details = AT URL (WS requirement_marker?)?
                        | specifier WS? (requirement_marker)?

    Returns a ``(url, specifier, marker)`` triple; *marker* is None when
    no marker clause is present.
    """
    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        # URL form: "name @ <url> [; marker]".
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        tokenizer.expect("WS", expected="whitespace after URL")

        # The input might end after whitespace.
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer, span_start=url_start, after="URL and whitespace"
        )
    else:
        # Version-specifier form: "name ==1.0 [; marker]".
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            after=(
                "version specifier"
                if specifier
                else "name and no valid version specifier"
            ),
        )

    return (url, specifier, marker)
def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?

    *span_start* and *after* only feed the error message produced when the
    required semicolon separator is missing.
    """
    if not tokenizer.check("SEMICOLON"):
        tokenizer.raise_syntax_error(
            f"Expected end or semicolon (after {after})",
            span_start=span_start,
        )
    tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker
def _parse_extras(tokenizer: Tokenizer) -> list[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?

    Returns an empty list when no bracketed extras section is present.
    """
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    with tokenizer.enclosing_tokens(
        "LEFT_BRACKET",
        "RIGHT_BRACKET",
        around="extras",
    ):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras
def _parse_extras_list(tokenizer: Tokenizer) -> list[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: list[str] = []

    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            # Two identifiers in a row means the separating comma is missing.
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras
def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    # Parentheses are optional: enclosing_tokens only demands the closing
    # parenthesis if the opening one was actually seen.
    with tokenizer.enclosing_tokens(
        "LEFT_PARENTHESIS",
        "RIGHT_PARENTHESIS",
        around="version specifier",
    ):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers
def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?

    Returns the comma-joined specifier text (possibly empty).
    """
    parsed_specifiers = ""
    while tokenizer.check("SPECIFIER"):
        span_start = tokenizer.position
        parsed_specifiers += tokenizer.read().text
        # A ".*" suffix or local version label left over at this point was
        # not consumed by the SPECIFIER rule and is therefore misplaced.
        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                ".* suffix can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position + 1,
            )
        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                "Local version label can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position,
            )
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers
# --------------------------------------------------------------------------------------
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    """Parse a complete environment-marker expression string."""
    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
    # A full marker must consume the entire input.
    retval = _parse_marker(tokenizer)
    tokenizer.expect("END", expected="end of marker expression")
    return retval
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)+

    Returns a flat list of the form ``[atom, "and"/"or", atom, ...]``.
    """
    expression = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        token = tokenizer.read()
        expr_right = _parse_marker_atom(tokenizer)
        expression.extend((token.text, expr_right))
    return expression
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """
    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        # Parenthesized sub-expression: recurse into the full marker grammar.
        with tokenizer.enclosing_tokens(
            "LEFT_PARENTHESIS",
            "RIGHT_PARENTHESIS",
            around="marker expression",
        ):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    tokenizer.consume("WS")
    marker_var_left = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    marker_op = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    marker_var_right = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    # (lhs, op, rhs) triple, e.g. (Variable, Op, Value).
    return (marker_var_left, marker_op, marker_var_right)
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        # Dotted spellings such as "os.name" are unified to underscores.
        return process_env_var(tokenizer.read().text.replace(".", "_"))
    elif tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    else:
        tokenizer.raise_syntax_error(
            message="Expected a marker variable or quoted string"
        )
def process_env_var(env_var: str) -> Variable:
    """Wrap a marker variable name in a Variable node.

    Both the ``platform_python_implementation`` and ``python_implementation``
    spellings map to the canonical ``platform_python_implementation``.
    """
    canonical = (
        "platform_python_implementation"
        if env_var in ("platform_python_implementation", "python_implementation")
        else env_var
    )
    return Variable(canonical)
def process_python_str(python_str: str) -> Value:
    """Evaluate a quoted-string token and wrap its text in a Value node."""
    literal = ast.literal_eval(python_str)
    return Value(str(literal))
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    elif tokenizer.check("NOT"):
        # "not" must be followed by whitespace and then "in".
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    elif tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    else:
        return tokenizer.raise_syntax_error(
            "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in"
        )

# --- vendored file boundary (web-scrape artifact: "View File" / diff header removed) ---
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
class InfinityType:
    """Sentinel that compares greater than every other value."""

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Module-level singleton; compare against this instance.
Infinity = InfinityType()
class NegativeInfinityType:
    """Sentinel that compares less than every other value."""

    def __repr__(self) -> str:
        return "-Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return True

    def __le__(self, other: object) -> bool:
        return True

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityType:
        return Infinity


# Module-level singleton; compare against this instance.
NegativeInfinity = NegativeInfinityType()

# --- vendored file boundary (web-scrape artifact: "View File" / diff header removed) ---
from __future__ import annotations
import contextlib
import re
from dataclasses import dataclass
from typing import Iterator, NoReturn
from .specifiers import Specifier
@dataclass
class Token:
    """A lexed token: rule name, matched text, and offset into the source."""

    name: str
    text: str
    position: int
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: tuple[int, int],
    ) -> None:
        self.span = span
        self.message = message
        self.source = source

        super().__init__()

    def __str__(self) -> str:
        # Underline the offending span with "~" and point at its end with "^".
        start, end = self.span
        marker = " " * start + "~" * (end - start) + "^"
        return "\n ".join([self.message, self.source, marker])
DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
"LEFT_PARENTHESIS": r"\(",
"RIGHT_PARENTHESIS": r"\)",
"LEFT_BRACKET": r"\[",
"RIGHT_BRACKET": r"\]",
"SEMICOLON": r";",
"COMMA": r",",
"QUOTED_STRING": re.compile(
r"""
(
('[^']*')
|
("[^"]*")
)
""",
re.VERBOSE,
),
"OP": r"(===|==|~=|!=|<=|>=|<|>)",
"BOOLOP": r"\b(or|and)\b",
"IN": r"\bin\b",
"NOT": r"\bnot\b",
"VARIABLE": re.compile(
r"""
\b(
python_version
|python_full_version
|os[._]name
|sys[._]platform
|platform_(release|system)
|platform[._](version|machine|python_implementation)
|python_implementation
|implementation_(name|version)
|extras?
|dependency_groups
)\b
""",
re.VERBOSE,
),
"SPECIFIER": re.compile(
Specifier._operator_regex_str + Specifier._version_regex_str,
re.VERBOSE | re.IGNORECASE,
),
"AT": r"\@",
"URL": r"[^ \t]+",
"IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
"VERSION_PREFIX_TRAIL": r"\.\*",
"VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
"WS": r"[ \t]+",
"END": r"$",
}
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: dict[str, str | re.Pattern[str]],
    ) -> None:
        self.source = source
        # Pre-compile every rule; re.compile() passes already-compiled
        # patterns through unchanged.
        self.rules: dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        # Token matched by the last non-peek check(), not yet consumed by read().
        self.next_token: Token | None = None
        # Current scan offset into `source`.
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert self.next_token is None, (
            f"Cannot check for {name!r}, already have {self.next_token!r}"
        )
        assert name in self.rules, f"Unknown token name: {name!r}"
        expression = self.rules[name]

        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        On success the token is read (consumed) and returned.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: int | None = None,
        span_end: int | None = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        # Default both span ends to the current position.
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        """Run the body inside an ``open_token`` ... ``close_token`` pair.

        If ``open_token`` is absent the body still runs, but no closing
        token is demanded afterwards (the pair is treated as optional).
        """
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()

# --- vendored file boundary (web-scrape artifact: "View File" / diff header removed) ---
#######################################################################################
#
# Adapted from:
# https://github.com/pypa/hatch/blob/5352e44/backend/src/hatchling/licenses/parse.py
#
# MIT License
#
# Copyright (c) 2017-present Ofek Lev <oss@ofek.dev>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be included in all copies
# or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#
# With additional allowance of arbitrary `LicenseRef-` identifiers, not just
# `LicenseRef-Public-Domain` and `LicenseRef-Proprietary`.
#
#######################################################################################
from __future__ import annotations
import re
from typing import NewType, cast
from packaging.licenses._spdx import EXCEPTIONS, LICENSES
__all__ = [
    "InvalidLicenseExpression",
    "NormalizedLicenseExpression",
    "canonicalize_license_expression",
]

# LicenseRef-<idstring>: the idstring may contain only letters, digits,
# "." and "-" (empty also matches; emptiness is rejected elsewhere).
license_ref_allowed = re.compile("^[A-Za-z0-9.-]*$")

# Distinct type for expressions that have passed canonicalization.
NormalizedLicenseExpression = NewType("NormalizedLicenseExpression", str)
# Subclasses ValueError, so callers catching ValueError also catch this.
class InvalidLicenseExpression(ValueError):
    """Raised when a license-expression string is invalid

    >>> canonicalize_license_expression("invalid")
    Traceback (most recent call last):
    ...
    packaging.licenses.InvalidLicenseExpression: Invalid license expression: 'invalid'
    """
def canonicalize_license_expression(
    raw_license_expression: str,
) -> NormalizedLicenseExpression:
    """Validate an SPDX license expression and return its canonical spelling.

    Raises InvalidLicenseExpression for empty input, malformed boolean
    structure, or unknown license / exception identifiers.
    """
    if not raw_license_expression:
        message = f"Invalid license expression: {raw_license_expression!r}"
        raise InvalidLicenseExpression(message)

    # Pad any parentheses so tokenization can be achieved by merely splitting on
    # whitespace.
    license_expression = raw_license_expression.replace("(", " ( ").replace(")", " ) ")
    licenseref_prefix = "LicenseRef-"
    # Remember the original capitalization of LicenseRef-<id> tokens; only
    # the prefix itself is re-canonicalized, the <id> part keeps its case.
    license_refs = {
        ref.lower(): "LicenseRef-" + ref[len(licenseref_prefix) :]
        for ref in license_expression.split()
        if ref.lower().startswith(licenseref_prefix.lower())
    }

    # Normalize to lower case so we can look up licenses/exceptions
    # and so boolean operators are Python-compatible.
    license_expression = license_expression.lower()

    tokens = license_expression.split()

    # Rather than implementing boolean logic, we create an expression that Python can
    # parse. Everything that is not involved with the grammar itself is treated as
    # `False` and the expression should evaluate as such.
    python_tokens = []
    for token in tokens:
        if token not in {"or", "and", "with", "(", ")"}:
            python_tokens.append("False")
        elif token == "with":
            python_tokens.append("or")
        elif token == "(" and python_tokens and python_tokens[-1] not in {"or", "and"}:
            # "(" directly after an operand (e.g. "MIT (…)" ) is malformed.
            message = f"Invalid license expression: {raw_license_expression!r}"
            raise InvalidLicenseExpression(message)
        else:
            python_tokens.append(token)

    python_expression = " ".join(python_tokens)
    try:
        # eval() is safe here: python_tokens can only contain "False", "or",
        # "and", "(" and ")" (see the loop above), never raw input text.
        invalid = eval(python_expression, globals(), locals())
    except Exception:
        invalid = True

    if invalid is not False:
        message = f"Invalid license expression: {raw_license_expression!r}"
        raise InvalidLicenseExpression(message) from None

    # Take a final pass to check for unknown licenses/exceptions.
    normalized_tokens = []
    for token in tokens:
        if token in {"or", "and", "with", "(", ")"}:
            normalized_tokens.append(token.upper())
            continue

        if normalized_tokens and normalized_tokens[-1] == "WITH":
            # The token directly after WITH must be a known license exception.
            if token not in EXCEPTIONS:
                message = f"Unknown license exception: {token!r}"
                raise InvalidLicenseExpression(message)

            normalized_tokens.append(EXCEPTIONS[token]["id"])
        else:
            # A trailing "+" ("or later") is preserved on the canonical id.
            if token.endswith("+"):
                final_token = token[:-1]
                suffix = "+"
            else:
                final_token = token
                suffix = ""

            if final_token.startswith("licenseref-"):
                if not license_ref_allowed.match(final_token):
                    message = f"Invalid licenseref: {final_token!r}"
                    raise InvalidLicenseExpression(message)
                normalized_tokens.append(license_refs[final_token] + suffix)
            else:
                if final_token not in LICENSES:
                    message = f"Unknown license: {final_token!r}"
                    raise InvalidLicenseExpression(message)
                normalized_tokens.append(LICENSES[final_token]["id"] + suffix)

    normalized_expression = " ".join(normalized_tokens)

    return cast(
        NormalizedLicenseExpression,
        normalized_expression.replace("( ", "(").replace(" )", ")"),
    )

# --- vendored file boundary (web-scrape artifact: "View File" / diff header removed) ---
from __future__ import annotations
from typing import TypedDict
class SPDXLicense(TypedDict):
id: str
deprecated: bool
# Record type for one entry in the EXCEPTIONS table: the canonical SPDX
# exception identifier and whether the SPDX list marks it as deprecated.
SPDXException = TypedDict('SPDXException', {'id': str, 'deprecated': bool})
VERSION = '3.25.0'
LICENSES: dict[str, SPDXLicense] = {
'0bsd': {'id': '0BSD', 'deprecated': False},
'3d-slicer-1.0': {'id': '3D-Slicer-1.0', 'deprecated': False},
'aal': {'id': 'AAL', 'deprecated': False},
'abstyles': {'id': 'Abstyles', 'deprecated': False},
'adacore-doc': {'id': 'AdaCore-doc', 'deprecated': False},
'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False},
'adobe-display-postscript': {'id': 'Adobe-Display-PostScript', 'deprecated': False},
'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False},
'adobe-utopia': {'id': 'Adobe-Utopia', 'deprecated': False},
'adsl': {'id': 'ADSL', 'deprecated': False},
'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False},
'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False},
'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False},
'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False},
'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False},
'afmparse': {'id': 'Afmparse', 'deprecated': False},
'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True},
'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False},
'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later', 'deprecated': False},
'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True},
'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False},
'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False},
'aladdin': {'id': 'Aladdin', 'deprecated': False},
'amd-newlib': {'id': 'AMD-newlib', 'deprecated': False},
'amdplpa': {'id': 'AMDPLPA', 'deprecated': False},
'aml': {'id': 'AML', 'deprecated': False},
'aml-glslang': {'id': 'AML-glslang', 'deprecated': False},
'ampas': {'id': 'AMPAS', 'deprecated': False},
'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False},
'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': False},
'any-osi': {'id': 'any-OSI', 'deprecated': False},
'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False},
'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False},
'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False},
'apafml': {'id': 'APAFML', 'deprecated': False},
'apl-1.0': {'id': 'APL-1.0', 'deprecated': False},
'app-s2p': {'id': 'App-s2p', 'deprecated': False},
'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False},
'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False},
'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': False},
'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False},
'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False},
'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False},
'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False},
'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False},
'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False},
'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0', 'deprecated': False},
'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1', 'deprecated': False},
'baekmuk': {'id': 'Baekmuk', 'deprecated': False},
'bahyph': {'id': 'Bahyph', 'deprecated': False},
'barr': {'id': 'Barr', 'deprecated': False},
'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer', 'deprecated': False},
'beerware': {'id': 'Beerware', 'deprecated': False},
'bitstream-charter': {'id': 'Bitstream-Charter', 'deprecated': False},
'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False},
'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False},
'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False},
'blessing': {'id': 'blessing', 'deprecated': False},
'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False},
'boehm-gc': {'id': 'Boehm-GC', 'deprecated': False},
'borceux': {'id': 'Borceux', 'deprecated': False},
'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause', 'deprecated': False},
'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause', 'deprecated': False},
'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False},
'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False},
'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin', 'deprecated': False},
'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines', 'deprecated': False},
'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True},
'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True},
'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False},
'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False},
'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False},
'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica', 'deprecated': False},
'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 'deprecated': False},
'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False},
'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex', 'deprecated': False},
'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP', 'deprecated': False},
'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False},
'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 'deprecated': False},
'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False},
'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False},
'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False},
'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False},
'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False},
'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun', 'deprecated': False},
'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False},
'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': False},
'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False},
'bsd-4.3reno': {'id': 'BSD-4.3RENO', 'deprecated': False},
'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE', 'deprecated': False},
'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement', 'deprecated': False},
'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer', 'deprecated': False},
'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk', 'deprecated': False},
'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False},
'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file', 'deprecated': False},
'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False},
'bsd-systemics': {'id': 'BSD-Systemics', 'deprecated': False},
'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works', 'deprecated': False},
'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False},
'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False},
'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True},
'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False},
'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False},
'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False},
'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception', 'deprecated': False},
'caldera': {'id': 'Caldera', 'deprecated': False},
'caldera-no-preamble': {'id': 'Caldera-no-preamble', 'deprecated': False},
'catharon': {'id': 'Catharon', 'deprecated': False},
'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False},
'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False},
'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False},
'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False},
'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False},
'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False},
'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False},
'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU', 'deprecated': False},
'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 'deprecated': False},
'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False},
'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False},
'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False},
'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False},
'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False},
'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False},
'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': False},
'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False},
'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False},
'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False},
'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False},
'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False},
'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False},
'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0', 'deprecated': False},
'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False},
'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False},
'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False},
'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False},
'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0', 'deprecated': False},
'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE', 'deprecated': False},
'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False},
'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False},
'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False},
'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False},
'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False},
'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False},
'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False},
'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False},
'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': False},
'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False},
'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False},
'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False},
'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False},
'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False},
'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False},
'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False},
'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP', 'deprecated': False},
'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False},
'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False},
'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False},
'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False},
'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO', 'deprecated': False},
'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False},
'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False},
'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False},
'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False},
'cddl-1.1': {'id': 'CDDL-1.1', 'deprecated': False},
'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False},
'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False},
'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False},
'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False},
'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False},
'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False},
'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False},
'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False},
'cecill-b': {'id': 'CECILL-B', 'deprecated': False},
'cecill-c': {'id': 'CECILL-C', 'deprecated': False},
'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False},
'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False},
'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False},
'cern-ohl-s-2.0': {'id': 'CERN-OHL-S-2.0', 'deprecated': False},
'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False},
'cfitsio': {'id': 'CFITSIO', 'deprecated': False},
'check-cvs': {'id': 'check-cvs', 'deprecated': False},
'checkmk': {'id': 'checkmk', 'deprecated': False},
'clartistic': {'id': 'ClArtistic', 'deprecated': False},
'clips': {'id': 'Clips', 'deprecated': False},
'cmu-mach': {'id': 'CMU-Mach', 'deprecated': False},
'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc', 'deprecated': False},
'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False},
'cnri-python': {'id': 'CNRI-Python', 'deprecated': False},
'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False},
'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False},
'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False},
'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False},
'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False},
'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False},
'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG', 'deprecated': False},
'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False},
'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False},
'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False},
'cronyx': {'id': 'Cronyx', 'deprecated': False},
'crossword': {'id': 'Crossword', 'deprecated': False},
'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False},
'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False},
'cube': {'id': 'Cube', 'deprecated': False},
'curl': {'id': 'curl', 'deprecated': False},
'cve-tou': {'id': 'cve-tou', 'deprecated': False},
'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False},
'dec-3-clause': {'id': 'DEC-3-Clause', 'deprecated': False},
'diffmark': {'id': 'diffmark', 'deprecated': False},
'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False},
'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0', 'deprecated': False},
'doc': {'id': 'DOC', 'deprecated': False},
'docbook-schema': {'id': 'DocBook-Schema', 'deprecated': False},
'docbook-xml': {'id': 'DocBook-XML', 'deprecated': False},
'dotseqn': {'id': 'Dotseqn', 'deprecated': False},
'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False},
'drl-1.1': {'id': 'DRL-1.1', 'deprecated': False},
'dsdp': {'id': 'DSDP', 'deprecated': False},
'dtoa': {'id': 'dtoa', 'deprecated': False},
'dvipdfm': {'id': 'dvipdfm', 'deprecated': False},
'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False},
'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False},
'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True},
'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False},
'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False},
'egenix': {'id': 'eGenix', 'deprecated': False},
'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False},
'entessa': {'id': 'Entessa', 'deprecated': False},
'epics': {'id': 'EPICS', 'deprecated': False},
'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False},
'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False},
'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False},
'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False},
'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False},
'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False},
'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False},
'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': False},
'eurosym': {'id': 'Eurosym', 'deprecated': False},
'fair': {'id': 'Fair', 'deprecated': False},
'fbm': {'id': 'FBM', 'deprecated': False},
'fdk-aac': {'id': 'FDK-AAC', 'deprecated': False},
'ferguson-twofish': {'id': 'Ferguson-Twofish', 'deprecated': False},
'frameworx-1.0': {'id': 'Frameworx-1.0', 'deprecated': False},
'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False},
'freeimage': {'id': 'FreeImage', 'deprecated': False},
'fsfap': {'id': 'FSFAP', 'deprecated': False},
'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer', 'deprecated': False},
'fsful': {'id': 'FSFUL', 'deprecated': False},
'fsfullr': {'id': 'FSFULLR', 'deprecated': False},
'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False},
'ftl': {'id': 'FTL', 'deprecated': False},
'furuseth': {'id': 'Furuseth', 'deprecated': False},
'fwlw': {'id': 'fwlw', 'deprecated': False},
'gcr-docs': {'id': 'GCR-docs', 'deprecated': False},
'gd': {'id': 'GD', 'deprecated': False},
'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True},
'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False},
'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later', 'deprecated': False},
'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False},
'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False},
'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False},
'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False},
'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True},
'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False},
'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False},
'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False},
'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False},
'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False},
'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False},
'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True},
'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False},
'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 'deprecated': False},
'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False},
'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False},
'gfdl-1.3-only': {'id': 'GFDL-1.3-only', 'deprecated': False},
'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False},
'giftware': {'id': 'Giftware', 'deprecated': False},
'gl2ps': {'id': 'GL2PS', 'deprecated': False},
'glide': {'id': 'Glide', 'deprecated': False},
'glulxe': {'id': 'Glulxe', 'deprecated': False},
'glwtpl': {'id': 'GLWTPL', 'deprecated': False},
'gnuplot': {'id': 'gnuplot', 'deprecated': False},
'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True},
'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True},
'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False},
'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False},
'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True},
'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True},
'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False},
'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False},
'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True},
'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True},
'gpl-2.0-with-classpath-exception': {'id': 'GPL-2.0-with-classpath-exception', 'deprecated': True},
'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True},
'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True},
'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True},
'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True},
'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False},
'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False},
'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True},
'gpl-3.0-with-gcc-exception': {'id': 'GPL-3.0-with-GCC-exception', 'deprecated': True},
'graphics-gems': {'id': 'Graphics-Gems', 'deprecated': False},
'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False},
'gtkbook': {'id': 'gtkbook', 'deprecated': False},
'gutmann': {'id': 'Gutmann', 'deprecated': False},
'haskellreport': {'id': 'HaskellReport', 'deprecated': False},
'hdparm': {'id': 'hdparm', 'deprecated': False},
'hidapi': {'id': 'HIDAPI', 'deprecated': False},
'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False},
'hp-1986': {'id': 'HP-1986', 'deprecated': False},
'hp-1989': {'id': 'HP-1989', 'deprecated': False},
'hpnd': {'id': 'HPND', 'deprecated': False},
'hpnd-dec': {'id': 'HPND-DEC', 'deprecated': False},
'hpnd-doc': {'id': 'HPND-doc', 'deprecated': False},
'hpnd-doc-sell': {'id': 'HPND-doc-sell', 'deprecated': False},
'hpnd-export-us': {'id': 'HPND-export-US', 'deprecated': False},
'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement', 'deprecated': False},
'hpnd-export-us-modify': {'id': 'HPND-export-US-modify', 'deprecated': False},
'hpnd-export2-us': {'id': 'HPND-export2-US', 'deprecated': False},
'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston', 'deprecated': False},
'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG', 'deprecated': False},
'hpnd-intel': {'id': 'HPND-Intel', 'deprecated': False},
'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney', 'deprecated': False},
'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn', 'deprecated': False},
'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant', 'deprecated': False},
'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer', 'deprecated': False},
'hpnd-netrek': {'id': 'HPND-Netrek', 'deprecated': False},
'hpnd-pbmplus': {'id': 'HPND-Pbmplus', 'deprecated': False},
'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver', 'deprecated': False},
'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr', 'deprecated': False},
'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False},
'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer', 'deprecated': False},
'hpnd-sell-variant-mit-disclaimer-rev': {'id': 'HPND-sell-variant-MIT-disclaimer-rev', 'deprecated': False},
'hpnd-uc': {'id': 'HPND-UC', 'deprecated': False},
'hpnd-uc-export-us': {'id': 'HPND-UC-export-US', 'deprecated': False},
'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False},
'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False},
'icu': {'id': 'ICU', 'deprecated': False},
'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA', 'deprecated': False},
'ijg': {'id': 'IJG', 'deprecated': False},
'ijg-short': {'id': 'IJG-short', 'deprecated': False},
'imagemagick': {'id': 'ImageMagick', 'deprecated': False},
'imatix': {'id': 'iMatix', 'deprecated': False},
'imlib2': {'id': 'Imlib2', 'deprecated': False},
'info-zip': {'id': 'Info-ZIP', 'deprecated': False},
'inner-net-2.0': {'id': 'Inner-Net-2.0', 'deprecated': False},
'intel': {'id': 'Intel', 'deprecated': False},
'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False},
'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False},
'ipa': {'id': 'IPA', 'deprecated': False},
'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False},
'isc': {'id': 'ISC', 'deprecated': False},
'isc-veillard': {'id': 'ISC-Veillard', 'deprecated': False},
'jam': {'id': 'Jam', 'deprecated': False},
'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False},
'jpl-image': {'id': 'JPL-image', 'deprecated': False},
'jpnic': {'id': 'JPNIC', 'deprecated': False},
'json': {'id': 'JSON', 'deprecated': False},
'kastrup': {'id': 'Kastrup', 'deprecated': False},
'kazlib': {'id': 'Kazlib', 'deprecated': False},
'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False},
'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False},
'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False},
'latex2e': {'id': 'Latex2e', 'deprecated': False},
'latex2e-translated-notice': {'id': 'Latex2e-translated-notice', 'deprecated': False},
'leptonica': {'id': 'Leptonica', 'deprecated': False},
'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True},
'lgpl-2.0+': {'id': 'LGPL-2.0+', 'deprecated': True},
'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False},
'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False},
'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True},
'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True},
'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False},
'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False},
'lgpl-3.0': {'id': 'LGPL-3.0', 'deprecated': True},
'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True},
'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False},
'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False},
'lgpllr': {'id': 'LGPLLR', 'deprecated': False},
'libpng': {'id': 'Libpng', 'deprecated': False},
'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False},
'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False},
'libtiff': {'id': 'libtiff', 'deprecated': False},
'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False},
'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False},
'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False},
'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False},
'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para', 'deprecated': False},
'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False},
'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para', 'deprecated': False},
'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var', 'deprecated': False},
'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False},
'loop': {'id': 'LOOP', 'deprecated': False},
'lpd-document': {'id': 'LPD-document', 'deprecated': False},
'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False},
'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False},
'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False},
'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False},
'lppl-1.2': {'id': 'LPPL-1.2', 'deprecated': False},
'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False},
'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False},
'lsof': {'id': 'lsof', 'deprecated': False},
'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts', 'deprecated': False},
'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False},
'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22', 'deprecated': False},
'mackerras-3-clause': {'id': 'Mackerras-3-Clause', 'deprecated': False},
'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment', 'deprecated': False},
'magaz': {'id': 'magaz', 'deprecated': False},
'mailprio': {'id': 'mailprio', 'deprecated': False},
'makeindex': {'id': 'MakeIndex', 'deprecated': False},
'martin-birgmeier': {'id': 'Martin-Birgmeier', 'deprecated': False},
'mcphee-slideshow': {'id': 'McPhee-slideshow', 'deprecated': False},
'metamail': {'id': 'metamail', 'deprecated': False},
'minpack': {'id': 'Minpack', 'deprecated': False},
'miros': {'id': 'MirOS', 'deprecated': False},
'mit': {'id': 'MIT', 'deprecated': False},
'mit-0': {'id': 'MIT-0', 'deprecated': False},
'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False},
'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False},
'mit-enna': {'id': 'MIT-enna', 'deprecated': False},
'mit-feh': {'id': 'MIT-feh', 'deprecated': False},
'mit-festival': {'id': 'MIT-Festival', 'deprecated': False},
'mit-khronos-old': {'id': 'MIT-Khronos-old', 'deprecated': False},
'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False},
'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False},
'mit-testregex': {'id': 'MIT-testregex', 'deprecated': False},
'mit-wu': {'id': 'MIT-Wu', 'deprecated': False},
'mitnfa': {'id': 'MITNFA', 'deprecated': False},
'mmixware': {'id': 'MMIXware', 'deprecated': False},
'motosoto': {'id': 'Motosoto', 'deprecated': False},
'mpeg-ssg': {'id': 'MPEG-SSG', 'deprecated': False},
'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False},
'mpich2': {'id': 'mpich2', 'deprecated': False},
'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False},
'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False},
'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False},
'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False},
'mplus': {'id': 'mplus', 'deprecated': False},
'ms-lpl': {'id': 'MS-LPL', 'deprecated': False},
'ms-pl': {'id': 'MS-PL', 'deprecated': False},
'ms-rl': {'id': 'MS-RL', 'deprecated': False},
'mtll': {'id': 'MTLL', 'deprecated': False},
'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False},
'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False},
'multics': {'id': 'Multics', 'deprecated': False},
'mup': {'id': 'Mup', 'deprecated': False},
'naist-2003': {'id': 'NAIST-2003', 'deprecated': False},
'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False},
'naumen': {'id': 'Naumen', 'deprecated': False},
'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False},
'ncbi-pd': {'id': 'NCBI-PD', 'deprecated': False},
'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False},
'ncl': {'id': 'NCL', 'deprecated': False},
'ncsa': {'id': 'NCSA', 'deprecated': False},
'net-snmp': {'id': 'Net-SNMP', 'deprecated': True},
'netcdf': {'id': 'NetCDF', 'deprecated': False},
'newsletr': {'id': 'Newsletr', 'deprecated': False},
'ngpl': {'id': 'NGPL', 'deprecated': False},
'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False},
'nist-pd': {'id': 'NIST-PD', 'deprecated': False},
'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False},
'nist-software': {'id': 'NIST-Software', 'deprecated': False},
'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False},
'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False},
'nlpl': {'id': 'NLPL', 'deprecated': False},
'nokia': {'id': 'Nokia', 'deprecated': False},
'nosl': {'id': 'NOSL', 'deprecated': False},
'noweb': {'id': 'Noweb', 'deprecated': False},
'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False},
'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False},
'nposl-3.0': {'id': 'NPOSL-3.0', 'deprecated': False},
'nrl': {'id': 'NRL', 'deprecated': False},
'ntp': {'id': 'NTP', 'deprecated': False},
'ntp-0': {'id': 'NTP-0', 'deprecated': False},
'nunit': {'id': 'Nunit', 'deprecated': True},
'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False},
'oar': {'id': 'OAR', 'deprecated': False},
'occt-pl': {'id': 'OCCT-PL', 'deprecated': False},
'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False},
'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False},
'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False},
'offis': {'id': 'OFFIS', 'deprecated': False},
'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False},
'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False},
'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False},
'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False},
'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False},
'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False},
'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False},
'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False},
'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False},
'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False},
'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False},
'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False},
'ogtsl': {'id': 'OGTSL', 'deprecated': False},
'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False},
'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False},
'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False},
'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False},
'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False},
'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False},
'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False},
'oldap-2.2': {'id': 'OLDAP-2.2', 'deprecated': False},
'oldap-2.2.1': {'id': 'OLDAP-2.2.1', 'deprecated': False},
'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False},
'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False},
'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False},
'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False},
'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False},
'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False},
'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False},
'olfl-1.3': {'id': 'OLFL-1.3', 'deprecated': False},
'oml': {'id': 'OML', 'deprecated': False},
'openpbs-2.3': {'id': 'OpenPBS-2.3', 'deprecated': False},
'openssl': {'id': 'OpenSSL', 'deprecated': False},
'openssl-standalone': {'id': 'OpenSSL-standalone', 'deprecated': False},
'openvision': {'id': 'OpenVision', 'deprecated': False},
'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False},
'opl-uk-3.0': {'id': 'OPL-UK-3.0', 'deprecated': False},
'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False},
'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False},
'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False},
'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False},
'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False},
'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False},
'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False},
'padl': {'id': 'PADL', 'deprecated': False},
'parity-6.0.0': {'id': 'Parity-6.0.0', 'deprecated': False},
'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False},
'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False},
'php-3.0': {'id': 'PHP-3.0', 'deprecated': False},
'php-3.01': {'id': 'PHP-3.01', 'deprecated': False},
'pixar': {'id': 'Pixar', 'deprecated': False},
'pkgconf': {'id': 'pkgconf', 'deprecated': False},
'plexus': {'id': 'Plexus', 'deprecated': False},
'pnmstitch': {'id': 'pnmstitch', 'deprecated': False},
'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False},
'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0', 'deprecated': False},
'postgresql': {'id': 'PostgreSQL', 'deprecated': False},
'ppl': {'id': 'PPL', 'deprecated': False},
'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False},
'psfrag': {'id': 'psfrag', 'deprecated': False},
'psutils': {'id': 'psutils', 'deprecated': False},
'python-2.0': {'id': 'Python-2.0', 'deprecated': False},
'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False},
'python-ldap': {'id': 'python-ldap', 'deprecated': False},
'qhull': {'id': 'Qhull', 'deprecated': False},
'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False},
'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004', 'deprecated': False},
'radvd': {'id': 'radvd', 'deprecated': False},
'rdisc': {'id': 'Rdisc', 'deprecated': False},
'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False},
'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False},
'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False},
'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False},
'rsa-md': {'id': 'RSA-MD', 'deprecated': False},
'rscpl': {'id': 'RSCPL', 'deprecated': False},
'ruby': {'id': 'Ruby', 'deprecated': False},
'ruby-pty': {'id': 'Ruby-pty', 'deprecated': False},
'sax-pd': {'id': 'SAX-PD', 'deprecated': False},
'sax-pd-2.0': {'id': 'SAX-PD-2.0', 'deprecated': False},
'saxpath': {'id': 'Saxpath', 'deprecated': False},
'scea': {'id': 'SCEA', 'deprecated': False},
'schemereport': {'id': 'SchemeReport', 'deprecated': False},
'sendmail': {'id': 'Sendmail', 'deprecated': False},
'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False},
'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False},
'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False},
'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False},
'sgi-opengl': {'id': 'SGI-OpenGL', 'deprecated': False},
'sgp4': {'id': 'SGP4', 'deprecated': False},
'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False},
'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False},
'simpl-2.0': {'id': 'SimPL-2.0', 'deprecated': False},
'sissl': {'id': 'SISSL', 'deprecated': False},
'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False},
'sl': {'id': 'SL', 'deprecated': False},
'sleepycat': {'id': 'Sleepycat', 'deprecated': False},
'smlnj': {'id': 'SMLNJ', 'deprecated': False},
'smppl': {'id': 'SMPPL', 'deprecated': False},
'snia': {'id': 'SNIA', 'deprecated': False},
'snprintf': {'id': 'snprintf', 'deprecated': False},
'softsurfer': {'id': 'softSurfer', 'deprecated': False},
'soundex': {'id': 'Soundex', 'deprecated': False},
'spencer-86': {'id': 'Spencer-86', 'deprecated': False},
'spencer-94': {'id': 'Spencer-94', 'deprecated': False},
'spencer-99': {'id': 'Spencer-99', 'deprecated': False},
'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False},
'ssh-keyscan': {'id': 'ssh-keyscan', 'deprecated': False},
'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False},
'ssh-short': {'id': 'SSH-short', 'deprecated': False},
'ssleay-standalone': {'id': 'SSLeay-standalone', 'deprecated': False},
'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False},
'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True},
'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False},
'sun-ppp': {'id': 'Sun-PPP', 'deprecated': False},
'sun-ppp-2000': {'id': 'Sun-PPP-2000', 'deprecated': False},
'sunpro': {'id': 'SunPro', 'deprecated': False},
'swl': {'id': 'SWL', 'deprecated': False},
'swrule': {'id': 'swrule', 'deprecated': False},
'symlinks': {'id': 'Symlinks', 'deprecated': False},
'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0', 'deprecated': False},
'tcl': {'id': 'TCL', 'deprecated': False},
'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False},
'termreadkey': {'id': 'TermReadKey', 'deprecated': False},
'tgppl-1.0': {'id': 'TGPPL-1.0', 'deprecated': False},
'threeparttable': {'id': 'threeparttable', 'deprecated': False},
'tmate': {'id': 'TMate', 'deprecated': False},
'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': False},
'tosl': {'id': 'TOSL', 'deprecated': False},
'tpdl': {'id': 'TPDL', 'deprecated': False},
'tpl-1.0': {'id': 'TPL-1.0', 'deprecated': False},
'ttwl': {'id': 'TTWL', 'deprecated': False},
'ttyp0': {'id': 'TTYP0', 'deprecated': False},
'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False},
'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False},
'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0', 'deprecated': False},
'ucar': {'id': 'UCAR', 'deprecated': False},
'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False},
'ulem': {'id': 'ulem', 'deprecated': False},
'umich-merit': {'id': 'UMich-Merit', 'deprecated': False},
'unicode-3.0': {'id': 'Unicode-3.0', 'deprecated': False},
'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False},
'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False},
'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False},
'unixcrypt': {'id': 'UnixCrypt', 'deprecated': False},
'unlicense': {'id': 'Unlicense', 'deprecated': False},
'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False},
'urt-rle': {'id': 'URT-RLE', 'deprecated': False},
'vim': {'id': 'Vim', 'deprecated': False},
'vostrom': {'id': 'VOSTROM', 'deprecated': False},
'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': False},
'w3c': {'id': 'W3C', 'deprecated': False},
'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False},
'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False},
'w3m': {'id': 'w3m', 'deprecated': False},
'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False},
'widget-workshop': {'id': 'Widget-Workshop', 'deprecated': False},
'wsuipa': {'id': 'Wsuipa', 'deprecated': False},
'wtfpl': {'id': 'WTFPL', 'deprecated': False},
'wxwindows': {'id': 'wxWindows', 'deprecated': True},
'x11': {'id': 'X11', 'deprecated': False},
'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False},
'x11-swapped': {'id': 'X11-swapped', 'deprecated': False},
'xdebug-1.03': {'id': 'Xdebug-1.03', 'deprecated': False},
'xerox': {'id': 'Xerox', 'deprecated': False},
'xfig': {'id': 'Xfig', 'deprecated': False},
'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False},
'xinetd': {'id': 'xinetd', 'deprecated': False},
'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev', 'deprecated': False},
'xlock': {'id': 'xlock', 'deprecated': False},
'xnet': {'id': 'Xnet', 'deprecated': False},
'xpp': {'id': 'xpp', 'deprecated': False},
'xskat': {'id': 'XSkat', 'deprecated': False},
'xzoom': {'id': 'xzoom', 'deprecated': False},
'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False},
'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False},
'zed': {'id': 'Zed', 'deprecated': False},
'zeeff': {'id': 'Zeeff', 'deprecated': False},
'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False},
'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False},
'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False},
'zlib': {'id': 'Zlib', 'deprecated': False},
'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False},
'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False},
'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False},
'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False},
}
# Mapping of lower-cased SPDX license-exception identifiers to their canonical
# casing (``id``) and whether SPDX has deprecated the identifier.
EXCEPTIONS: dict[str, SPDXException] = {
    '389-exception': {'id': '389-exception', 'deprecated': False},
    'asterisk-exception': {'id': 'Asterisk-exception', 'deprecated': False},
    'asterisk-linking-protocols-exception': {'id': 'Asterisk-linking-protocols-exception', 'deprecated': False},
    'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False},
    'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False},
    'autoconf-exception-generic': {'id': 'Autoconf-exception-generic', 'deprecated': False},
    'autoconf-exception-generic-3.0': {'id': 'Autoconf-exception-generic-3.0', 'deprecated': False},
    'autoconf-exception-macro': {'id': 'Autoconf-exception-macro', 'deprecated': False},
    'bison-exception-1.24': {'id': 'Bison-exception-1.24', 'deprecated': False},
    'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False},
    'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False},
    'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False},
    'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False},
    'cryptsetup-openssl-exception': {'id': 'cryptsetup-OpenSSL-exception', 'deprecated': False},
    'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False},
    'ecos-exception-2.0': {'id': 'eCos-exception-2.0', 'deprecated': False},
    'erlang-otp-linking-exception': {'id': 'erlang-otp-linking-exception', 'deprecated': False},
    'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False},
    'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False},
    'fmt-exception': {'id': 'fmt-exception', 'deprecated': False},
    'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False},
    'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False},
    'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False},
    'gcc-exception-2.0-note': {'id': 'GCC-exception-2.0-note', 'deprecated': False},
    'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False},
    'gmsh-exception': {'id': 'Gmsh-exception', 'deprecated': False},
    'gnat-exception': {'id': 'GNAT-exception', 'deprecated': False},
    'gnome-examples-exception': {'id': 'GNOME-examples-exception', 'deprecated': False},
    'gnu-compiler-exception': {'id': 'GNU-compiler-exception', 'deprecated': False},
    'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False},
    'gpl-3.0-interface-exception': {'id': 'GPL-3.0-interface-exception', 'deprecated': False},
    'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False},
    'gpl-3.0-linking-source-exception': {'id': 'GPL-3.0-linking-source-exception', 'deprecated': False},
    'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 'deprecated': False},
    'gstreamer-exception-2005': {'id': 'GStreamer-exception-2005', 'deprecated': False},
    'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False},
    'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False},
    'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False},
    'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False},
    'libpri-openh323-exception': {'id': 'libpri-OpenH323-exception', 'deprecated': False},
    'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False},
    'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False},
    'llgpl': {'id': 'LLGPL', 'deprecated': False},
    'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False},
    'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False},
    'mif-exception': {'id': 'mif-exception', 'deprecated': False},
    'nokia-qt-exception-1.1': {'id': 'Nokia-Qt-exception-1.1', 'deprecated': True},
    'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False},
    'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False},
    'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False},
    'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False},
    'pcre2-exception': {'id': 'PCRE2-exception', 'deprecated': False},
    'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False},
    'qpl-1.0-inria-2004-exception': {'id': 'QPL-1.0-INRIA-2004-exception', 'deprecated': False},
    'qt-gpl-exception-1.0': {'id': 'Qt-GPL-exception-1.0', 'deprecated': False},
    'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False},
    'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False},
    'romic-exception': {'id': 'romic-exception', 'deprecated': False},
    'rrdtool-floss-exception-2.0': {'id': 'RRDtool-FLOSS-exception-2.0', 'deprecated': False},
    'sane-exception': {'id': 'SANE-exception', 'deprecated': False},
    'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False},
    'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False},
    'stunnel-exception': {'id': 'stunnel-exception', 'deprecated': False},
    'swi-exception': {'id': 'SWI-exception', 'deprecated': False},
    'swift-exception': {'id': 'Swift-exception', 'deprecated': False},
    'texinfo-exception': {'id': 'Texinfo-exception', 'deprecated': False},
    'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False},
    'ubdl-exception': {'id': 'UBDL-exception', 'deprecated': False},
    'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False},
    'vsftpd-openssl-exception': {'id': 'vsftpd-openssl-exception', 'deprecated': False},
    'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 'deprecated': False},
    'x11vnc-openssl-exception': {'id': 'x11vnc-openssl-exception', 'deprecated': False},
}

View File

@ -0,0 +1,362 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import operator
import os
import platform
import sys
from typing import AbstractSet, Any, Callable, Literal, TypedDict, Union, cast
from ._parser import MarkerAtom, MarkerList, Op, Value, Variable
from ._parser import parse_marker as _parse_marker
from ._tokenizer import ParserSyntaxError
from .specifiers import InvalidSpecifier, Specifier
from .utils import canonicalize_name
__all__ = [
    "EvaluateContext",
    "InvalidMarker",
    "Marker",
    "UndefinedComparison",
    "UndefinedEnvironmentName",
    "default_environment",
]
# Signature of the comparison callables stored in ``_operators``: the left
# side is always a string; the right side may be a string or, for the markers
# in MARKERS_ALLOWING_SET, a set of strings.
Operator = Callable[[str, Union[str, AbstractSet[str]]], bool]
# What the markers are being evaluated for; see ``Marker.evaluate``.
EvaluateContext = Literal["metadata", "lock_file", "requirement"]
# Marker names whose environment value may be a set of strings (used in
# ``_normalize``) rather than a single string.
MARKERS_ALLOWING_SET = {"extras", "dependency_groups"}
class InvalidMarker(ValueError):
    """
    An invalid marker was found; users should refer to PEP 508 for the
    accepted environment-marker syntax.
    """
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it
    (raised when a marker comparison has no defined operator).
    """
class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """
class Environment(TypedDict):
    """A dictionary of environment-marker values describing an interpreter.

    Keys match the PEP 508 marker variable names; see ``default_environment``
    for how each value is obtained for the running interpreter.
    """
    implementation_name: str
    """The implementation's identifier, e.g. ``'cpython'``."""
    implementation_version: str
    """
    The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or
    ``'7.3.13'`` for PyPy3.10 v7.3.13.
    """
    os_name: str
    """
    The value of :py:data:`os.name`. The name of the operating system dependent module
    imported, e.g. ``'posix'``.
    """
    platform_machine: str
    """
    Returns the machine type, e.g. ``'i386'``.
    An empty string if the value cannot be determined.
    """
    platform_release: str
    """
    The system's release, e.g. ``'2.2.0'`` or ``'NT'``.
    An empty string if the value cannot be determined.
    """
    platform_system: str
    """
    The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``.
    An empty string if the value cannot be determined.
    """
    platform_version: str
    """
    The system's release version, e.g. ``'#3 on degas'``.
    An empty string if the value cannot be determined.
    """
    python_full_version: str
    """
    The Python version as string ``'major.minor.patchlevel'``.
    Note that unlike the Python :py:data:`sys.version`, this value will always include
    the patchlevel (it defaults to 0).
    """
    platform_python_implementation: str
    """
    A string identifying the Python implementation, e.g. ``'CPython'``.
    """
    python_version: str
    """The Python version as string ``'major.minor'``."""
    sys_platform: str
    """
    This string contains a platform identifier that can be used to append
    platform-specific components to :py:data:`sys.path`, for instance.
    For Unix systems, except on Linux and AIX, this is the lowercased OS name as
    returned by ``uname -s`` with the first part of the version as returned by
    ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python
    was built.
    """
def _normalize_extra_values(results: Any) -> Any:
"""
Normalize extra values.
"""
if isinstance(results[0], tuple):
lhs, op, rhs = results[0]
if isinstance(lhs, Variable) and lhs.value == "extra":
normalized_extra = canonicalize_name(rhs.value)
rhs = Value(normalized_extra)
elif isinstance(rhs, Variable) and rhs.value == "extra":
normalized_extra = canonicalize_name(lhs.value)
lhs = Value(normalized_extra)
results[0] = lhs, op, rhs
return results
def _format_marker(
marker: list[str] | MarkerAtom | str, first: bool | None = True
) -> str:
assert isinstance(marker, (list, tuple, str))
# Sometimes we have a structure like [[...]] which is a single item list
# where the single item is itself it's own list. In that case we want skip
# the rest of this function so that we don't get extraneous () on the
# outside.
if (
isinstance(marker, list)
and len(marker) == 1
and isinstance(marker[0], (list, tuple))
):
return _format_marker(marker[0])
if isinstance(marker, list):
inner = (_format_marker(m, first=False) for m in marker)
if first:
return " ".join(inner)
else:
return "(" + " ".join(inner) + ")"
elif isinstance(marker, tuple):
return " ".join([m.serialize() for m in marker])
else:
return marker
# Fallback string-comparison operators, keyed by their serialized form; used
# by ``_eval_op`` when the right-hand side is not a valid PEP 440 specifier.
# ``in`` / ``not in`` also work with set-valued right-hand sides.
_operators: dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
def _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str]) -> bool:
    """Apply one marker comparison.

    A string right-hand side is first tried as a PEP 440 version specifier;
    if it is not one, the plain string operators in ``_operators`` are used.

    :raises UndefinedComparison: if ``op`` has no defined fallback operator.
    """
    if isinstance(rhs, str):
        try:
            spec = Specifier(op.serialize() + rhs)
        except InvalidSpecifier:
            pass  # Not a version specifier; fall through to string compare.
        else:
            return spec.contains(lhs, prereleases=True)
    comparator: Operator | None = _operators.get(op.serialize())
    if comparator is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
    return comparator(lhs, rhs)
def _normalize(
    lhs: str, rhs: str | AbstractSet[str], key: str
) -> tuple[str, str | AbstractSet[str]]:
    """Normalize both sides of a comparison for name-like marker variables.

    PEP 685 (building on PEP 503) requires extra names to be normalized
    before comparison; the same treatment is applied to the set-valued
    markers listed in MARKERS_ALLOWING_SET. All other markers pass through
    unchanged.
    """
    if key == "extra":
        assert isinstance(rhs, str), "extra value must be a string"
        return canonicalize_name(lhs), canonicalize_name(rhs)
    if key in MARKERS_ALLOWING_SET:
        if isinstance(rhs, str):  # pragma: no cover
            return canonicalize_name(lhs), canonicalize_name(rhs)
        return canonicalize_name(lhs), {canonicalize_name(v) for v in rhs}
    # Other environment markers have no normalization standard.
    return lhs, rhs
def _evaluate_markers(
    markers: MarkerList, environment: dict[str, str | AbstractSet[str]]
) -> bool:
    """Evaluate a parsed marker expression against *environment*.

    ``markers`` is the recursive structure produced by ``_parse_marker``:
    tuples are single comparisons, nested lists are parenthesized
    sub-expressions, and the strings ``"and"``/``"or"`` join them.
    """
    # Disjunctive normal form: each inner list accumulates one "and"-joined
    # run of results; a fresh list is started at every "or".
    groups: list[list[bool]] = [[]]
    for marker in markers:
        assert isinstance(marker, (list, tuple, str))
        if isinstance(marker, list):
            # Parenthesized sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker
            # Exactly one side is a Variable naming an environment key; the
            # other is the literal it is compared against.
            if isinstance(lhs, Variable):
                environment_key = lhs.value
                lhs_value = environment[environment_key]
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                environment_key = rhs.value
                rhs_value = environment[environment_key]
            assert isinstance(lhs_value, str), "lhs must be a string"
            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])
    # "or" of "and"s: true when every comparison in any one group held.
    return any(all(item) for item in groups)
def format_full_version(info: sys._version_info) -> str:
    """Render a version-info tuple as ``'major.minor.micro'``, appending a
    release-level letter and serial (e.g. ``'a2'``) for non-final releases."""
    version = f"{info.major}.{info.minor}.{info.micro}"
    if info.releaselevel != "final":
        version = f"{version}{info.releaselevel[0]}{info.serial}"
    return version
def default_environment() -> Environment:
    """Return the PEP 508 marker environment for the running interpreter.

    Values are gathered from :mod:`sys`, :mod:`os` and :mod:`platform` at
    call time.
    """
    implementation = sys.implementation
    return {
        "implementation_name": implementation.name,
        "implementation_version": format_full_version(implementation.version),
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
class Marker:
    """A parsed PEP 508 environment marker, e.g. ``python_version >= "3.9"``."""
    def __init__(self, marker: str) -> None:
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.
        try:
            self._markers = _normalize_extra_values(_parse_marker(marker))
            # The attribute `_markers` can be described in terms of a recursive type:
            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            # [
            # (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            # 'or',
            # [
            # (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            # 'and',
            # (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            # ]
            # ]
        except ParserSyntaxError as e:
            # Re-raise parser errors under the public exception type.
            raise InvalidMarker(str(e)) from e
    def __str__(self) -> str:
        # Canonical serialized form of the marker.
        return _format_marker(self._markers)
    def __repr__(self) -> str:
        return f"<Marker('{self}')>"
    def __hash__(self) -> int:
        # Hash on the canonical string so equal markers hash equally.
        return hash((self.__class__.__name__, str(self)))
    def __eq__(self, other: Any) -> bool:
        # Markers compare equal when their canonical serializations match.
        if not isinstance(other, Marker):
            return NotImplemented
        return str(self) == str(other)
    def evaluate(
        self,
        environment: dict[str, str] | None = None,
        context: EvaluateContext = "metadata",
    ) -> bool:
        """Evaluate a marker.
        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment. The *context* parameter specifies what
        context the markers are being evaluated for, which influences what markers
        are considered valid. Acceptable values are "metadata" (for core metadata;
        default), "lock_file", and "requirement" (i.e. all other situations).
        The environment is determined from the current Python process.
        """
        current_environment = cast(
            "dict[str, str | AbstractSet[str]]", default_environment()
        )
        if context == "lock_file":
            # Lock files compare against set-valued extras/dependency_groups.
            current_environment.update(
                extras=frozenset(), dependency_groups=frozenset()
            )
        elif context == "metadata":
            current_environment["extra"] = ""
        if environment is not None:
            current_environment.update(environment)
        # The API used to allow setting extra to None. We need to handle this
        # case for backwards compatibility.
        if "extra" in current_environment and current_environment["extra"] is None:
            current_environment["extra"] = ""
        return _evaluate_markers(
            self._markers, _repair_python_full_version(current_environment)
        )
def _repair_python_full_version(
env: dict[str, str | AbstractSet[str]],
) -> dict[str, str | AbstractSet[str]]:
"""
Work around platform.python_version() returning something that is not PEP 440
compliant for non-tagged Python builds.
"""
python_full_version = cast(str, env["python_full_version"])
if python_full_version.endswith("+"):
env["python_full_version"] = f"{python_full_version}local"
return env

View File

@ -0,0 +1,862 @@
from __future__ import annotations
import email.feedparser
import email.header
import email.message
import email.parser
import email.policy
import pathlib
import sys
import typing
from typing import (
Any,
Callable,
Generic,
Literal,
TypedDict,
cast,
)
from . import licenses, requirements, specifiers, utils
from . import version as version_module
from .licenses import NormalizedLicenseExpression
# Generic type variable; its consumers appear later in this module.
T = typing.TypeVar("T")
# Use the builtin ExceptionGroup on Python 3.11+; otherwise define a minimal
# stand-in so multi-error reporting works on older interpreters.
if sys.version_info >= (3, 11): # pragma: no cover
    ExceptionGroup = ExceptionGroup
else: # pragma: no cover
    class ExceptionGroup(Exception):
        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
        If :external:exc:`ExceptionGroup` is already defined by Python itself,
        that version is used instead.
        """
        message: str
        exceptions: list[Exception]
        def __init__(self, message: str, exceptions: list[Exception]) -> None:
            self.message = message
            self.exceptions = exceptions
        def __repr__(self) -> str:
            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
class InvalidMetadata(ValueError):
    """Raised when a metadata field contains invalid data."""

    #: The name of the field that contains invalid data.
    field: str

    def __init__(self, field: str, message: str) -> None:
        super().__init__(message)
        self.field = field
# The RawMetadata class attempts to make as few assumptions about the underlying
# serialization formats as possible. The idea is that as long as a serialization
# format offers some very basic primitives in *some* way then we can support
# serializing to and from that format.
class RawMetadata(TypedDict, total=False):
    """A dictionary of raw core metadata.
    Each field in core metadata maps to a key of this dictionary (when data is
    provided). The key is lower-case and underscores are used instead of dashes
    compared to the equivalent core metadata field. Any core metadata field that
    can be specified multiple times or can hold multiple values in a single
    field have a key with a plural name. See :class:`Metadata` whose attributes
    match the keys of this dictionary.
    Core metadata fields that can be specified multiple times are stored as a
    list or dict depending on which is appropriate for the field. Any fields
    which hold multiple values in a single field are stored as a list.
    """
    # Metadata 1.0 - PEP 241
    metadata_version: str
    name: str
    version: str
    platforms: list[str]
    summary: str
    description: str
    keywords: list[str]
    home_page: str
    author: str
    author_email: str
    license: str
    # Metadata 1.1 - PEP 314
    supported_platforms: list[str]
    download_url: str
    classifiers: list[str]
    requires: list[str]
    provides: list[str]
    obsoletes: list[str]
    # Metadata 1.2 - PEP 345
    maintainer: str
    maintainer_email: str
    requires_dist: list[str]
    provides_dist: list[str]
    obsoletes_dist: list[str]
    requires_python: str
    requires_external: list[str]
    project_urls: dict[str, str]
    # Metadata 2.0
    # PEP 426 attempted to completely revamp the metadata format
    # but got stuck without ever being able to build consensus on
    # it and ultimately ended up withdrawn.
    #
    # However, a number of tools had started emitting METADATA with
    # `2.0` Metadata-Version, so for historical reasons, this version
    # was skipped.
    # Metadata 2.1 - PEP 566
    description_content_type: str
    provides_extra: list[str]
    # Metadata 2.2 - PEP 643
    dynamic: list[str]
    # Metadata 2.3 - PEP 685
    # No new fields were added in PEP 685, just some edge cases were
    # tightened up to provide better interoperability.
    # Metadata 2.4 - PEP 639
    license_expression: str
    license_files: list[str]
# RawMetadata keys that hold a single string value.
_STRING_FIELDS = {
    "author",
    "author_email",
    "description",
    "description_content_type",
    "download_url",
    "home_page",
    "license",
    "license_expression",
    "maintainer",
    "maintainer_email",
    "metadata_version",
    "name",
    "requires_python",
    "summary",
    "version",
}
# RawMetadata keys that hold a list of strings (fields that may be specified
# multiple times in core metadata).
_LIST_FIELDS = {
    "classifiers",
    "dynamic",
    "license_files",
    "obsoletes",
    "obsoletes_dist",
    "platforms",
    "provides",
    "provides_dist",
    "provides_extra",
    "requires",
    "requires_dist",
    "requires_external",
    "supported_platforms",
}
# RawMetadata keys that hold a dict (label -> URL pairings).
_DICT_FIELDS = {
    "project_urls",
}
def _parse_keywords(data: str) -> list[str]:
"""Split a string of comma-separated keywords into a list of keywords."""
return [k.strip() for k in data.split(",")]
def _parse_project_urls(data: list[str]) -> dict[str, str]:
"""Parse a list of label/URL string pairings separated by a comma."""
urls = {}
for pair in data:
# Our logic is slightly tricky here as we want to try and do
# *something* reasonable with malformed data.
#
# The main thing that we have to worry about, is data that does
# not have a ',' at all to split the label from the Value. There
# isn't a singular right answer here, and we will fail validation
# later on (if the caller is validating) so it doesn't *really*
# matter, but since the missing value has to be an empty str
# and our return value is dict[str, str], if we let the key
# be the missing value, then they'd have multiple '' values that
# overwrite each other in a accumulating dict.
#
# The other potentional issue is that it's possible to have the
# same label multiple times in the metadata, with no solid "right"
# answer with what to do in that case. As such, we'll do the only
# thing we can, which is treat the field as unparseable and add it
# to our list of unparsed fields.
parts = [p.strip() for p in pair.split(",", 1)]
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
# TODO: The spec doesn't say anything about if the keys should be
# considered case sensitive or not... logically they should
# be case-preserving and case-insensitive, but doing that
# would open up more cases where we might have duplicate
# entries.
label, url = parts
if label in urls:
# The label already exists in our set of urls, so this field
# is unparseable, and we can just add the whole thing to our
# unparseable data and stop processing it.
raise KeyError("duplicate labels in project urls")
urls[label] = url
return urls
def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
"""Get the body of the message."""
# If our source is a str, then our caller has managed encodings for us,
# and we don't need to deal with it.
if isinstance(source, str):
payload = msg.get_payload()
assert isinstance(payload, str)
return payload
# If our source is a bytes, then we're managing the encoding and we need
# to deal with it.
else:
bpayload = msg.get_payload(decode=True)
assert isinstance(bpayload, bytes)
try:
return bpayload.decode("utf8", "strict")
except UnicodeDecodeError as exc:
raise ValueError("payload in an invalid encoding") from exc
# The various parse_FORMAT functions here are intended to be as lenient as
# possible in their parsing, while still returning a correctly typed
# RawMetadata.
#
# To aid in this, we also generally want to do as little touching of the
# data as possible, except where there are possibly some historic holdovers
# that make valid data awkward to work with.
#
# While this is a lower level, intermediate format than our ``Metadata``
# class, some light touch ups can make a massive difference in usability.
# Map METADATA fields to RawMetadata.
# Keys are the lower-cased email header names; values are the corresponding
# RawMetadata keys (plural where the field may appear multiple times).
_EMAIL_TO_RAW_MAPPING = {
    "author": "author",
    "author-email": "author_email",
    "classifier": "classifiers",
    "description": "description",
    "description-content-type": "description_content_type",
    "download-url": "download_url",
    "dynamic": "dynamic",
    "home-page": "home_page",
    "keywords": "keywords",
    "license": "license",
    "license-expression": "license_expression",
    "license-file": "license_files",
    "maintainer": "maintainer",
    "maintainer-email": "maintainer_email",
    "metadata-version": "metadata_version",
    "name": "name",
    "obsoletes": "obsoletes",
    "obsoletes-dist": "obsoletes_dist",
    "platform": "platforms",
    "project-url": "project_urls",
    "provides": "provides",
    "provides-dist": "provides_dist",
    "provides-extra": "provides_extra",
    "requires": "requires",
    "requires-dist": "requires_dist",
    "requires-external": "requires_external",
    "requires-python": "requires_python",
    "summary": "summary",
    "supported-platform": "supported_platforms",
    "version": "version",
}
# Inverse mapping: RawMetadata key -> email header name. The comprehension
# variables are named so they don't shadow the imported ``email`` module.
_RAW_TO_EMAIL_MAPPING = {
    raw_name: header_name for header_name, raw_name in _EMAIL_TO_RAW_MAPPING.items()
}
def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).

    This function returns a two-item tuple of dicts. The first dict is of
    recognized fields from the core metadata specification. Fields that can be
    parsed and translated into Python's built-in types are converted
    appropriately. All other fields are left as-is. Fields that are allowed to
    appear multiple times are stored as lists.

    The second dict contains all other fields from the metadata. This includes
    any unrecognized fields. It also includes any fields which are expected to
    be parsed into a built-in type but were not formatted appropriately. Finally,
    any fields that are expected to appear only once but are repeated are
    included in this dict.

    :param data: the raw metadata, either as text or as undecoded bytes.
    :return: a ``(raw, unparsed)`` tuple as described above.
    """
    raw: dict[str, str | list[str] | dict[str, str]] = {}
    unparsed: dict[str, list[str]] = {}
    if isinstance(data, str):
        parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
    else:
        parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)
    # We have to wrap parsed.keys() in a set, because in the case of multiple
    # values for a key (a list), the key will appear multiple times in the
    # list of keys, but we're avoiding that by using get_all().
    for name in frozenset(parsed.keys()):
        # Header names in RFC are case insensitive, so we'll normalize to all
        # lower case to make comparisons easier.
        name = name.lower()
        # We use get_all() here, even for fields that aren't multiple use,
        # because otherwise someone could have e.g. two Name fields, and we
        # would just silently ignore it rather than doing something about it.
        headers = parsed.get_all(name) or []
        # The way the email module works when parsing bytes is that it
        # unconditionally decodes the bytes as ascii using the surrogateescape
        # handler. When you pull that data back out (such as with get_all() ),
        # it looks to see if the str has any surrogate escapes, and if it does
        # it wraps it in a Header object instead of returning the string.
        #
        # As such, we'll look for those Header objects, and fix up the encoding.
        value = []
        # Flag if we have run into any issues processing the headers, thus
        # signalling that the data belongs in 'unparsed'.
        valid_encoding = True
        for h in headers:
            # It's unclear if this can return more types than just a Header or
            # a str, so we'll just assert here to make sure.
            assert isinstance(h, (email.header.Header, str))
            # If it's a header object, we need to do our little dance to get
            # the real data out of it. In cases where there is invalid data
            # we're going to end up with mojibake, but there's no obvious, good
            # way around that without reimplementing parts of the Header object
            # ourselves.
            #
            # That should be fine since, if mojibacked happens, this key is
            # going into the unparsed dict anyways.
            if isinstance(h, email.header.Header):
                # The Header object stores it's data as chunks, and each chunk
                # can be independently encoded, so we'll need to check each
                # of them.
                chunks: list[tuple[bytes, str | None]] = []
                for bin, encoding in email.header.decode_header(h):
                    try:
                        bin.decode("utf8", "strict")
                    except UnicodeDecodeError:
                        # Enable mojibake.
                        encoding = "latin1"
                        valid_encoding = False
                    else:
                        encoding = "utf8"
                    chunks.append((bin, encoding))
                # Turn our chunks back into a Header object, then let that
                # Header object do the right thing to turn them into a
                # string for us.
                value.append(str(email.header.make_header(chunks)))
            # This is already a string, so just add it.
            else:
                value.append(h)
        # We've processed all of our values to get them into a list of str,
        # but we may have mojibake data, in which case this is an unparsed
        # field.
        if not valid_encoding:
            unparsed[name] = value
            continue
        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
        if raw_name is None:
            # This is a bit of a weird situation, we've encountered a key that
            # we don't know what it means, so we don't know whether it's meant
            # to be a list or not.
            #
            # Since we can't really tell one way or another, we'll just leave it
            # as a list, even though it may be a single item list, because that's
            # what makes the most sense for email headers.
            unparsed[name] = value
            continue
        # If this is one of our string fields, then we'll check to see if our
        # value is a list of a single item. If it is then we'll assume that
        # it was emitted as a single string, and unwrap the str from inside
        # the list.
        #
        # If it's any other kind of data, then we haven't the faintest clue
        # what we should parse it as, and we have to just add it to our list
        # of unparsed stuff.
        if raw_name in _STRING_FIELDS and len(value) == 1:
            raw[raw_name] = value[0]
        # If this is one of our list of string fields, then we can just assign
        # the value, since email *only* has strings, and our get_all() call
        # above ensures that this is a list.
        elif raw_name in _LIST_FIELDS:
            raw[raw_name] = value
        # Special Case: Keywords
        # The keywords field is implemented in the metadata spec as a str,
        # but it conceptually is a list of strings, and is serialized using
        # ", ".join(keywords), so we'll do some light data massaging to turn
        # this into what it logically is.
        elif raw_name == "keywords" and len(value) == 1:
            raw[raw_name] = _parse_keywords(value[0])
        # Special Case: Project-URL
        # The project urls is implemented in the metadata spec as a list of
        # specially-formatted strings that represent a key and a value, which
        # is fundamentally a mapping, however the email format doesn't support
        # mappings in a sane way, so it was crammed into a list of strings
        # instead.
        #
        # We will do a little light data massaging to turn this into a map as
        # it logically should be.
        elif raw_name == "project_urls":
            try:
                raw[raw_name] = _parse_project_urls(value)
            except KeyError:
                unparsed[name] = value
        # Nothing that we've done has managed to parse this, so it'll just
        # throw it in our unparseable data and move on.
        else:
            unparsed[name] = value
    # We need to support getting the Description from the message payload in
    # addition to getting it from the headers. This does mean, though, there
    # is the possibility of it being set both ways, in which case we put both
    # in 'unparsed' since we don't know which is right.
    try:
        payload = _get_payload(parsed, data)
    except ValueError:
        # The payload is in an invalid encoding; preserve it verbatim as
        # unparsed description data.
        unparsed.setdefault("description", []).append(
            parsed.get_payload(decode=isinstance(data, bytes))  # type: ignore[call-overload]
        )
    else:
        if payload:
            # Check to see if we've already got a description, if so then both
            # it, and this body move to unparseable.
            if "description" in raw:
                description_header = cast(str, raw.pop("description"))
                unparsed.setdefault("description", []).extend(
                    [description_header, payload]
                )
            elif "description" in unparsed:
                unparsed["description"].append(payload)
            else:
                raw["description"] = payload
    # We need to cast our `raw` to a metadata, because a TypedDict only support
    # literal key names, but we're computing our key names on purpose, but the
    # way this function is implemented, our `TypedDict` can only have valid key
    # names.
    return cast(RawMetadata, raw), unparsed
# Sentinel for "value not found", distinct from None (which may be valid data).
_NOT_FOUND = object()

# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]

# Fields that every metadata file must provide.
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
class _Validator(Generic[T]):
    """Validate a metadata field.

    All _process_*() methods correspond to a core metadata field. The method is
    called with the field's raw value. If the raw value is valid it is returned
    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
    as appropriate).
    """

    # Attribute name on the owning Metadata class (set by __set_name__).
    name: str
    # Corresponding core-metadata field name, e.g. "requires-dist".
    raw_name: str
    # Metadata version in which the field was introduced.
    added: _MetadataVersion

    def __init__(
        self,
        *,
        added: _MetadataVersion = "1.0",
    ) -> None:
        self.added = added

    def __set_name__(self, _owner: Metadata, name: str) -> None:
        self.name = name
        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]

    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
        # Enriched values are cached on the instance: attribute lookup resolves
        # into the instance's __dict__ before __get__ is called, so there is no
        # need to check the cache here.
        cache = instance.__dict__
        value = instance._raw.get(self.name)

        # To make the _process_* methods easier, we'll check if the value is None
        # and if this field is NOT a required attribute, and if both of those
        # things are true, we'll skip the converter. This will mean that the
        # converters never have to deal with the None union.
        if self.name in _REQUIRED_ATTRS or value is not None:
            try:
                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
            except AttributeError:
                # No converter for this field; the raw value passes through.
                pass
            else:
                value = converter(value)

        cache[self.name] = value
        try:
            # The raw entry has been enriched and cached, so drop it.
            del instance._raw[self.name]  # type: ignore[misc]
        except KeyError:
            pass

        return cast(T, value)

    def _invalid_metadata(
        self, msg: str, cause: Exception | None = None
    ) -> InvalidMetadata:
        """Build (not raise) an InvalidMetadata with ``{field}`` substituted."""
        exc = InvalidMetadata(
            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
        )
        exc.__cause__ = cause
        return exc

    def _process_metadata_version(self, value: str) -> _MetadataVersion:
        # Implicitly makes Metadata-Version required.
        if value not in _VALID_METADATA_VERSIONS:
            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
        return cast(_MetadataVersion, value)

    def _process_name(self, value: str) -> str:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        # Validate the name as a side-effect.
        try:
            utils.canonicalize_name(value, validate=True)
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return value

    def _process_version(self, value: str) -> version_module.Version:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        try:
            return version_module.parse(value)
        except version_module.InvalidVersion as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_summary(self, value: str) -> str:
        """Check the field contains no newlines."""
        if "\n" in value:
            raise self._invalid_metadata("{field} must be a single line")
        return value

    def _process_description_content_type(self, value: str) -> str:
        content_types = {"text/plain", "text/x-rst", "text/markdown"}
        message = email.message.EmailMessage()
        message["content-type"] = value

        content_type, parameters = (
            # Defaults to `text/plain` if parsing failed.
            message.get_content_type().lower(),
            message["content-type"].params,
        )
        # Check if content-type is valid or defaulted to `text/plain` and thus was
        # not parseable.
        if content_type not in content_types or content_type not in value.lower():
            raise self._invalid_metadata(
                f"{{field}} must be one of {list(content_types)}, not {value!r}"
            )

        charset = parameters.get("charset", "UTF-8")
        if charset != "UTF-8":
            # BUGFIX: previously formatted as list(charset), which split the
            # charset string into individual characters in the error message.
            raise self._invalid_metadata(
                f"{{field}} can only specify the UTF-8 charset, not {charset!r}"
            )

        markdown_variants = {"GFM", "CommonMark"}
        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
        if content_type == "text/markdown" and variant not in markdown_variants:
            raise self._invalid_metadata(
                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
                f"not {variant!r}",
            )
        return value

    def _process_dynamic(self, value: list[str]) -> list[str]:
        # Lowercase once up front instead of mapping the input twice.
        lowered = [dynamic_field.lower() for dynamic_field in value]
        for dynamic_field in lowered:
            if dynamic_field in {"name", "version", "metadata-version"}:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not allowed as a dynamic field"
                )
            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not a valid dynamic field"
                )
        return lowered

    def _process_provides_extra(
        self,
        value: list[str],
    ) -> list[utils.NormalizedName]:
        normalized_names = []
        try:
            for name in value:
                normalized_names.append(utils.canonicalize_name(name, validate=True))
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{name!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return normalized_names

    def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
        try:
            return specifiers.SpecifierSet(value)
        except specifiers.InvalidSpecifier as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_requires_dist(
        self,
        value: list[str],
    ) -> list[requirements.Requirement]:
        reqs = []
        try:
            for req in value:
                reqs.append(requirements.Requirement(req))
        except requirements.InvalidRequirement as exc:
            raise self._invalid_metadata(
                f"{req!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return reqs

    def _process_license_expression(
        self, value: str
    ) -> NormalizedLicenseExpression | None:
        try:
            return licenses.canonicalize_license_expression(value)
        except ValueError as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_license_files(self, value: list[str]) -> list[str]:
        paths = []
        for path in value:
            if ".." in path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, "
                    "parent directory indicators are not allowed"
                )
            if "*" in path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must be resolved"
                )
            if (
                pathlib.PurePosixPath(path).is_absolute()
                or pathlib.PureWindowsPath(path).is_absolute()
            ):
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must be relative"
                )
            if pathlib.PureWindowsPath(path).as_posix() != path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must use '/' delimiter"
                )
            paths.append(path)
        return paths
class Metadata:
    """Representation of distribution metadata.

    Compared to :class:`RawMetadata`, this class provides objects representing
    metadata fields instead of only using built-in types. Any invalid metadata
    will cause :exc:`InvalidMetadata` to be raised (with a
    :py:attr:`~BaseException.__cause__` attribute as appropriate).
    """

    # Backing store of unvalidated metadata; entries are enriched lazily and
    # migrate into the instance __dict__ via the _Validator descriptors.
    _raw: RawMetadata

    @classmethod
    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
        """Create an instance from :class:`RawMetadata`.

        If *validate* is true, all metadata will be validated. All exceptions
        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
        """
        ins = cls()
        ins._raw = data.copy()  # Mutations occur due to caching enriched values.

        if validate:
            exceptions: list[Exception] = []
            try:
                metadata_version = ins.metadata_version
                metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
            except InvalidMetadata as metadata_version_exc:
                exceptions.append(metadata_version_exc)
                metadata_version = None

            # Make sure to check for the fields that are present, the required
            # fields (so their absence can be reported).
            fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
            # Remove fields that have already been checked.
            fields_to_check -= {"metadata_version"}

            for key in fields_to_check:
                try:
                    if metadata_version:
                        # Can't use getattr() as that triggers descriptor protocol which
                        # will fail due to no value for the instance argument.
                        try:
                            field_metadata_version = cls.__dict__[key].added
                        except KeyError:
                            exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
                            exceptions.append(exc)
                            continue
                        field_age = _VALID_METADATA_VERSIONS.index(
                            field_metadata_version
                        )
                        if field_age > metadata_age:
                            # The field was introduced after the declared
                            # metadata version, so it may not appear here.
                            field = _RAW_TO_EMAIL_MAPPING[key]
                            exc = InvalidMetadata(
                                field,
                                f"{field} introduced in metadata version "
                                f"{field_metadata_version}, not {metadata_version}",
                            )
                            exceptions.append(exc)
                            continue
                    # Trigger the descriptor so the field is validated/enriched.
                    getattr(ins, key)
                except InvalidMetadata as exc:
                    exceptions.append(exc)

            if exceptions:
                raise ExceptionGroup("invalid metadata", exceptions)

        return ins

    @classmethod
    def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
        """Parse metadata from email headers.

        If *validate* is true, the metadata will be validated. All exceptions
        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
        """
        raw, unparsed = parse_email(data)

        if validate:
            exceptions: list[Exception] = []
            for unparsed_key in unparsed:
                if unparsed_key in _EMAIL_TO_RAW_MAPPING:
                    message = f"{unparsed_key!r} has invalid data"
                else:
                    message = f"unrecognized field: {unparsed_key!r}"
                exceptions.append(InvalidMetadata(unparsed_key, message))

            if exceptions:
                raise ExceptionGroup("unparsed", exceptions)

        try:
            return cls.from_raw(raw, validate=validate)
        except ExceptionGroup as exc_group:
            # Re-raise under a combined banner, suppressing the chained context.
            raise ExceptionGroup(
                "invalid or unparsed metadata", exc_group.exceptions
            ) from None

    metadata_version: _Validator[_MetadataVersion] = _Validator()
    """:external:ref:`core-metadata-metadata-version`
    (required; validated to be a valid metadata version)"""
    # `name` is not normalized/typed to NormalizedName so as to provide access to
    # the original/raw name.
    name: _Validator[str] = _Validator()
    """:external:ref:`core-metadata-name`
    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
    *validate* parameter)"""
    version: _Validator[version_module.Version] = _Validator()
    """:external:ref:`core-metadata-version` (required)"""
    dynamic: _Validator[list[str] | None] = _Validator(
        added="2.2",
    )
    """:external:ref:`core-metadata-dynamic`
    (validated against core metadata field names and lowercased)"""
    platforms: _Validator[list[str] | None] = _Validator()
    """:external:ref:`core-metadata-platform`"""
    supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-supported-platform`"""
    summary: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
    description: _Validator[str | None] = _Validator()  # TODO 2.1: can be in body
    """:external:ref:`core-metadata-description`"""
    description_content_type: _Validator[str | None] = _Validator(added="2.1")
    """:external:ref:`core-metadata-description-content-type` (validated)"""
    keywords: _Validator[list[str] | None] = _Validator()
    """:external:ref:`core-metadata-keywords`"""
    home_page: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-home-page`"""
    download_url: _Validator[str | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-download-url`"""
    author: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-author`"""
    author_email: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-author-email`"""
    maintainer: _Validator[str | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer`"""
    maintainer_email: _Validator[str | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer-email`"""
    license: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-license`"""
    license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator(
        added="2.4"
    )
    """:external:ref:`core-metadata-license-expression`"""
    license_files: _Validator[list[str] | None] = _Validator(added="2.4")
    """:external:ref:`core-metadata-license-file`"""
    classifiers: _Validator[list[str] | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-classifier`"""
    requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-dist`"""
    requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-python`"""
    # Because `Requires-External` allows for non-PEP 440 version specifiers, we
    # don't do any processing on the values.
    requires_external: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-requires-external`"""
    project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-project-url`"""
    # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
    # regardless of metadata version.
    provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(
        added="2.1",
    )
    """:external:ref:`core-metadata-provides-extra`"""
    provides_dist: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-provides-dist`"""
    obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-obsoletes-dist`"""
    requires: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Requires`` (deprecated)"""
    provides: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Provides`` (deprecated)"""
    obsoletes: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Obsoletes`` (deprecated)"""

View File

@ -0,0 +1,91 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
from typing import Any, Iterator
from ._parser import parse_requirement as _parse_requirement
from ._tokenizer import ParserSyntaxError
from .markers import Marker, _normalize_extra_values
from .specifiers import SpecifierSet
from .utils import canonicalize_name
class InvalidRequirement(ValueError):
    """Raised when a requirement string is badly formed.

    Users should refer to PEP 508 for the expected grammar.
    """
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            parsed = _parse_requirement(requirement_string)
        except ParserSyntaxError as e:
            raise InvalidRequirement(str(e)) from e

        self.name: str = parsed.name
        self.url: str | None = parsed.url or None
        self.extras: set[str] = set(parsed.extras or [])
        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
        self.marker: Marker | None = None
        if parsed.marker is not None:
            # Build the Marker without re-parsing: reuse the already-parsed
            # marker expression (with extra values normalized).
            self.marker = Marker.__new__(Marker)
            self.marker._markers = _normalize_extra_values(parsed.marker)

    def _iter_parts(self, name: str) -> Iterator[str]:
        """Yield the string fragments of this requirement, in order."""
        yield name

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            yield f"[{formatted_extras}]"

        if self.specifier:
            yield str(self.specifier)

        if self.url:
            yield f"@ {self.url}"
            # BUGFIX: the separating space belongs *inside* the URL branch —
            # it only separates the URL from a following marker. Without a
            # URL, the marker must follow directly ("name; marker"), not
            # "name ; marker".
            if self.marker:
                yield " "

        if self.marker:
            yield f"; {self.marker}"

    def __str__(self) -> str:
        return "".join(self._iter_parts(self.name))

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        # Hash on the canonicalized name so equal requirements hash equally.
        return hash(
            (
                self.__class__.__name__,
                *self._iter_parts(canonicalize_name(self.name)),
            )
        )

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        return (
            canonicalize_name(self.name) == canonicalize_name(other.name)
            and self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,656 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import logging
import platform
import re
import struct
import subprocess
import sys
import sysconfig
from importlib.machinery import EXTENSION_SUFFIXES
from typing import (
Iterable,
Iterator,
Sequence,
Tuple,
cast,
)
from . import _manylinux, _musllinux
logger = logging.getLogger(__name__)

# A CPython-style version tuple, e.g. (3, 11) or just (3,).
PythonVersion = Sequence[int]
# A macOS version as a (major, minor) pair, e.g. (10, 15).
AppleVersion = Tuple[int, int]

# Maps sys.implementation.name values to the short prefixes used in wheel tags.
INTERPRETER_SHORT_NAMES: dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}

# True when the running interpreter uses 4-byte pointers (a 32-bit build),
# regardless of the OS's own bitness.
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_abi", "_hash", "_interpreter", "_platform"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        # Tags are case-insensitive; normalize once at construction time.
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Sets of tags evaluate __hash__ of every element whenever methods such
        # as .disjoint() run, which may happen hundreds of times while scanning
        # a page of links for matching packages. Computing the hash once here
        # produces significant speedups for downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented
        # Cheap rejection first: unequal hashes mean unequal tags.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
def parse_tag(tag: str) -> frozenset[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set (each dash-separated component may itself be a
    dot-separated list).
    """
    interpreters, abis, platforms = tag.split("-")
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
def _get_config_var(name: str, warn: bool = False) -> int | str | None:
value: int | str | None = sysconfig.get_config_var(name)
if value is None and warn:
logger.debug(
"Config variable '%s' is unset, Python ABI tag may be incorrect", name
)
return value
def _normalize_string(string: str) -> str:
return string.replace(".", "_").replace("-", "_").replace(" ", "_")
def _is_threaded_cpython(abis: list[str]) -> bool:
"""
Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
The threaded builds are indicated by a "t" in the abiflags.
"""
if len(abis) == 0:
return False
# expect e.g., cp313
m = re.match(r"cp\d+(.*)", abis[0])
if not m:
return False
abiflags = m.group(1)
return "t" in abiflags
def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
"""
Determine if the Python version supports abi3.
PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
builds do not support abi3.
"""
return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]:
    """Compute the candidate ABI tags for a CPython interpreter version."""
    py_version = tuple(py_version)  # Enables the tuple comparisons below.
    version = _version_nodot(py_version[:2])
    threading = debug = pymalloc = ucs4 = ""
    py_debug = _get_config_var("Py_DEBUG", warn)
    # Windows doesn't set Py_DEBUG, so the presence of debug-compiled extension
    # suffixes (or a refcount-tracking build) is the best available proxy.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    if py_debug or (
        py_debug is None
        and (hasattr(sys, "gettotalrefcount") or "_d.pyd" in EXTENSION_SUFFIXES)
    ):
        debug = "d"
    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
        threading = "t"
    extra_abis: list[str] = []
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        extra_abis.append(f"cp{version}{threading}")
    return [f"cp{version}{threading}{debug}{pymalloc}{ucs4}", *extra_abis]
def cpython_tags(
    python_version: PythonVersion | None = None,
    abis: Iterable[str] | None = None,
    platforms: Iterable[str] | None = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABItag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    # Version-specific ABIs first, then abi3 (if supported), then 'none'.
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    threading = _is_threaded_cpython(abis)
    use_abi3 = _abi3_applies(python_version, threading)
    if use_abi3:
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if use_abi3:
        # Finally, abi3 tags for every older CPython minor version down to 3.2
        # (when the stable ABI was introduced per PEP 384).
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                version = _version_nodot((python_version[0], minor_version))
                interpreter = f"cp{version}"
                yield Tag(interpreter, "abi3", platform_)
def _generic_abi() -> list[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.
    """
    # Examples of EXT_SUFFIX and the ABI-related part we keep (platform parts
    # are dropped):
    #   linux:   '.cpython-310-x86_64-linux-gnu.so'        => cp310
    #   mac:     '.cpython-310-darwin.so'                  => cp310
    #   win:     '.cp310-win_amd64.pyd'                    => cp310
    #   win:     '.pyd'                                    => cp37 (uses _cpython_abis())
    #   pypy:    '.pypy38-pp73-x86_64-linux-gnu.so'        => pypy38_pp73
    #   graalpy: '.graalpy-38-native-x86_64-darwin.dylib'  => graalpy_38_native
    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython 3.7 and earlier used a bare ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    soabi = parts[1]
    segments = soabi.split("-")
    if soabi.startswith("cpython"):
        # Non-Windows CPython.
        abi = "cp" + segments[1]
    elif soabi.startswith("cp"):
        # Windows CPython.
        abi = segments[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(segments[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(segments[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]
def generic_tags(
    interpreter: str | None = None,
    abis: Iterable[str] | None = None,
    platforms: Iterable[str] | None = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        interpreter = interpreter_name() + interpreter_version(warn=warn)
    abi_list = _generic_abi() if abis is None else list(abis)
    platform_list = list(platforms or platform_tags())
    if "none" not in abi_list:
        abi_list.append("none")
    for abi in abi_list:
        for plat in platform_list:
            yield Tag(interpreter, abi, plat)
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    major = py_version[0]
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{major}"
    if has_minor:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((major, minor))}"
def compatible_tags(
    python_version: PythonVersion | None = None,
    interpreter: str | None = None,
    platforms: Iterable[str] | None = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platform_list = list(platforms or platform_tags())
    for py_tag in _py_interpreter_range(python_version):
        for plat in platform_list:
            yield Tag(py_tag, "none", plat)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    yield from (
        Tag(py_tag, "none", "any") for py_tag in _py_interpreter_range(python_version)
    )
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Map *arch* to its 32-bit name when running a 32-bit interpreter;
    otherwise return the reported architecture unchanged."""
    if is_32bit:
        return "ppc" if arch.startswith("ppc") else "i386"
    return arch
def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]:
formats = [cpu_arch]
if cpu_arch == "x86_64":
if version < (10, 4):
return []
formats.extend(["intel", "fat64", "fat32"])
elif cpu_arch == "i386":
if version < (10, 4):
return []
formats.extend(["intel", "fat32", "fat"])
elif cpu_arch == "ppc64":
# TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
if version > (10, 5) or version < (10, 4):
return []
formats.append("fat64")
elif cpu_arch == "ppc":
if version > (10, 6):
return []
formats.extend(["fat32", "fat"])
if cpu_arch in {"arm64", "x86_64"}:
formats.append("universal2")
if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
formats.append("universal")
return formats
def mac_platforms(
    version: AppleVersion | None = None, arch: str | None = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version.
            # Re-query the OS in a subprocess with SYSTEM_VERSION_COMPAT=0 so the
            # child reports the true (11+) version.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        # NOTE(review): self-assignment is a no-op — presumably kept to aid
        # static type narrowing; confirm before removing.
        version = version
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        # NOTE(review): same no-op self-assignment pattern as above.
        arch = arch

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number.  The major version was always 10.
        major_version = 10
        for minor_version in range(version[1], -1, -1):
            compat_version = major_version, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield f"macosx_{major_version}_{minor_version}_{binary_format}"

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number.  The minor versions are now the midyear updates.
        minor_version = 0
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield f"macosx_{major_version}_{minor_version}_{binary_format}"

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        major_version = 10
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = major_version, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield f"macosx_{major_version}_{minor_version}_{binary_format}"
        else:
            # Non-x86_64 (i.e. arm64): only the universal2 slice can reach back
            # to pre-11 macOS versions.
            for minor_version in range(16, 3, -1):
                compat_version = major_version, minor_version
                binary_format = "universal2"
                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
def ios_platforms(
    version: AppleVersion | None = None, multiarch: str | None = None
) -> Iterator[str]:
    """
    Yields the platform tags for an iOS system.

    :param version: A two-item tuple specifying the iOS version to generate
        platform tags for. Defaults to the current iOS version.
    :param multiarch: The CPU architecture+ABI to generate platform tags for -
        (the value used by `sys.implementation._multiarch` e.g.,
        `arm64_iphoneos` or `x84_64_iphonesimulator`). Defaults to the current
        multiarch value.
    """
    if version is None:
        # if iOS is the current platform, ios_ver *must* be defined. However,
        # it won't exist for CPython versions before 3.13, which causes a mypy
        # error.
        _, release, _, _ = platform.ios_ver()  # type: ignore[attr-defined, unused-ignore]
        version = cast("AppleVersion", tuple(map(int, release.split(".")[:2])))

    if multiarch is None:
        multiarch = sys.implementation._multiarch
    normalized_multiarch = multiarch.replace("-", "_")

    major, minor = version[0], version[1]
    # iOS 12.0 is the first release known to have enough features to support
    # CPython, so a smaller major version can never match anything.
    if major < 12:
        return

    # The requested X.Y version itself, followed by every earlier minor of the
    # same major, in descending order.
    for candidate_minor in range(minor, -1, -1):
        yield f"ios_{major}_{candidate_minor}_{normalized_multiarch}"

    # All earlier majors down to 12, considering minors 9..0.  The highest
    # minor ever shipped is 8 (14.8 and 15.8), but a few extra candidates that
    # never match are harmless and save us from keeping an explicit list of
    # known iOS versions in the code.
    for candidate_major in range(major - 1, 11, -1):
        for candidate_minor in range(9, -1, -1):
            yield f"ios_{candidate_major}_{candidate_minor}_{normalized_multiarch}"
def android_platforms(
    api_level: int | None = None, abi: str | None = None
) -> Iterator[str]:
    """
    Yields the :attr:`~Tag.platform` tags for Android. If this function is invoked on
    non-Android platforms, the ``api_level`` and ``abi`` arguments are required.

    :param int api_level: The maximum `API level
        <https://developer.android.com/tools/releases/platforms>`__ to return. Defaults
        to the current system's version, as returned by ``platform.android_ver``.
    :param str abi: The `Android ABI <https://developer.android.com/ndk/guides/abis>`__,
        e.g. ``arm64_v8a``. Defaults to the current system's ABI , as returned by
        ``sysconfig.get_platform``. Hyphens and periods will be replaced with
        underscores.
    """
    on_android = platform.system() == "Android"
    if not on_android and (api_level is None or abi is None):
        raise TypeError(
            "on non-Android platforms, the api_level and abi arguments are required"
        )

    if api_level is None:
        # Python 3.13 was the first version to return platform.system() == "Android",
        # and also the first version to define platform.android_ver().
        api_level = platform.android_ver().api_level  # type: ignore[attr-defined]
    if abi is None:
        abi = sysconfig.get_platform().split("-")[-1]
    abi = _normalize_string(abi)

    # API level 16 is the minimum known to have enough features to support
    # CPython without major patching; walk from the requested level down to it,
    # inclusive.
    for level in range(api_level, 15, -1):
        yield f"android_{level}_{abi}"
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield Linux platform tags: manylinux first, then musllinux, then the
    plain ``linux_<arch>`` fallbacks."""
    linux = _normalize_string(sysconfig.get_platform())
    if not linux.startswith("linux_"):
        # we should never be here, just yield the sysconfig one and return
        yield linux
        return
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel still reports the 64-bit
        # platform; translate to the 32-bit equivalent.
        linux = {
            "linux_x86_64": "linux_i686",
            "linux_aarch64": "linux_armv8l",
        }.get(linux, linux)
    _, arch = linux.split("_", 1)
    archs = ["armv8l", "armv7l"] if arch == "armv8l" else [arch]
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    for compat_arch in archs:
        yield f"linux_{compat_arch}"
def _generic_platforms() -> Iterator[str]:
    """Yield the single normalized sysconfig platform tag."""
    tag = _normalize_string(sysconfig.get_platform())
    yield tag
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "iOS":
        return ios_platforms()
    if system == "Android":
        return android_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    full_name = sys.implementation.name
    short_name = INTERPRETER_SHORT_NAMES.get(full_name)
    return short_name or full_name
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    configured = _get_config_var("py_version_nodot", warn=warn)
    if configured:
        return str(configured)
    # Fall back to the live interpreter version when sysconfig has no value.
    return _version_nodot(sys.version_info[:2])
def _version_nodot(version: PythonVersion) -> str:
return "".join(map(str, version))
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    interp_name = interpreter_name()
    is_cpython = interp_name == "cp"

    # Interpreter-specific tags first (highest priority).
    if is_cpython:
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # Then the generic "py*" compatibility tags.
    if interp_name == "pp":
        compat_interp = "pp3"
    elif is_cpython:
        compat_interp = "cp" + interpreter_version(warn=warn)
    else:
        compat_interp = None
    yield from compatible_tags(interpreter=compat_interp)

View File

@ -0,0 +1,163 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import functools
import re
from typing import NewType, Tuple, Union, cast
from .tags import Tag, parse_tag
from .version import InvalidVersion, Version, _TrimmedRelease
# A wheel build tag: either empty, or (build number, remainder string) per PEP 427.
BuildTag = Union[Tuple[()], Tuple[int, str]]
# A distribution name already canonicalized per PEP 503 (lowercase, single dashes).
NormalizedName = NewType("NormalizedName", str)
class InvalidName(ValueError):
    """Raised for a distribution name that violates the core-metadata
    ``Name`` rules; users should refer to the packaging user guide.
    """
class InvalidWheelFilename(ValueError):
    """Raised for a wheel filename that does not follow the PEP 427
    naming convention; users should refer to PEP 427.
    """
class InvalidSdistFilename(ValueError):
    """Raised for an sdist filename that cannot be parsed; users should
    refer to the packaging user guide.
    """
# Core metadata spec for `Name`
# Accepts ASCII letters/digits optionally separated by ., _, -, and the name
# must begin and end with a letter or digit.
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
# Runs of ., _, - collapse to a single "-" during PEP 503 canonicalization.
_canonicalize_regex = re.compile(r"[-_.]+")
# Matches a name already in canonical form: lowercase, single dashes only
# (the lookahead rejects consecutive dashes).
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    """Lower-case *name* and collapse runs of ``-``, ``_``, ``.`` to a single
    dash (PEP 503).  With ``validate=True``, first reject names that break
    the core-metadata ``Name`` rules by raising :class:`InvalidName`."""
    if validate and _validate_regex.match(name) is None:
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503.
    canonical = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, canonical)
def is_normalized_name(name: str) -> bool:
    """Return True if *name* is already in PEP 503 canonical form."""
    return bool(_normalized_regex.match(name))
@functools.singledispatch
def canonicalize_version(
    version: Version | str, *, strip_trailing_zero: bool = True
) -> str:
    """
    Return a canonical form of a version as a string.

    >>> canonicalize_version('1.0.1')
    '1.0.1'

    Per PEP 625, versions may have multiple canonical forms, differing
    only by trailing zeros.

    >>> canonicalize_version('1.0.0')
    '1'
    >>> canonicalize_version('1.0.0', strip_trailing_zero=False)
    '1.0.0'

    Invalid versions are returned unaltered.

    >>> canonicalize_version('foo bar baz')
    'foo bar baz'
    """
    if strip_trailing_zero:
        # _TrimmedRelease drops trailing ".0" release segments.
        return str(_TrimmedRelease(str(version)))
    return str(version)
@canonicalize_version.register
def _(version: str, *, strip_trailing_zero: bool = True) -> str:
    """String overload: parse first, then delegate to the Version path."""
    try:
        parsed_version = Version(version)
    except InvalidVersion:
        # Legacy versions cannot be normalized
        return version
    else:
        return canonicalize_version(
            parsed_version, strip_trailing_zero=strip_trailing_zero
        )
def parse_wheel_filename(
    filename: str,
) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
    """Split a wheel filename into (name, version, build tag, tag set),
    raising :class:`InvalidWheelFilename` for malformed inputs (PEP 427)."""
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
        )

    stem = filename[:-4]
    dash_count = stem.count("-")
    if dash_count not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {stem!r}"
        )

    # Split off name/version/(build); the trailing compressed tag set keeps
    # its internal dashes.
    parts = stem.split("-", dash_count - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {stem!r}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {stem!r}"
        ) from e

    if dash_count == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in {stem!r}"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()

    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
    """Split an sdist filename into (canonical name, version), raising
    :class:`InvalidSdistFilename` for unsupported extensions or bad parts."""
    for extension in (".tar.gz", ".zip"):
        if filename.endswith(extension):
            file_stem = filename[: -len(extension)]
            break
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename!r}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")

    name = canonicalize_name(name_part)
    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename!r}"
        ) from e
    return (name, version)

Some files were not shown because too many files have changed in this diff Show More