2 Commits

100 files changed with 55 additions and 4807 deletions
  1. __pycache__/app.cpython-310.pyc (BIN)
  2. __pycache__/security.cpython-310.pyc (BIN)
  3. app.old (+2 -2)
  4. app.py (+11 -2)
  5. config_env.py (+15 -0)
  6. routes/__pycache__/reslevis.cpython-310.pyc (BIN)
  7. routes/reslevis.py (+12 -27)
  8. security.py (+15 -8)
  9. venv/bin/Activate.ps1 (+0 -247)
  10. venv/bin/activate (+0 -69)
  11. venv/bin/activate.csh (+0 -26)
  12. venv/bin/activate.fish (+0 -69)
  13. venv/bin/change_tz (+0 -8)
  14. venv/bin/dotenv (+0 -8)
  15. venv/bin/email_validator (+0 -8)
  16. venv/bin/fastapi (+0 -8)
  17. venv/bin/httpx (+0 -8)
  18. venv/bin/ics_diff (+0 -8)
  19. venv/bin/markdown-it (+0 -8)
  20. venv/bin/normalizer (+0 -8)
  21. venv/bin/openapi-python-client (+0 -7)
  22. venv/bin/pip (+0 -8)
  23. venv/bin/pip3 (+0 -8)
  24. venv/bin/pip3.10 (+0 -8)
  25. venv/bin/pygmentize (+0 -8)
  26. venv/bin/pyrsa-decrypt (+0 -7)
  27. venv/bin/pyrsa-encrypt (+0 -7)
  28. venv/bin/pyrsa-keygen (+0 -7)
  29. venv/bin/pyrsa-priv2pub (+0 -7)
  30. venv/bin/pyrsa-sign (+0 -7)
  31. venv/bin/pyrsa-verify (+0 -7)
  32. venv/bin/python (+0 -1)
  33. venv/bin/python3 (+0 -1)
  34. venv/bin/python3.10 (+0 -1)
  35. venv/bin/ruff (BIN)
  36. venv/bin/typer (+0 -8)
  37. venv/bin/uvicorn (+0 -8)
  38. venv/bin/watchfiles (+0 -8)
  39. venv/bin/websockets (+0 -8)
  40. venv/include/site/python3.10/greenlet/greenlet.h (+0 -164)
  41. venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER (+0 -1)
  42. venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt (+0 -28)
  43. venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/METADATA (+0 -92)
  44. venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/RECORD (+0 -14)
  45. venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL (+0 -6)
  46. venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt (+0 -1)
  47. venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/AUTHORS.rst (+0 -7)
  48. venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/INSTALLER (+0 -1)
  49. venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/LICENSE (+0 -21)
  50. venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/METADATA (+0 -106)
  51. venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/RECORD (+0 -32)
  52. venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/WHEEL (+0 -5)
  53. venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/top_level.txt (+0 -1)
  54. venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/INSTALLER (+0 -1)
  55. venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/LICENSE (+0 -20)
  56. venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/METADATA (+0 -46)
  57. venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/RECORD (+0 -43)
  58. venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/WHEEL (+0 -6)
  59. venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/top_level.txt (+0 -2)
  60. venv/lib/python3.10/site-packages/__pycache__/antiorm.cpython-310.pyc (BIN)
  61. venv/lib/python3.10/site-packages/__pycache__/antipool.cpython-310.pyc (BIN)
  62. venv/lib/python3.10/site-packages/__pycache__/dbapiext.cpython-310.pyc (BIN)
  63. venv/lib/python3.10/site-packages/__pycache__/dbrelmgr.cpython-310.pyc (BIN)
  64. venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc (BIN)
  65. venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc (BIN)
  66. venv/lib/python3.10/site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so (BIN)
  67. venv/lib/python3.10/site-packages/_distutils_hack/__init__.py (+0 -239)
  68. venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc (BIN)
  69. venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc (BIN)
  70. venv/lib/python3.10/site-packages/_distutils_hack/override.py (+0 -1)
  71. venv/lib/python3.10/site-packages/_ruamel_yaml.cpython-310-x86_64-linux-gnu.so (BIN)
  72. venv/lib/python3.10/site-packages/_yaml/__init__.py (+0 -33)
  73. venv/lib/python3.10/site-packages/_yaml/__pycache__/__init__.cpython-310.pyc (BIN)
  74. venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/INSTALLER (+0 -1)
  75. venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/METADATA (+0 -295)
  76. venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/RECORD (+0 -10)
  77. venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/WHEEL (+0 -4)
  78. venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE (+0 -21)
  79. venv/lib/python3.10/site-packages/annotated_types/__init__.py (+0 -432)
  80. venv/lib/python3.10/site-packages/annotated_types/__pycache__/__init__.cpython-310.pyc (BIN)
  81. venv/lib/python3.10/site-packages/annotated_types/__pycache__/test_cases.cpython-310.pyc (BIN)
  82. venv/lib/python3.10/site-packages/annotated_types/py.typed (+0 -0)
  83. venv/lib/python3.10/site-packages/annotated_types/test_cases.py (+0 -151)
  84. venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/INSTALLER (+0 -1)
  85. venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/METADATA (+0 -26)
  86. venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/RECORD (+0 -14)
  87. venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/WHEEL (+0 -5)
  88. venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/licenses/COPYING (+0 -339)
  89. venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/top_level.txt (+0 -4)
  90. venv/lib/python3.10/site-packages/antiorm.py (+0 -731)
  91. venv/lib/python3.10/site-packages/antipool.py (+0 -966)
  92. venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/INSTALLER (+0 -1)
  93. venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/LICENSE (+0 -20)
  94. venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/METADATA (+0 -105)
  95. venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/RECORD (+0 -88)
  96. venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/WHEEL (+0 -5)
  97. venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/entry_points.txt (+0 -2)
  98. venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/top_level.txt (+0 -1)
  99. venv/lib/python3.10/site-packages/anyio/__init__.py (+0 -85)
  100. venv/lib/python3.10/site-packages/anyio/__pycache__/__init__.cpython-310.pyc (BIN)

__pycache__/app.cpython-310.pyc (BIN)


__pycache__/security.cpython-310.pyc (BIN)


app.old (+2 -2)

@@ -64,8 +64,8 @@ reslevis_router = _reslevis.router
from fastapi import FastAPI, Security
from fastapi.security import OAuth2AuthorizationCodeBearer

AUTH_URL = "https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/auth"
TOKEN_URL = "https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/token"
#AUTH_URL = "https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/auth"
#TOKEN_URL = "https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/token"

oauth2 = OAuth2AuthorizationCodeBearer(
authorizationUrl=AUTH_URL,


app.py (+11 -2)

@@ -9,6 +9,9 @@ from typing import Any, Dict, List, Optional
# import wave
import os
import shutil
# import enviroment variables
import config_env
#other
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Callable
@@ -63,8 +66,14 @@ reslevis_router = _reslevis.router
from fastapi import FastAPI, Security
from fastapi.security import OAuth2AuthorizationCodeBearer

AUTH_URL = "https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/auth"
TOKEN_URL = "https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/token"
#AUTH_URL = "https://10.251.0.30:10002/realms/API.Server.local/protocol/openid-connect/auth"
#AUTH_URL = "https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/auth"
#TOKEN_URL = "https://10.251.0.30:10002/realms/API.Server.local/protocol/openid-connect/token"
#TOKEN_URL = "https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/token"


AUTH_URL = config_env.KEYCLOAK_AUTH_URL
TOKEN_URL = config_env.KEYCLOAK_TOKEN_URL

oauth2 = OAuth2AuthorizationCodeBearer(
authorizationUrl=AUTH_URL,
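
For orientation, a minimal sketch of how the two config-driven URLs are typically wired into FastAPI's authorization-code flow and attached to an endpoint; the whoami endpoint and the Security usage are illustrative assumptions, not code from this commit:

# Sketch only, not part of the diff: both URLs now come from config_env.
from fastapi import FastAPI, Security
from fastapi.security import OAuth2AuthorizationCodeBearer

import config_env

oauth2 = OAuth2AuthorizationCodeBearer(
    authorizationUrl=config_env.KEYCLOAK_AUTH_URL,
    tokenUrl=config_env.KEYCLOAK_TOKEN_URL,
)

app = FastAPI()

@app.get("/whoami")  # hypothetical endpoint for illustration
def whoami(token: str = Security(oauth2)):
    # Security(oauth2) only extracts the Bearer token here;
    # signature and claim validation live in security.py.
    return {"token_present": bool(token)}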


config_env.py (+15 -0)

@@ -0,0 +1,15 @@
#This file reads the .env where the variables should be stored
import os
from dotenv import load_dotenv

load_dotenv()

SECRET = os.getenv("SECRET")
KEYCLOAK_AUDIENCE = os.getenv("KEYCLOAK_AUDIENCE")
KEYCLOAK_SERVER = os.getenv("KEYCLOAK_SERVER")
KEYCLOAK_ISSUER = os.getenv("KEYCLOAK_ISSUER")
KEYCLOAK_PROTOCOL_ENDPOINT = os.getenv("KEYCLOAK_PROTOCOL_ENDPOINT")
KEYCLOAK_JWKS_URL = os.getenv("KEYCLOAK_JWKS_URL")
KEYCLOAK_AUTH_URL = os.getenv("KEYCLOAK_AUTH_URL")
KEYCLOAK_TOKEN_URL = os.getenv("KEYCLOAK_TOKEN_URL")
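
Note that os.getenv returns None rather than failing when a name is missing from the environment or the .env file, so a typo in a .env key only surfaces later as a None URL. A minimal sketch of a startup check that could sit next to this module, under that assumption; it is not part of the commit:

# Sketch only: fail fast if the .env file is missing a required setting.
import config_env

REQUIRED = (
    "SECRET",
    "KEYCLOAK_ISSUER",
    "KEYCLOAK_JWKS_URL",
    "KEYCLOAK_AUTH_URL",
    "KEYCLOAK_TOKEN_URL",
)

missing = [name for name in REQUIRED if not getattr(config_env, name)]
if missing:
    raise RuntimeError("Missing values in .env: " + ", ".join(missing))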


routes/__pycache__/reslevis.cpython-310.pyc (BIN)


routes/reslevis.py (+12 -27)

@@ -6,15 +6,11 @@ from schemas.reslevis import (
)
from logica_reslevis.gateway import GatewayJsonRepository

# importa le dipendenze di sicurezza
from security import get_current_user, require_roles
from security import get_current_user

gateway_repo = GatewayJsonRepository()
router = APIRouter()

# -----------------------
# Endpoints pubblici (se vuoi che restino pubblici, niente Depends)
# -----------------------
@router.get("/getBuildings", response_model=List[BuildingItem], tags=["Reslevis"])
def getBuildings():
return []
@@ -27,9 +23,6 @@ def getPlans():
def getZones():
return []

# -----------------------
# Endpoints protetti: richiedono almeno un Bearer token valido
# -----------------------
@router.get("/getGateways", response_model=List[GatewayItem], tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def getGateways():
return gateway_repo.list()
@@ -54,18 +47,10 @@ def getAlarms():
def getTracks():
return []

# -----------------------
# Operazioni di scrittura su Gateway:
# - Token valido
# - Ruolo richiesto (esempio: "reslevis:write")
# Cambia il nome ruolo per allinearlo a come lo hai definito in Keycloak
# -----------------------
write_role = "reslevis:write" # esempio; usa il tuo realm/client role

@router.post(
"/postGateway",
tags=["Reslevis"],
dependencies=[Depends(require_roles(write_role))]
dependencies=[Depends(get_current_user)]
)
def postGateway(item: GatewayItem):
try:
@@ -79,7 +64,7 @@ def postGateway(item: GatewayItem):
@router.put(
"/updateGateway",
tags=["Reslevis"],
dependencies=[Depends(require_roles(write_role))]
dependencies=[Depends(get_current_user)]
)
def updateGateway(item: GatewayItem):
try:
@@ -96,7 +81,7 @@ def updateGateway(item: GatewayItem):
@router.delete(
"/removeGateway/{gateway_id}",
tags=["Reslevis"],
dependencies=[Depends(require_roles(write_role))]
dependencies=[Depends(get_current_user)]
)
def removeGateway(gateway_id: str):
try:
@@ -107,35 +92,35 @@ def removeGateway(gateway_id: str):
except Exception as e:
raise HTTPException(status_code=500, detail=f"Errore interno: {e}")

@router.post("/postBuilding", tags=["Reslevis"], dependencies=[Depends(require_roles(write_role))])
@router.post("/postBuilding", tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def postBuilding(item: BuildingItem):
return {"message": "OK"}

@router.post("/postPlan", tags=["Reslevis"], dependencies=[Depends(require_roles(write_role))])
@router.post("/postPlan", tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def postPlan(item: PlanItem):
return {"message": "OK"}

@router.post("/postZone", tags=["Reslevis"], dependencies=[Depends(require_roles(write_role))])
@router.post("/postZone", tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def postZone(item: ZoneItem):
return {"message": "OK"}

@router.post("/postTracker", tags=["Reslevis"], dependencies=[Depends(require_roles(write_role))])
@router.post("/postTracker", tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def postTracker(item: TrackerItem):
return {"message": "OK"}

@router.post("/postOperator", tags=["Reslevis"], dependencies=[Depends(require_roles(write_role))])
@router.post("/postOperator", tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def postOperator(item: OperatorItem):
return {"message": "OK"}

@router.post("/postSubject", tags=["Reslevis"], dependencies=[Depends(require_roles(write_role))])
@router.post("/postSubject", tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def postSubject(item: SubjectItem):
return {"message": "OK"}

@router.post("/postAlarm", tags=["Reslevis"], dependencies=[Depends(require_roles(write_role))])
@router.post("/postAlarm", tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def postAlarm(item: AlarmItem):
return {"message": "OK"}

@router.post("/postTrack", tags=["Reslevis"], dependencies=[Depends(require_roles(write_role))])
@router.post("/postTrack", tags=["Reslevis"], dependencies=[Depends(get_current_user)])
def postTrack(item: TrackItem):
return {"message": "OK"}
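
After this change every write endpoint carries the same dependencies=[Depends(get_current_user)] and the require_roles(write_role) checks are gone from this router, so any valid Bearer token is now sufficient for writes. If that remains the policy, FastAPI also allows declaring the dependency once at router level instead of repeating it per route. A minimal sketch of that alternative (not what this commit does, and note it would also protect the endpoints that are currently public):

# Sketch only: one router-wide dependency instead of repeating it per route.
from fastapi import APIRouter, Depends
from security import get_current_user

protected_router = APIRouter(dependencies=[Depends(get_current_user)])

@protected_router.post("/postGateway", tags=["Reslevis"])
def postGateway(item: dict):  # GatewayItem in the real code; dict keeps the sketch self-contained
    return {"message": "OK"}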


security.py (+15 -8)

@@ -3,6 +3,7 @@ from typing import Dict, Any, List, Optional
import os
import logging
import httpx
import config_env
from jose import jwt, JWTError
from fastapi import HTTPException, status, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
@@ -10,14 +11,20 @@ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
logger = logging.getLogger("security")

# === CONFIG ===
KEYCLOAK_ISSUER = os.getenv(
"KEYCLOAK_ISSUER",
"https://192.168.1.3:10002/realms/API.Server.local",
)
KEYCLOAK_JWKS_URL = os.getenv(
"KEYCLOAK_JWKS_URL",
"https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/certs",
)
#KEYCLOAK_ISSUER = os.getenv(
# "KEYCLOAK_ISSUER",
# "https://10.251.0.30:10002/realms/API.Server.local",
#"https://192.168.1.3:10002/realms/API.Server.local",
#)
#KEYCLOAK_JWKS_URL = os.getenv(
# "KEYCLOAK_JWKS_URL",
# "https://10.251.0.30:10002/realms/API.Server.local/protocol/openid-connect/certs",
#"https://192.168.1.3:10002/realms/API.Server.local/protocol/openid-connect/certs",
#)

KEYCLOAK_ISSUER = config_env.KEYCLOAK_ISSUER
KEYCLOAK_JWKS_URL = config_env.KEYCLOAK_JWKS_URL

KEYCLOAK_AUDIENCE = os.getenv("KEYCLOAK_AUDIENCE", "Fastapi")

ALGORITHMS = ["RS256", "RS384", "RS512", "PS256", "PS384", "PS512"]
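
The hunk shows only the configuration that get_current_user reads, not the function itself. For context, a minimal sketch of how an issuer, JWKS URL, audience, and algorithm list are typically combined to validate a Keycloak-issued token with python-jose; treat it as an assumed shape, not the project's exact implementation (verify=False is an assumption for a lab setup with self-signed certificates):

# Sketch only: validate a Keycloak bearer token against the realm's JWKS.
import os

import httpx
from jose import jwt, JWTError
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials

import config_env

KEYCLOAK_ISSUER = config_env.KEYCLOAK_ISSUER
KEYCLOAK_JWKS_URL = config_env.KEYCLOAK_JWKS_URL
KEYCLOAK_AUDIENCE = os.getenv("KEYCLOAK_AUDIENCE", "Fastapi")
ALGORITHMS = ["RS256", "RS384", "RS512", "PS256", "PS384", "PS512"]

bearer = HTTPBearer()

def get_current_user(creds: HTTPAuthorizationCredentials = Depends(bearer)) -> dict:
    token = creds.credentials
    try:
        # Fetch the realm's signing keys and pick the one matching the token's kid.
        jwks = httpx.get(KEYCLOAK_JWKS_URL, verify=False).json()
        kid = jwt.get_unverified_header(token).get("kid")
        key = next(k for k in jwks["keys"] if k["kid"] == kid)
        # Decoding verifies signature, expiry, audience, and issuer in one call.
        return jwt.decode(token, key, algorithms=ALGORITHMS,
                          audience=KEYCLOAK_AUDIENCE, issuer=KEYCLOAK_ISSUER)
    except (JWTError, StopIteration, httpx.HTTPError) as exc:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(exc))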


venv/bin/Activate.ps1 (+0 -247)

@@ -1,247 +0,0 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
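
This file and every remaining venv/ entry in the commit delete a virtual environment that had been checked into the repository, which accounts for most of the 4807 removed lines. A two-line .gitignore sketch that would keep a recreated environment out of future commits, assuming it lives in venv/ or .venv/ at the project root:

venv/
.venv/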

venv/bin/activate (+0 -69)

@@ -1,69 +0,0 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi

if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi

unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV=/opt/FastAPI/.venv
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/"bin":$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1='(.venv) '"${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT='(.venv) '
export VIRTUAL_ENV_PROMPT
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi

venv/bin/activate.csh (+0 -26)

@@ -1,26 +0,0 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV /opt/FastAPI/.venv

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = '(.venv) '"$prompt"
setenv VIRTUAL_ENV_PROMPT '(.venv) '
endif

alias pydoc python -m pydoc

rehash

venv/bin/activate.fish (+0 -69)

@@ -1,69 +0,0 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/); you cannot run it directly.

function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end

if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
set -e _OLD_FISH_PROMPT_OVERRIDE
# prevents error when using nested fish instances (Issue #93858)
if functions -q _old_fish_prompt
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
end

set -e VIRTUAL_ENV
set -e VIRTUAL_ENV_PROMPT
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV /opt/FastAPI/.venv

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/"bin $PATH

# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.

# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt

# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status

# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) '(.venv) ' (set_color normal)

# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end

set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
set -gx VIRTUAL_ENV_PROMPT '(.venv) '
end

venv/bin/change_tz (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from vobject.change_tz import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/dotenv (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from dotenv.__main__ import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli())

venv/bin/email_validator (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from email_validator.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/fastapi (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from fastapi_cli.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/httpx (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from httpx import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/ics_diff (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from vobject.ics_diff import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/markdown-it (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from markdown_it.cli.parse import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/normalizer (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from charset_normalizer import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli.cli_detect())

venv/bin/openapi-python-client (+0 -7)

@@ -1,7 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
import sys
from openapi_python_client.cli import app
if __name__ == '__main__':
if sys.argv[0].endswith('.exe'):
sys.argv[0] = sys.argv[0][:-4]
sys.exit(app())

venv/bin/pip (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/pip3 (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/pip3.10 (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/pygmentize (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pygments.cmdline import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/pyrsa-decrypt (+0 -7)

@@ -1,7 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
import sys
from rsa.cli import decrypt
if __name__ == '__main__':
if sys.argv[0].endswith('.exe'):
sys.argv[0] = sys.argv[0][:-4]
sys.exit(decrypt())

venv/bin/pyrsa-encrypt (+0 -7)

@@ -1,7 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
import sys
from rsa.cli import encrypt
if __name__ == '__main__':
if sys.argv[0].endswith('.exe'):
sys.argv[0] = sys.argv[0][:-4]
sys.exit(encrypt())

venv/bin/pyrsa-keygen (+0 -7)

@@ -1,7 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
import sys
from rsa.cli import keygen
if __name__ == '__main__':
if sys.argv[0].endswith('.exe'):
sys.argv[0] = sys.argv[0][:-4]
sys.exit(keygen())

venv/bin/pyrsa-priv2pub (+0 -7)

@@ -1,7 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
import sys
from rsa.util import private_to_public
if __name__ == '__main__':
if sys.argv[0].endswith('.exe'):
sys.argv[0] = sys.argv[0][:-4]
sys.exit(private_to_public())

venv/bin/pyrsa-sign (+0 -7)

@@ -1,7 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
import sys
from rsa.cli import sign
if __name__ == '__main__':
if sys.argv[0].endswith('.exe'):
sys.argv[0] = sys.argv[0][:-4]
sys.exit(sign())

venv/bin/pyrsa-verify (+0 -7)

@@ -1,7 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
import sys
from rsa.cli import verify
if __name__ == '__main__':
if sys.argv[0].endswith('.exe'):
sys.argv[0] = sys.argv[0][:-4]
sys.exit(verify())

venv/bin/python (+0 -1)

@@ -1 +0,0 @@
/usr/lib/python-exec/python3.10/python

venv/bin/python3 (+0 -1)

@@ -1 +0,0 @@
python

venv/bin/python3.10 (+0 -1)

@@ -1 +0,0 @@
python

venv/bin/ruff (BIN)


venv/bin/typer (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from typer.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/uvicorn (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from uvicorn.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/bin/watchfiles (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from watchfiles.cli import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli())

venv/bin/websockets (+0 -8)

@@ -1,8 +0,0 @@
#!/opt/FastAPI/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from websockets.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

venv/include/site/python3.10/greenlet/greenlet.h (+0 -164)

@@ -1,164 +0,0 @@
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */

/* Greenlet object interface */

#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H


#include <Python.h>

#ifdef __cplusplus
extern "C" {
#endif

/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"

#ifndef GREENLET_MODULE
#define implementation_ptr_t void*
#endif

typedef struct _greenlet {
PyObject_HEAD
PyObject* weakreflist;
PyObject* dict;
implementation_ptr_t pimpl;
} PyGreenlet;

#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))


/* C API functions */

/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 12

#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2

#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7

#define PyGreenlet_MAIN_NUM 8
#define PyGreenlet_STARTED_NUM 9
#define PyGreenlet_ACTIVE_NUM 10
#define PyGreenlet_GET_PARENT_NUM 11

#ifndef GREENLET_MODULE
/* This section is used by modules that uses the greenlet C API */
static void** _PyGreenlet_API = NULL;

# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])

# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])

# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])

/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])

/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])

/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])

/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])

/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])

/*
* PyGreenlet_GetParent(PyObject* greenlet)
*
* return greenlet.parent;
*
* This could return NULL even if there is no exception active.
* If it does not return NULL, you are responsible for decrementing the
* reference count.
*/
# define PyGreenlet_GetParent \
(*(PyGreenlet* (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])

/*
* deprecated, undocumented alias.
*/
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent

# define PyGreenlet_MAIN \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_MAIN_NUM])

# define PyGreenlet_STARTED \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_STARTED_NUM])

# define PyGreenlet_ACTIVE \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])




/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}

#endif /* GREENLET_MODULE */

#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */

venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER (+0 -1)

@@ -1 +0,0 @@
pip

venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt (+0 -28)

@@ -1,28 +0,0 @@
Copyright 2010 Pallets

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/METADATA (+0 -92)

@@ -1,92 +0,0 @@
Metadata-Version: 2.1
Name: MarkupSafe
Version: 3.0.2
Summary: Safely add untrusted strings to HTML/XML markup.
Maintainer-email: Pallets <contact@palletsprojects.com>
License: Copyright 2010 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
Project-URL: Source, https://github.com/pallets/markupsafe/
Project-URL: Chat, https://discord.gg/pallets
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Text Processing :: Markup :: HTML
Classifier: Typing :: Typed
Requires-Python: >=3.9
Description-Content-Type: text/markdown
License-File: LICENSE.txt

# MarkupSafe

MarkupSafe implements a text object that escapes characters so it is
safe to use in HTML and XML. Characters that have special meanings are
replaced so that they display as the actual characters. This mitigates
injection attacks, meaning untrusted user input can safely be displayed
on a page.


## Examples

```pycon
>>> from markupsafe import Markup, escape

>>> # escape replaces special characters and wraps in Markup
>>> escape("<script>alert(document.cookie);</script>")
Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')

>>> # wrap in Markup to mark text "safe" and prevent escaping
>>> Markup("<strong>Hello</strong>")
Markup('<strong>hello</strong>')

>>> escape(Markup("<strong>Hello</strong>"))
Markup('<strong>hello</strong>')

>>> # Markup is a str subclass
>>> # methods and operators escape their arguments
>>> template = Markup("Hello <em>{name}</em>")
>>> template.format(name='"World"')
Markup('Hello <em>&#34;World&#34;</em>')
```

## Donate

The Pallets organization develops and supports MarkupSafe and other
popular packages. In order to grow the community of contributors and
users, and allow the maintainers to devote more time to the projects,
[please donate today][].

[please donate today]: https://palletsprojects.com/donate

venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/RECORD (+0 -14)

@@ -1,14 +0,0 @@
MarkupSafe-3.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
MarkupSafe-3.0.2.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
MarkupSafe-3.0.2.dist-info/METADATA,sha256=aAwbZhSmXdfFuMM-rEHpeiHRkBOGESyVLJIuwzHP-nw,3975
MarkupSafe-3.0.2.dist-info/RECORD,,
MarkupSafe-3.0.2.dist-info/WHEEL,sha256=_kVlewavvOSnwZE_whBk3jlE_Ob-nL5GvlVcLkpXSD8,151
MarkupSafe-3.0.2.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
markupsafe/__init__.py,sha256=sr-U6_27DfaSrj5jnHYxWN-pvhM27sjlDplMDPZKm7k,13214
markupsafe/__pycache__/__init__.cpython-310.pyc,,
markupsafe/__pycache__/_native.cpython-310.pyc,,
markupsafe/_native.py,sha256=hSLs8Jmz5aqayuengJJ3kdT5PwNpBWpKrmQSdipndC8,210
markupsafe/_speedups.c,sha256=O7XulmTo-epI6n2FtMVOrJXl8EAaIwD2iNYmBI5SEoQ,4149
markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so,sha256=x4RoxWgyqAEokk-AZrWvrLDxLE-dm-zZSZYV_gOiLJA,34976
markupsafe/_speedups.pyi,sha256=ENd1bYe7gbBUf2ywyYWOGUpnXOHNJ-cgTNqetlW8h5k,41
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL (+0 -6)

@@ -1,6 +0,0 @@
Wheel-Version: 1.0
Generator: setuptools (75.2.0)
Root-Is-Purelib: false
Tag: cp310-cp310-manylinux_2_17_x86_64
Tag: cp310-cp310-manylinux2014_x86_64


venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt (+0 -1)

@@ -1 +0,0 @@
markupsafe

venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/AUTHORS.rst (+0 -7)

@@ -1,7 +0,0 @@
Authors
=======

``pyjwt`` is currently written and maintained by `Jose Padilla <https://github.com/jpadilla>`_.
Originally written and maintained by `Jeff Lindsay <https://github.com/progrium>`_.

A full list of contributors can be found on GitHub’s `overview <https://github.com/jpadilla/pyjwt/graphs/contributors>`_.

venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/INSTALLER (+0 -1)

@@ -1 +0,0 @@
pip

venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/LICENSE (+0 -21)

@@ -1,21 +0,0 @@
The MIT License (MIT)

Copyright (c) 2015-2022 José Padilla

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/METADATA (+0 -106)

@@ -1,106 +0,0 @@
Metadata-Version: 2.1
Name: PyJWT
Version: 2.10.1
Summary: JSON Web Token implementation in Python
Author-email: Jose Padilla <hello@jpadilla.com>
License: MIT
Project-URL: Homepage, https://github.com/jpadilla/pyjwt
Keywords: json,jwt,security,signing,token,web
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Natural Language :: English
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Topic :: Utilities
Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE
License-File: AUTHORS.rst
Provides-Extra: crypto
Requires-Dist: cryptography>=3.4.0; extra == "crypto"
Provides-Extra: dev
Requires-Dist: coverage[toml]==5.0.4; extra == "dev"
Requires-Dist: cryptography>=3.4.0; extra == "dev"
Requires-Dist: pre-commit; extra == "dev"
Requires-Dist: pytest<7.0.0,>=6.0.0; extra == "dev"
Requires-Dist: sphinx; extra == "dev"
Requires-Dist: sphinx-rtd-theme; extra == "dev"
Requires-Dist: zope.interface; extra == "dev"
Provides-Extra: docs
Requires-Dist: sphinx; extra == "docs"
Requires-Dist: sphinx-rtd-theme; extra == "docs"
Requires-Dist: zope.interface; extra == "docs"
Provides-Extra: tests
Requires-Dist: coverage[toml]==5.0.4; extra == "tests"
Requires-Dist: pytest<7.0.0,>=6.0.0; extra == "tests"

PyJWT
=====

.. image:: https://github.com/jpadilla/pyjwt/workflows/CI/badge.svg
:target: https://github.com/jpadilla/pyjwt/actions?query=workflow%3ACI

.. image:: https://img.shields.io/pypi/v/pyjwt.svg
:target: https://pypi.python.org/pypi/pyjwt

.. image:: https://codecov.io/gh/jpadilla/pyjwt/branch/master/graph/badge.svg
:target: https://codecov.io/gh/jpadilla/pyjwt

.. image:: https://readthedocs.org/projects/pyjwt/badge/?version=stable
:target: https://pyjwt.readthedocs.io/en/stable/

A Python implementation of `RFC 7519 <https://tools.ietf.org/html/rfc7519>`_. Original implementation was written by `@progrium <https://github.com/progrium>`_.

Sponsor
-------

.. |auth0-logo| image:: https://github.com/user-attachments/assets/ee98379e-ee76-4bcb-943a-e25c4ea6d174
:width: 160px

+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/signup <https://auth0.com/signup?utm_source=external_sites&utm_medium=pyjwt&utm_campaign=devn_signup>`_. |
+--------------+-----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+

Installing
----------

Install with **pip**:

.. code-block:: console

$ pip install PyJWT


Usage
-----

.. code-block:: pycon

>>> import jwt
>>> encoded = jwt.encode({"some": "payload"}, "secret", algorithm="HS256")
>>> print(encoded)
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzb21lIjoicGF5bG9hZCJ9.4twFt5NiznN84AWoo1d7KO1T_yoc0Z6XOpOVswacPZg
>>> jwt.decode(encoded, "secret", algorithms=["HS256"])
{'some': 'payload'}

Documentation
-------------

View the full docs online at https://pyjwt.readthedocs.io/en/stable/


Tests
-----

You can run tests from the project root after cloning with:

.. code-block:: console

$ tox

venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/RECORD (+0 -32)

@@ -1,32 +0,0 @@
PyJWT-2.10.1.dist-info/AUTHORS.rst,sha256=klzkNGECnu2_VY7At89_xLBF3vUSDruXk3xwgUBxzwc,322
PyJWT-2.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyJWT-2.10.1.dist-info/LICENSE,sha256=eXp6ICMdTEM-nxkR2xcx0GtYKLmPSZgZoDT3wPVvXOU,1085
PyJWT-2.10.1.dist-info/METADATA,sha256=EkewF6D6KU8SGaaQzVYfxUUU1P_gs_dp1pYTkoYvAx8,3990
PyJWT-2.10.1.dist-info/RECORD,,
PyJWT-2.10.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
PyJWT-2.10.1.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4
jwt/__init__.py,sha256=VB2vFKuboTjcDGeZ8r-UqK_dz3NsQSQEqySSICby8Xg,1711
jwt/__pycache__/__init__.cpython-310.pyc,,
jwt/__pycache__/algorithms.cpython-310.pyc,,
jwt/__pycache__/api_jwk.cpython-310.pyc,,
jwt/__pycache__/api_jws.cpython-310.pyc,,
jwt/__pycache__/api_jwt.cpython-310.pyc,,
jwt/__pycache__/exceptions.cpython-310.pyc,,
jwt/__pycache__/help.cpython-310.pyc,,
jwt/__pycache__/jwk_set_cache.cpython-310.pyc,,
jwt/__pycache__/jwks_client.cpython-310.pyc,,
jwt/__pycache__/types.cpython-310.pyc,,
jwt/__pycache__/utils.cpython-310.pyc,,
jwt/__pycache__/warnings.cpython-310.pyc,,
jwt/algorithms.py,sha256=cKr-XEioe0mBtqJMCaHEswqVOA1Z8Purt5Sb3Bi-5BE,30409
jwt/api_jwk.py,sha256=6F1r7rmm8V5qEnBKA_xMjS9R7VoANe1_BL1oD2FrAjE,4451
jwt/api_jws.py,sha256=aM8vzqQf6mRrAw7bRy-Moj_pjWsKSVQyYK896AfMjJU,11762
jwt/api_jwt.py,sha256=OGT4hok1l5A6FH_KdcrU5g6u6EQ8B7em0r9kGM9SYgA,14512
jwt/exceptions.py,sha256=bUIOJ-v9tjopTLS-FYOTc3kFx5WP5IZt7ksN_HE1G9Q,1211
jwt/help.py,sha256=vFdNzjQoAch04XCMYpCkyB2blaqHAGAqQrtf9nSPkdk,1808
jwt/jwk_set_cache.py,sha256=hBKmN-giU7-G37L_XKgc_OZu2ah4wdbj1ZNG_GkoSE8,959
jwt/jwks_client.py,sha256=p9b-IbQqo2tEge9Zit3oSPBFNePqwho96VLbnUrHUWs,4259
jwt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
jwt/types.py,sha256=VnhGv_VFu5a7_mrPoSCB7HaNLrJdhM8Sq1sSfEg0gLU,99
jwt/utils.py,sha256=hxOjvDBheBYhz-RIPiEz7Q88dSUSTMzEdKE_Ww2VdJw,3640
jwt/warnings.py,sha256=50XWOnyNsIaqzUJTk6XHNiIDykiL763GYA92MjTKmok,59

venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/WHEEL (+0 -5)

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: setuptools (75.6.0)
Root-Is-Purelib: true
Tag: py3-none-any


venv/lib/python3.10/site-packages/PyJWT-2.10.1.dist-info/top_level.txt (+0 -1)

@@ -1 +0,0 @@
jwt

venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/INSTALLER (+0 -1)

@@ -1 +0,0 @@
pip

venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/LICENSE (+0 -20)

@@ -1,20 +0,0 @@
Copyright (c) 2017-2021 Ingy döt Net
Copyright (c) 2006-2016 Kirill Simonov

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/METADATA (+0 -46)

@@ -1,46 +0,0 @@
Metadata-Version: 2.1
Name: PyYAML
Version: 6.0.2
Summary: YAML parser and emitter for Python
Home-page: https://pyyaml.org/
Download-URL: https://pypi.org/project/PyYAML/
Author: Kirill Simonov
Author-email: xi@resolvent.net
License: MIT
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
Project-URL: CI, https://github.com/yaml/pyyaml/actions
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
Project-URL: Source Code, https://github.com/yaml/pyyaml
Platform: Any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Cython
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Markup
Requires-Python: >=3.8
License-File: LICENSE

YAML is a data serialization format designed for human readability
and interaction with scripting languages. PyYAML is a YAML parser
and emitter for Python.

PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
support, capable extension API, and sensible error messages. PyYAML
supports standard YAML tags and provides Python-specific tags that
allow representing arbitrary Python objects.

PyYAML is applicable for a broad range of tasks from complex
configuration files to object serialization and persistence.
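
As a quick illustration of the parse/emit round trip described above — a minimal sketch using PyYAML's `safe_load`/`safe_dump`, which stick to standard tags rather than the arbitrary-object ones:

```python
import yaml

# parse a YAML document into plain Python objects
config = yaml.safe_load("retries: 3\nhosts:\n  - alpha\n  - beta\n")
assert config == {"retries": 3, "hosts": ["alpha", "beta"]}

# emit it back out as YAML text
print(yaml.safe_dump(config))
```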

+ 0
- 43
venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/RECORD Ver ficheiro

@@ -1,43 +0,0 @@
PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
PyYAML-6.0.2.dist-info/METADATA,sha256=9-odFB5seu4pGPcEv7E8iyxNF51_uKnaNGjLAhz2lto,2060
PyYAML-6.0.2.dist-info/RECORD,,
PyYAML-6.0.2.dist-info/WHEEL,sha256=baMMpUvyD0gnRdCe6fvqCg8rft4FNTdLqZQ01WfKJmc,152
PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
_yaml/__pycache__/__init__.cpython-310.pyc,,
yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311
yaml/__pycache__/__init__.cpython-310.pyc,,
yaml/__pycache__/composer.cpython-310.pyc,,
yaml/__pycache__/constructor.cpython-310.pyc,,
yaml/__pycache__/cyaml.cpython-310.pyc,,
yaml/__pycache__/dumper.cpython-310.pyc,,
yaml/__pycache__/emitter.cpython-310.pyc,,
yaml/__pycache__/error.cpython-310.pyc,,
yaml/__pycache__/events.cpython-310.pyc,,
yaml/__pycache__/loader.cpython-310.pyc,,
yaml/__pycache__/nodes.cpython-310.pyc,,
yaml/__pycache__/parser.cpython-310.pyc,,
yaml/__pycache__/reader.cpython-310.pyc,,
yaml/__pycache__/representer.cpython-310.pyc,,
yaml/__pycache__/resolver.cpython-310.pyc,,
yaml/__pycache__/scanner.cpython-310.pyc,,
yaml/__pycache__/serializer.cpython-310.pyc,,
yaml/__pycache__/tokens.cpython-310.pyc,,
yaml/_yaml.cpython-310-x86_64-linux-gnu.so,sha256=20HV-cVpIFuOuVUTmQ1-PQIbyt0n8ctfXq7JCMIfbrU,2383664
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573

+ 0
- 6
venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/WHEEL Ver ficheiro

@@ -1,6 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.44.0)
Root-Is-Purelib: false
Tag: cp310-cp310-manylinux_2_17_x86_64
Tag: cp310-cp310-manylinux2014_x86_64


+ 0
- 2
venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/top_level.txt Ver ficheiro

@@ -1,2 +0,0 @@
_yaml
yaml

BIN
venv/lib/python3.10/site-packages/__pycache__/antiorm.cpython-310.pyc Ver ficheiro


BIN
venv/lib/python3.10/site-packages/__pycache__/antipool.cpython-310.pyc Ver ficheiro


BIN
venv/lib/python3.10/site-packages/__pycache__/dbapiext.cpython-310.pyc Ver ficheiro


BIN
venv/lib/python3.10/site-packages/__pycache__/dbrelmgr.cpython-310.pyc Ver ficheiro


BIN
venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc Ver ficheiro


BIN
venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc Ver ficheiro


BIN
venv/lib/python3.10/site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so Ver ficheiro


+ 0
- 239
venv/lib/python3.10/site-packages/_distutils_hack/__init__.py Ver ficheiro

@@ -1,239 +0,0 @@
# don't import any costly modules
import os
import sys

report_url = (
"https://github.com/pypa/setuptools/issues/new?template=distutils-deprecation.yml"
)


def warn_distutils_present():
if 'distutils' not in sys.modules:
return
import warnings

warnings.warn(
"Distutils was imported before Setuptools, but importing Setuptools "
"also replaces the `distutils` module in `sys.modules`. This may lead "
"to undesirable behaviors or errors. To avoid these issues, avoid "
"using distutils directly, ensure that setuptools is installed in the "
"traditional way (e.g. not an editable install), and/or make sure "
"that setuptools is always imported before distutils."
)


def clear_distutils():
if 'distutils' not in sys.modules:
return
import warnings

warnings.warn(
"Setuptools is replacing distutils. Support for replacing "
"an already imported distutils is deprecated. In the future, "
"this condition will fail. "
f"Register concerns at {report_url}"
)
mods = [
name
for name in sys.modules
if name == "distutils" or name.startswith("distutils.")
]
for name in mods:
del sys.modules[name]


def enabled():
"""
Allow selection of distutils by environment variable.
"""
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
if which == 'stdlib':
import warnings

warnings.warn(
"Reliance on distutils from stdlib is deprecated. Users "
"must rely on setuptools to provide the distutils module. "
"Avoid importing distutils or import setuptools first, "
"and avoid setting SETUPTOOLS_USE_DISTUTILS=stdlib. "
f"Register concerns at {report_url}"
)
return which == 'local'


def ensure_local_distutils():
import importlib

clear_distutils()

# With the DistutilsMetaFinder in place,
# perform an import to cause distutils to be
# loaded from setuptools._distutils. Ref #2906.
with shim():
importlib.import_module('distutils')

# check that submodules load as expected
core = importlib.import_module('distutils.core')
assert '_distutils' in core.__file__, core.__file__
assert 'setuptools._distutils.log' not in sys.modules


def do_override():
"""
Ensure that the local copy of distutils is preferred over stdlib.

See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
if enabled():
warn_distutils_present()
ensure_local_distutils()


class _TrivialRe:
def __init__(self, *patterns) -> None:
self._patterns = patterns

def match(self, string):
return all(pat in string for pat in self._patterns)


class DistutilsMetaFinder:
def find_spec(self, fullname, path, target=None):
# optimization: only consider top level modules and those
# found in the CPython test suite.
if path is not None and not fullname.startswith('test.'):
return None

method_name = 'spec_for_{fullname}'.format(**locals())
method = getattr(self, method_name, lambda: None)
return method()

def spec_for_distutils(self):
if self.is_cpython():
return None

import importlib
import importlib.abc
import importlib.util

try:
mod = importlib.import_module('setuptools._distutils')
except Exception:
# There are a couple of cases where setuptools._distutils
# may not be present:
# - An older Setuptools without a local distutils is
# taking precedence. Ref #2957.
# - Path manipulation during sitecustomize removes
# setuptools from the path but only after the hook
# has been loaded. Ref #2980.
# In either case, fall back to stdlib behavior.
return None

class DistutilsLoader(importlib.abc.Loader):
def create_module(self, spec):
mod.__name__ = 'distutils'
return mod

def exec_module(self, module):
pass

return importlib.util.spec_from_loader(
'distutils', DistutilsLoader(), origin=mod.__file__
)

@staticmethod
def is_cpython():
"""
Suppress supplying distutils for CPython (build and tests).
Ref #2965 and #3007.
"""
return os.path.isfile('pybuilddir.txt')

def spec_for_pip(self):
"""
Ensure stdlib distutils when running under pip.
See pypa/pip#8761 for rationale.
"""
if sys.version_info >= (3, 12) or self.pip_imported_during_build():
return
clear_distutils()
self.spec_for_distutils = lambda: None

@classmethod
def pip_imported_during_build(cls):
"""
Detect if pip is being imported in a build script. Ref #2355.
"""
import traceback

return any(
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
)

@staticmethod
def frame_file_is_setup(frame):
"""
Return True if the indicated frame suggests a setup.py file.
"""
# some frames may not have __file__ (#2940)
return frame.f_globals.get('__file__', '').endswith('setup.py')

def spec_for_sensitive_tests(self):
"""
Ensure stdlib distutils when running select tests under CPython.

python/cpython#91169
"""
clear_distutils()
self.spec_for_distutils = lambda: None

sensitive_tests = (
[
'test.test_distutils',
'test.test_peg_generator',
'test.test_importlib',
]
if sys.version_info < (3, 10)
else [
'test.test_distutils',
]
)


for name in DistutilsMetaFinder.sensitive_tests:
setattr(
DistutilsMetaFinder,
f'spec_for_{name}',
DistutilsMetaFinder.spec_for_sensitive_tests,
)


DISTUTILS_FINDER = DistutilsMetaFinder()


def add_shim():
DISTUTILS_FINDER in sys.meta_path or insert_shim()


class shim:
def __enter__(self) -> None:
insert_shim()

def __exit__(self, exc: object, value: object, tb: object) -> None:
_remove_shim()


def insert_shim():
sys.meta_path.insert(0, DISTUTILS_FINDER)


def _remove_shim():
try:
sys.meta_path.remove(DISTUTILS_FINDER)
except ValueError:
pass


if sys.version_info < (3, 12):
# DistutilsMetaFinder can only be disabled in Python < 3.12 (PEP 632)
remove_shim = _remove_shim

BIN
venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc Ver ficheiro


BIN
venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc Ver ficheiro


+ 0
- 1
venv/lib/python3.10/site-packages/_distutils_hack/override.py Ver ficheiro

@@ -1 +0,0 @@
__import__('_distutils_hack').do_override()

BIN
venv/lib/python3.10/site-packages/_ruamel_yaml.cpython-310-x86_64-linux-gnu.so Ver ficheiro


+ 0
- 33
venv/lib/python3.10/site-packages/_yaml/__init__.py Ver ficheiro

@@ -1,33 +0,0 @@
# This is a stub package designed to roughly emulate the _yaml
# extension module, which previously existed as a standalone module
# and has been moved into the `yaml` package namespace.
# It does not perfectly mimic its old counterpart, but should get
# close enough for anyone who's relying on it even when they shouldn't.
import yaml

# in some circumstances, the yaml module we imported may be from a different version, so we need
# to tread carefully when poking at it here (it may not have the attributes we expect)
if not getattr(yaml, '__with_libyaml__', False):
from sys import version_info

exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
raise exc("No module named '_yaml'")
else:
from yaml._yaml import *
import warnings
warnings.warn(
'The _yaml extension module is now located at yaml._yaml'
' and its location is subject to change. To use the'
' LibYAML-based parser and emitter, import from `yaml`:'
' `from yaml import CLoader as Loader, CDumper as Dumper`.',
DeprecationWarning
)
del warnings
# Don't `del yaml` here because yaml is actually an existing
# namespace member of _yaml.

__name__ = '_yaml'
# If the module is top-level (i.e. not a part of any specific package)
# then the attribute should be set to ''.
# https://docs.python.org/3.8/library/types.html
__package__ = ''

BIN
venv/lib/python3.10/site-packages/_yaml/__pycache__/__init__.cpython-310.pyc Ver ficheiro


+ 0
- 1
venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/INSTALLER Ver ficheiro

@@ -1 +0,0 @@
pip

+ 0
- 295
venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/METADATA Ver ficheiro

@@ -1,295 +0,0 @@
Metadata-Version: 2.3
Name: annotated-types
Version: 0.7.0
Summary: Reusable constraint types to use with typing.Annotated
Project-URL: Homepage, https://github.com/annotated-types/annotated-types
Project-URL: Source, https://github.com/annotated-types/annotated-types
Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases
Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin <s@muelcolvin.com>, Zac Hatfield-Dodds <zac@zhd.dev>
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Console
Classifier: Environment :: MacOS X
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Information Technology
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Unix
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Typing :: Typed
Requires-Python: >=3.8
Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
Description-Content-Type: text/markdown

# annotated-types

[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types)
[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types)
[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)

[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
adding context-specific metadata to existing types, and specifies that
`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
logic for `x`.

This package provides metadata objects which can be used to represent common
constraints such as upper and lower bounds on scalar values and collection sizes,
a `Predicate` marker for runtime checks, and
descriptions of how we intend these metadata to be interpreted. In some cases,
we also note alternative representations which do not require this package.

## Install

```bash
pip install annotated-types
```

## Examples

```python
from typing import Annotated
from annotated_types import Gt, Len, Predicate

class MyClass:
age: Annotated[int, Gt(18)] # Valid: 19, 20, ...
# Invalid: 17, 18, "19", 19.0, ...
factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ...
# Invalid: 4, 8, -2, 5.0, "prime", ...

my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50]
# Invalid: (1, 2), ["abc"], [0] * 20
```

## Documentation

_While `annotated-types` avoids runtime checks for performance, users should not
construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
Downstream implementors may choose to raise an error, emit a warning, silently ignore
a metadata item, etc., if the metadata objects described below are used with an
incompatible type - or for any other reason!_

### Gt, Ge, Lt, Le

Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
dates, times, strings, sets, etc. Note that the boundary value need not be of the
same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
is fine, for example, and implies that the value is an integer x such that `x > 1.5`.

We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)`
as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on
the `annotated-types` package.

To be explicit, these types have the following meanings:

* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
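
For instance, a consumer might enforce a `Gt` bound at runtime along these lines (a minimal sketch; `check` is a hypothetical helper, not part of `annotated-types`):

```python
from typing import Annotated, get_args, get_origin
from annotated_types import Gt

Age = Annotated[int, Gt(18)]

def check(value, tp):
    # hypothetical consumer: enforce any Gt metadata found on an Annotated alias
    if get_origin(tp) is Annotated:
        for meta in get_args(tp)[1:]:
            if isinstance(meta, Gt) and not value > meta.gt:
                raise ValueError(f"{value!r} is not > {meta.gt!r}")
    return value

check(19, Age)    # passes
# check(18, Age)  # would raise ValueError
```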

### Interval

`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
metadata object. `None` attributes should be ignored, and non-`None` attributes
treated as per the single bounds above.
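
Because `Interval` is a `GroupedMetadata` (see below), consumers can simply iterate it to recover the equivalent single-bound objects; a small sketch:

```python
from annotated_types import Interval, Ge, Lt

bounds = Interval(ge=0, lt=10)
# iterating an Interval yields the single-bound constraints,
# skipping any attribute that is None
assert list(bounds) == [Ge(0), Lt(10)]
```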

### MultipleOf

`MultipleOf(multiple_of=x)` might be interpreted in two ways:

1. Python semantics, implying `value % multiple_of == 0`, or
2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
where `int(value / multiple_of) == value / multiple_of`.

We encourage users to be aware of these two common interpretations and their
distinct behaviours, especially since very large or non-integer numbers make
it easy to cause silent data corruption due to floating-point imprecision.

We encourage libraries to carefully document which interpretation they implement.
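
A concrete illustration of how both readings interact with floating-point imprecision (plain Python, no library calls):

```python
value, multiple_of = 0.3, 0.1

# 1. Python semantics: value % multiple_of == 0
print(value % multiple_of)        # 0.09999999999999998 -> not a "multiple"

# 2. JSONschema semantics: int(value / multiple_of) == value / multiple_of
print(value / multiple_of)        # 2.9999999999999996 -> also not a "multiple"
```

Both readings reject 0.3 as a multiple of 0.1 purely because of binary floating point, which is the silent-corruption risk noted above.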

### MinLen, MaxLen, Len

`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.

As well as `Len()` which can optionally include upper and lower bounds, we also
provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
and `Len(max_length=y)` respectively.

`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.

Examples of usage:

* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
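
Like `Interval`, `Len` is a `GroupedMetadata` and unpacks into the single bounds; a quick sketch:

```python
from annotated_types import Len, MinLen, MaxLen

assert list(Len(3, 5)) == [MinLen(3), MaxLen(5)]
assert list(Len(0, 10)) == [MaxLen(10)]   # a min_length of 0 adds no lower bound
assert list(Len(2)) == [MinLen(2)]        # max_length defaults to None (unbounded)
```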

#### Changed in v0.4.0

* `min_inclusive` has been renamed to `min_length`, no change in meaning
* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
meaning of the upper bound in slices vs. `Len`

See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.

### Timezone

`Timezone` can be used with a `datetime` or a `time` to express which timezones
are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
expresses that any timezone-aware datetime is allowed. You may also pass a specific
timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
allow a specific timezone, though we note that this is often a symptom of fragile design.
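
Putting the allowed forms side by side (a minimal sketch):

```python
from datetime import datetime, timezone
from typing import Annotated
from annotated_types import Timezone

NaiveDT = Annotated[datetime, Timezone(None)]           # naive datetimes only
AwareDT = Annotated[datetime, Timezone(...)]            # any timezone-aware datetime
UtcDT   = Annotated[datetime, Timezone(timezone.utc)]   # one specific timezone
```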

#### Changed in v0.x.x

* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
`timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.

### Unit

`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
would be a float representing a velocity in meters per second.

Please note that `annotated_types` itself makes no attempt to parse or validate
the unit string in any way. That is left entirely to downstream libraries,
such as [`pint`](https://pint.readthedocs.io) or
[`astropy.units`](https://docs.astropy.org/en/stable/units/).

An example of how a library might use this metadata:

```python
from annotated_types import Unit
from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args

# given a type annotated with a unit:
Meters = Annotated[float, Unit("m")]


# you can cast the annotation to a specific unit type with any
# callable that accepts a string and returns the desired type
T = TypeVar("T")
def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
if get_origin(tp) is Annotated:
for arg in get_args(tp):
if isinstance(arg, Unit):
return unit_cls(arg.unit)
return None


# using `pint`
import pint
pint_unit = cast_unit(Meters, pint.Unit)


# using `astropy.units`
import astropy.units as u
astropy_unit = cast_unit(Meters, u.Unit)
```

### Predicate

`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
Users should prefer the statically inspectable metadata above, but if you need
the full power and flexibility of arbitrary runtime predicates... here it is.

For some common constraints, we provide generic types:

* `IsLower = Annotated[T, Predicate(str.islower)]`
* `IsUpper = Annotated[T, Predicate(str.isupper)]`
* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
* `IsNan = Annotated[T, Predicate(math.isnan)]`
* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`

so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.

Some libraries might have special logic to handle known or understandable predicates,
for example by checking for `str.isdigit` and using its presence to both call custom
logic to enforce digit-only strings, and customise some generated external schema.
Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.

To enable basic negation of commonly used predicates like `math.isnan` without introducing indirection that would prevent implementers from introspecting the predicate, we provide a `Not` wrapper that simply negates the wrapped predicate in an introspectable manner. Several of the predicates listed above are created in this way.
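
For example (a small sketch using the public `Predicate` and `Not` classes):

```python
import math
from annotated_types import Predicate, Not

is_not_nan = Predicate(Not(math.isnan))
# the wrapped callable stays inspectable: Not simply stores and negates it
assert is_not_nan.func.func is math.isnan
assert is_not_nan.func(1.23) and not is_not_nan.func(math.nan)
```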

We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting
`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
exception. We encourage libraries to document the behaviour they choose.

### Doc

`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.

It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.

It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.

This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
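
A minimal usage sketch:

```python
from typing import Annotated
from annotated_types import doc

def greet(name: Annotated[str, doc("The display name to greet.")]) -> str:
    return f"Hello, {name}!"
```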

### Integrating downstream types with `GroupedMetadata`

Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
This can help reduce verbosity and cognitive overhead for users.
For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:

```python
from dataclasses import dataclass
from typing import Iterator
from annotated_types import GroupedMetadata, Ge

@dataclass
class Field(GroupedMetadata):
ge: int | None = None
description: str | None = None

def __iter__(self) -> Iterator[object]:
# Iterating over a GroupedMetadata object should yield annotated-types
# constraint metadata objects which describe it as fully as possible,
# and may include other unknown objects too.
if self.ge is not None:
yield Ge(self.ge)
if self.description is not None:
yield Description(self.description)
```

Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.

Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.

Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.

### Consuming metadata

We do not intend to be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).

It is up to the implementer to determine how this metadata is used.
You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
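
As a rough sketch of such a consumer (`constraints_of` is a hypothetical helper, not part of the package):

```python
from typing import Annotated, get_args, get_origin
import annotated_types as at

def constraints_of(tp):
    """Collect annotated-types metadata attached to an Annotated alias."""
    if get_origin(tp) is not Annotated:
        return []
    found = []
    for meta in get_args(tp)[1:]:
        if isinstance(meta, at.GroupedMetadata):
            found.extend(meta)        # unpack Interval, Len, Field-like wrappers, ...
        elif isinstance(meta, at.BaseMetadata):
            found.append(meta)
    return found

constraints_of(Annotated[int, at.Interval(gt=0, le=100)])   # [Gt(gt=0), Le(le=100)]
```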

## Design & History

This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
and Hypothesis, with the goal of making it as easy as possible for end-users to
provide more informative annotations for use by runtime libraries.

It is deliberately minimal, and following PEP-593 allows considerable downstream
discretion in what (if anything!) they choose to support. Nonetheless, we expect
that staying simple and covering _only_ the most common use-cases will give users
and maintainers the best experience we can. If you'd like more constraints for your
types - follow our lead, by defining them and documenting them downstream!

+ 0
- 10
venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/RECORD Ver ficheiro

@@ -1,10 +0,0 @@
annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046
annotated_types-0.7.0.dist-info/RECORD,,
annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083
annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819
annotated_types/__pycache__/__init__.cpython-310.pyc,,
annotated_types/__pycache__/test_cases.cpython-310.pyc,,
annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421

+ 0
- 4
venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/WHEEL Ver ficheiro

@@ -1,4 +0,0 @@
Wheel-Version: 1.0
Generator: hatchling 1.24.2
Root-Is-Purelib: true
Tag: py3-none-any

+ 0
- 21
venv/lib/python3.10/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE Ver ficheiro

@@ -1,21 +0,0 @@
The MIT License (MIT)

Copyright (c) 2022 the contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

+ 0
- 432
venv/lib/python3.10/site-packages/annotated_types/__init__.py Ver ficheiro

@@ -1,432 +0,0 @@
import math
import sys
import types
from dataclasses import dataclass
from datetime import tzinfo
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union

if sys.version_info < (3, 8):
from typing_extensions import Protocol, runtime_checkable
else:
from typing import Protocol, runtime_checkable

if sys.version_info < (3, 9):
from typing_extensions import Annotated, Literal
else:
from typing import Annotated, Literal

if sys.version_info < (3, 10):
EllipsisType = type(Ellipsis)
KW_ONLY = {}
SLOTS = {}
else:
from types import EllipsisType

KW_ONLY = {"kw_only": True}
SLOTS = {"slots": True}


__all__ = (
'BaseMetadata',
'GroupedMetadata',
'Gt',
'Ge',
'Lt',
'Le',
'Interval',
'MultipleOf',
'MinLen',
'MaxLen',
'Len',
'Timezone',
'Predicate',
'LowerCase',
'UpperCase',
'IsDigits',
'IsFinite',
'IsNotFinite',
'IsNan',
'IsNotNan',
'IsInfinite',
'IsNotInfinite',
'doc',
'DocInfo',
'__version__',
)

__version__ = '0.7.0'


T = TypeVar('T')


# arguments that start with __ are considered
# positional only
# see https://peps.python.org/pep-0484/#positional-only-arguments


class SupportsGt(Protocol):
def __gt__(self: T, __other: T) -> bool:
...


class SupportsGe(Protocol):
def __ge__(self: T, __other: T) -> bool:
...


class SupportsLt(Protocol):
def __lt__(self: T, __other: T) -> bool:
...


class SupportsLe(Protocol):
def __le__(self: T, __other: T) -> bool:
...


class SupportsMod(Protocol):
def __mod__(self: T, __other: T) -> T:
...


class SupportsDiv(Protocol):
def __div__(self: T, __other: T) -> T:
...


class BaseMetadata:
"""Base class for all metadata.

This exists mainly so that implementers
can do `isinstance(..., BaseMetadata)` while traversing field annotations.
"""

__slots__ = ()


@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
"""Gt(gt=x) implies that the value must be greater than x.

It can be used with any type that supports the ``>`` operator,
including numbers, dates and times, strings, sets, and so on.
"""

gt: SupportsGt


@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
"""Ge(ge=x) implies that the value must be greater than or equal to x.

It can be used with any type that supports the ``>=`` operator,
including numbers, dates and times, strings, sets, and so on.
"""

ge: SupportsGe


@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
"""Lt(lt=x) implies that the value must be less than x.

It can be used with any type that supports the ``<`` operator,
including numbers, dates and times, strings, sets, and so on.
"""

lt: SupportsLt


@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
"""Le(le=x) implies that the value must be less than or equal to x.

It can be used with any type that supports the ``<=`` operator,
including numbers, dates and times, strings, sets, and so on.
"""

le: SupportsLe


@runtime_checkable
class GroupedMetadata(Protocol):
"""A grouping of multiple objects, like typing.Unpack.

`GroupedMetadata` on its own is not metadata and has no meaning.
All of the constraints and metadata should be fully expressible
in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.

Concrete implementations should override `GroupedMetadata.__iter__()`
to add their own metadata.
For example:

>>> @dataclass
>>> class Field(GroupedMetadata):
>>> gt: float | None = None
>>> description: str | None = None
...
>>> def __iter__(self) -> Iterable[object]:
>>> if self.gt is not None:
>>> yield Gt(self.gt)
>>> if self.description is not None:
>>> yield Description(self.description)

Also see the implementation of `Interval` below for an example.

Parsers should recognize this and unpack it so that it can be used
both with and without unpacking:

- `Annotated[int, Field(...)]` (parser must unpack Field)
- `Annotated[int, *Field(...)]` (PEP-646)
""" # noqa: trailing-whitespace

@property
def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
return True

def __iter__(self) -> Iterator[object]:
...

if not TYPE_CHECKING:
__slots__ = () # allow subclasses to use slots

def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
# Basic ABC like functionality without the complexity of an ABC
super().__init_subclass__(*args, **kwargs)
if cls.__iter__ is GroupedMetadata.__iter__:
raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")

def __iter__(self) -> Iterator[object]: # noqa: F811
raise NotImplementedError # more helpful than "None has no attribute..." type errors


@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
"""Interval can express inclusive or exclusive bounds with a single object.

It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
are interpreted the same way as the single-bound constraints.
"""

gt: Union[SupportsGt, None] = None
ge: Union[SupportsGe, None] = None
lt: Union[SupportsLt, None] = None
le: Union[SupportsLe, None] = None

def __iter__(self) -> Iterator[BaseMetadata]:
"""Unpack an Interval into zero or more single-bounds."""
if self.gt is not None:
yield Gt(self.gt)
if self.ge is not None:
yield Ge(self.ge)
if self.lt is not None:
yield Lt(self.lt)
if self.le is not None:
yield Le(self.le)


@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
"""MultipleOf(multiple_of=x) might be interpreted in two ways:

1. Python semantics, implying ``value % multiple_of == 0``, or
2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``

We encourage users to be aware of these two common interpretations,
and libraries to carefully document which they implement.
"""

multiple_of: Union[SupportsDiv, SupportsMod]


@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
"""
MinLen() implies minimum inclusive length,
e.g. ``len(value) >= min_length``.
"""

min_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
"""
MaxLen() implies maximum inclusive length,
e.g. ``len(value) <= max_length``.
"""

max_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
"""
Len() implies that ``min_length <= len(value) <= max_length``.

Upper bound may be omitted or ``None`` to indicate no upper length bound.
"""

min_length: Annotated[int, Ge(0)] = 0
max_length: Optional[Annotated[int, Ge(0)]] = None

def __iter__(self) -> Iterator[BaseMetadata]:
"""Unpack a Len into zone or more single-bounds."""
if self.min_length > 0:
yield MinLen(self.min_length)
if self.max_length is not None:
yield MaxLen(self.max_length)


@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
"""Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).

``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
tz-aware but any timezone is allowed.

You may also pass a specific timezone string or tzinfo object such as
``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
you only allow a specific timezone, though we note that this is often
a symptom of poor design.
"""

tz: Union[str, tzinfo, EllipsisType, None]


@dataclass(frozen=True, **SLOTS)
class Unit(BaseMetadata):
"""Indicates that the value is a physical quantity with the specified unit.

It is intended for usage with numeric types, where the value represents the
magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
or ``speed: Annotated[float, Unit('m/s')]``.

Interpretation of the unit string is left to the discretion of the consumer.
It is suggested to follow conventions established by python libraries that work
with physical quantities, such as

- ``pint`` : <https://pint.readthedocs.io/en/stable/>
- ``astropy.units``: <https://docs.astropy.org/en/stable/units/>

For indicating a quantity with a certain dimensionality but without a specific unit
it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
Note, however, ``annotated_types`` itself makes no use of the unit string.
"""

unit: str


@dataclass(frozen=True, **SLOTS)
class Predicate(BaseMetadata):
"""``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.

Users should prefer statically inspectable metadata, but if you need the full
power and flexibility of arbitrary runtime predicates... here it is.

We provide a few predefined predicates for common string constraints:
``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
can be given special handling, and avoid indirection like ``lambda s: s.lower()``.

Some libraries might have special logic to handle certain predicates, e.g. by
checking for `str.isdigit` and using its presence to both call custom logic to
enforce digit-only strings, and customise some generated external schema.

We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting exception.
"""

func: Callable[[Any], bool]

def __repr__(self) -> str:
if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
return f"{self.__class__.__name__}({self.func!r})"
if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
namespace := getattr(self.func.__self__, "__name__", None)
):
return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
if isinstance(self.func, type(str.isascii)): # method descriptor
return f"{self.__class__.__name__}({self.func.__qualname__})"
return f"{self.__class__.__name__}({self.func.__name__})"


@dataclass
class Not:
func: Callable[[Any], bool]

def __call__(self, __v: Any) -> bool:
return not self.func(__v)


_StrType = TypeVar("_StrType", bound=str)

LowerCase = Annotated[_StrType, Predicate(str.islower)]
"""
Return True if the string is a lowercase string, False otherwise.

A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
""" # noqa: E501
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
"""
Return True if the string is an uppercase string, False otherwise.

A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
""" # noqa: E501
IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63
"""
Return True if the string is a digit string, False otherwise.

A string is a digit string if all characters in the string are digits and there is at least one character in the string.
""" # noqa: E501
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
"""
Return True if all characters in the string are ASCII, False otherwise.

ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
"""

_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
"""Return True if x is one of infinity or NaN, and False otherwise"""
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
"""Return True if x is a NaN (not a number), and False otherwise."""
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
"""Return True if x is anything but NaN (not a number), and False otherwise."""
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
"""Return True if x is a positive or negative infinity, and False otherwise."""
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
"""Return True if x is neither a positive or negative infinity, and False otherwise."""

try:
from typing_extensions import DocInfo, doc # type: ignore [attr-defined]
except ImportError:

@dataclass(frozen=True, **SLOTS)
class DocInfo: # type: ignore [no-redef]
""" "
The return value of doc(), mainly to be used by tools that want to extract the
Annotated documentation at runtime.
"""

documentation: str
"""The documentation string passed to doc()."""

def doc(
documentation: str,
) -> DocInfo:
"""
Add documentation to a type annotation inside of Annotated.

For example:

>>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
"""
return DocInfo(documentation)

BIN
venv/lib/python3.10/site-packages/annotated_types/__pycache__/__init__.cpython-310.pyc Ver ficheiro


BIN
venv/lib/python3.10/site-packages/annotated_types/__pycache__/test_cases.cpython-310.pyc Ver ficheiro


+ 0
- 0
venv/lib/python3.10/site-packages/annotated_types/py.typed Ver ficheiro


+ 0
- 151
venv/lib/python3.10/site-packages/annotated_types/test_cases.py Ver ficheiro

@@ -1,151 +0,0 @@
import math
import sys
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple

if sys.version_info < (3, 9):
from typing_extensions import Annotated
else:
from typing import Annotated

import annotated_types as at


class Case(NamedTuple):
"""
A test case for `annotated_types`.
"""

annotation: Any
valid_cases: Iterable[Any]
invalid_cases: Iterable[Any]


def cases() -> Iterable[Case]:
# Gt, Ge, Lt, Le
yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
yield Case(
Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
)
yield Case(
Annotated[datetime, at.Gt(date(2000, 1, 1))],
[date(2000, 1, 2), date(2000, 1, 3)],
[date(2000, 1, 1), date(1999, 12, 31)],
)
yield Case(
Annotated[datetime, at.Gt(Decimal('1.123'))],
[Decimal('1.1231'), Decimal('123')],
[Decimal('1.123'), Decimal('0')],
)

yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
yield Case(
Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(1998, 1, 1), datetime(1999, 12, 31)],
)

yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
yield Case(
Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
[datetime(1999, 12, 31), datetime(1999, 12, 31)],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
)

yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
yield Case(
Annotated[datetime, at.Le(datetime(2000, 1, 1))],
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
)

# Interval
yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
yield Case(
Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(2000, 1, 1), datetime(2000, 1, 4)],
)

yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))

# lengths

yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))

yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))

yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))

yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))

# Timezone

yield Case(
Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
)
yield Case(
Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
)
yield Case(
Annotated[datetime, at.Timezone(timezone.utc)],
[datetime(2000, 1, 1, tzinfo=timezone.utc)],
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
)
yield Case(
Annotated[datetime, at.Timezone('Europe/London')],
[datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
)

# Quantity

yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m'))

# predicate types

yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2'])
yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])

yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])

yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])

# check stacked predicates
yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])

# doc
yield Case(Annotated[int, at.doc("A number")], [1, 2], [])

# custom GroupedMetadata
class MyCustomGroupedMetadata(at.GroupedMetadata):
def __iter__(self) -> Iterator[at.Predicate]:
yield at.Predicate(lambda x: float(x).is_integer())

yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])

+ 0
- 1
venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/INSTALLER Ver ficheiro

@@ -1 +0,0 @@
pip

+ 0
- 26
venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/METADATA Ver ficheiro

@@ -1,26 +0,0 @@
Metadata-Version: 2.4
Name: antiorm
Version: 1.2.1
Summary: A Pythonic Helper for DBAPI-2.0 SQL Access
Home-page: http://furius.ca/antiorm
Download-URL: http://bitbucket.org/blais/antiorm
Author: Martin Blais
Author-email: blais@furius.ca
License: GPL
License-File: COPYING
Dynamic: author
Dynamic: author-email
Dynamic: description
Dynamic: download-url
Dynamic: home-page
Dynamic: license
Dynamic: license-file
Dynamic: summary


Anti-ORM is not an ORM, and it certainly does not want to be. Anti-ORM is a
simple Python module that provides a pythonic syntax for making it more
convenient to build SQL queries over the DBAPI-2.0 interface.

In practice, if you're the kind of person that likes to stay close to the bare metal, it's
almost as good as the ORMs. At least there is no magic, and it just works.

+ 0
- 14
venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/RECORD Ver ficheiro

@@ -1,14 +0,0 @@
__pycache__/antiorm.cpython-310.pyc,,
__pycache__/antipool.cpython-310.pyc,,
__pycache__/dbapiext.cpython-310.pyc,,
__pycache__/dbrelmgr.cpython-310.pyc,,
antiorm-1.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
antiorm-1.2.1.dist-info/METADATA,sha256=4n0PUHc8sC4FCmzG3Bk7R7O595PKh6JEaGJ8oV2YN1M,800
antiorm-1.2.1.dist-info/RECORD,,
antiorm-1.2.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
antiorm-1.2.1.dist-info/licenses/COPYING,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
antiorm-1.2.1.dist-info/top_level.txt,sha256=fr5lSEhB4ZT4Fwb7Cd_2fWGdpzpJRU1oCBWIofdBmo0,35
antiorm.py,sha256=PRGjU3UX9P73Orp9L6T4ZmLD8sAYEU0bkzDkciTT1_0,22298
antipool.py,sha256=CrKNw-18ROwSO5osJ2YKYCkoy4plzAgUPAxPxviiTR0,31542
dbapiext.py,sha256=3Yx707S8u0HGvZHtF6rjqJeLDDlHyY3wtv4LvcbGM3s,27889
dbrelmgr.py,sha256=r3q-wA_4WAQWYz1gU_Orm4utvGZaix9q2C_b7FCsGS8,1455

+ 0
- 5
venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/WHEEL Ver ficheiro

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: setuptools (80.9.0)
Root-Is-Purelib: true
Tag: py3-none-any


+ 0
- 339
venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/licenses/COPYING Ver ficheiro

@@ -1,339 +0,0 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991

Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Preamble

The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.

To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.

For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.

We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.

Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.

Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.

The precise terms and conditions for copying, distribution and
modification follow.

GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".

Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.

1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.

You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.

2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:

a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.

b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.

c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)

These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.

Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.

In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.

3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:

a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,

b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,

c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)

The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.

If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.

4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.

5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.

6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.

7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.

If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.

It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.

This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.

8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.

9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.

10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.

NO WARRANTY

11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.

12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

Also add information on how to contact you by electronic and paper mail.

If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:

Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.

You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:

Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.

<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice

This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.

+ 0
- 4
venv/lib/python3.10/site-packages/antiorm-1.2.1.dist-info/top_level.txt

@@ -1,4 +0,0 @@
antiorm
antipool
dbapiext
dbrelmgr

+ 0
- 731
venv/lib/python3.10/site-packages/antiorm.py

@@ -1,731 +0,0 @@
# -*- coding: iso-8859-1 -*-
# pylint: disable-msg=W0302

"""
An Anti-ORM: a simple set of utility functions to ease the writing of SQL
statements with Python DBAPI-2.0 bindings. This is not an ORM, but it's just
as tasty!

This is only a set of support classes that make it easier to write your own
queries while automating the annoying tasks of setting up lists of column
names and values, as well as doing the type conversions automatically.

And most importantly...

THERE IS NO FRIGGIN' MAGIC IN HERE.

Some notes:

* You always have to pass in the connection objects that the operations are
performed on. This allows connection pooling to be entirely separate from
this library.

* There is never any automatic commit performed here, you must commit your
connection by yourself after you've executed the appropriate commands.

Usage
=====

Most of the convenience functions accept a WHERE condition and a tuple or list
of arguments, which are simply passed on to the DBAPI interface.


Declaring Tables
----------------

The table must declare the SQL table's name on the 'table' class attribute and
should derive from MormTable.

You do not need to declare columns on your tables. However, if you need custom
conversions--right now, only string vs. unicode are useful--you declare a
'converters' mapping from SQL column name to the converter to be used, just for
the columns which require conversion (you can leave others alone). You can
create your own custom converters if so desired.

The class of objects that are returned by the query methods can be defaulted by
setting 'objcls' on the table. This class should/may derive from MormObject,
e.g.::

class TestTable(MormTable):
table = 'test1'
objcls = Person
converters = {
'firstname': MormConvUnicode(),
'lastname': MormConvUnicode(),
'religion': MormConvString()
}


Insert (C)
----------
Insert a new row in a table::

TestTable.insert(connection,
firstname=u'Adriana',
lastname=u'Sousa',
religion='candomblé')

Select (R)
----------
Add a where condition, and select some columns::

for obj in TestTable.select(connection,
'WHERE id = %s', (2,), cols=('id', 'username')):
# Access obj.id, obj.username

The simplest version is simply accessing everything::

for obj in TestTable.select(connection):
# Access obj.id, obj.username and more.

Update (U)
----------
Update statements are provided as well::

TestTable.update(connection,
'WHERE id = %s', (2,),
lastname=u'Depardieu',
religion='candomblé')

Delete (D)
----------
Deleting rows can be done similarly::

TestTable.delete(connection, 'WHERE id = %s', (1,))


Lower-Level APIs
----------------

See the tests at the end of this file for examples of how to do things at a
lower level, which is necessary for complex queries (not that it hurts too
much either). In particular, you should have a look at the MormDecoder and
MormEncoder classes.
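
A minimal sketch of that lower-level path (assuming the same 'connection'
object and the TestTable declaration shown above)::

# Encode a set of columns and run a hand-written UPDATE.
enc = TestTable.encoder(lastname=u'Depardieu')
cursor = connection.cursor()
sql = "UPDATE %s SET %s WHERE id = %%s" % (TestTable.tname(), enc.set())
cursor.execute(sql, list(enc.values()) + [2])

# Decode the rows of an arbitrary SELECT into attribute objects.
cursor.execute("SELECT id, firstname FROM test1 WHERE id = %s", (2,))
dec = TestTable.decoder(cursor)
obj = dec.decode(cursor.fetchone())  # access obj.id and obj.firstname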


See doc/ in distribution for additional notes.
"""

__author__ = 'Martin Blais <blais@furius.ca>'


__all__ = ['MormTable', 'MormObject', 'MormError',
'MormConv', 'MormConvUnicode', 'MormConvString',
'MormDecoder', 'MormEncoder']



class NODEF(object):
"""
No-defaults constant.
"""



class MormObject(object):
"""
An instance of an initialized decoded row.
This is just a dummy container for attributes.
"""


class MormTable(object):
"""
Class for declarations that relate to a table.

This acts as the base class on which derived classes add custom conversions.
An instance of this class acts as a wrapper decoder and iterator object,
whose behaviour depends on the custom converters.
"""

#---------------------------------------------------------------------------

table = None
"Table name in the database."

pkseq = None
"Sequence for primary key."

objcls = MormObject
"Class of objects to create"

converters = {}
"Custom converter map for columns"

#---------------------------------------------------------------------------
# Misc methods.

@classmethod
def tname(cls):
assert cls.table is not None
return cls.table

@classmethod
def encoder(cls, **cols):
"""
Encode the given columns according to this class' definition.
"""
return MormEncoder(cls, cols)

@classmethod
def decoder(cls, desc):
"""
Create a decoder for the column names described by 'desc'. 'desc' can
be either a sequence of column names, or a cursor from which we will
fetch the description. You will still have to pass in the cursor for
decoding later on.
"""
return MormDecoder(cls, desc)

#---------------------------------------------------------------------------
# Methods that only read from the connection

@classmethod
def count(cls, conn, cond=None, args=None, distinct=None):
"""
Counts the number of selected rows.
"""
assert conn is not None

# Perform the select.
cursor = MormDecoder.do_select(conn, (cls,), ('1',),
cond, args, distinct)

# Return the number of matches.
return cursor.rowcount

@classmethod
def select(cls, conn, cond=None, args=None, cols=None,
objcls=None, distinct=None):
"""
Convenience method that executes a select and returns an iterator for
the results, wrapped in objects with attributes
"""
assert conn is not None

# Perform the select.
cursor = MormDecoder.do_select(conn, (cls,), cols,
cond, args, distinct)

# Create a decoder using the description on the cursor.
dec = MormDecoder(cls, cursor)

# Return an iterator over the cursor.
return dec.iter(cursor, objcls)

@classmethod
def select_all(cls, conn, cond=None, args=None, cols=None,
objcls=None, distinct=None):
"""
Convenience method that executes a select and returns a list of all the
results, wrapped in objects with attributes
"""
assert conn is not None

# Perform the select.
cursor = MormDecoder.do_select(conn, (cls,), cols,
cond, args, distinct)

# Create a decoder using the description on the cursor.
dec = MormDecoder(cls, cursor)

# Fetch all the objects from the cursor and decode them.
objects = []
for row in cursor.fetchall():
objects.append(dec.decode(row, objcls=objcls))

return objects

@classmethod
def select_one(cls, conn, cond=None, args=None, cols=None,
objcls=None, distinct=None):
"""
Convenience method that executes a select and returns the first object that
matches, and that also checks that no more than one object matches.
"""
it = cls.select(conn, cond, args, cols, objcls, distinct)
if len(it) > 1:
raise MormError("select_one() matches more than one row.")
try:
o = it.next()
except StopIteration:
o = None
return o

@classmethod
def get(cls, conn, cols=None, default=NODEF, **constraints):
"""
Convenience method that gets a single object by its primary key.
"""
cons, args = [], []
for colname, colvalue in list(constraints.items()):
cons.append('%s = %%s' % colname)
args.append(colvalue)

cond = 'WHERE ' + ' AND '.join(cons)
it = cls.select(conn, cond, args, cols)
try:
if len(it) == 0:
if default is NODEF:
raise MormError("Object not found (%s)." % str(constraints))
else:
return default
return it.next()
finally:
del it

@classmethod
def getsequence(cls, conn, pkseq=None):
"""
Return a sequence number.
This allows us to quickly get the last inserted row id.
"""
if pkseq is None:
pkseq = cls.pkseq
if pkseq is None:
if cls.table is None:
raise MormError("No table specified for "
"getting sequence value")

# By default use PostgreSQL convention.
pkseq = '%s_id_seq' % cls.table

# Run the query.
assert conn
cursor = conn.cursor()

cursor.execute("SELECT currval(%s)", (pkseq,))
seq = cursor.fetchone()[0]

return seq


#---------------------------------------------------------------------------
# Methods that write to the connection

@classmethod
def execute(cls, conn, query, args=None, objcls=None):
"""
Execute an arbitrary read-write SQL statement and return a decoder for
the results.
"""
assert conn
cursor = conn.cursor()
cursor.execute(query, args)
# Get a decoder with the cursor results.
dec = MormDecoder(cls, cursor)

# Return an iterator over the cursor.
return dec.iter(cursor, objcls)

@classmethod
def insert(cls, conn, cond=None, args=None, **fields):
"""
Convenience method that creates an encoder and executes an insert
statement. Returns the encoder.
"""
enc = cls.encoder(**fields)
return enc.insert(conn, cond, args)

@classmethod
def create(cls, conn, cond=None, args=None, pk='id', **fields):
"""
Convenience method that creates an encoder and executes an insert
statement, and then fetches the data back from the database (because of
defaults) and returns the new object.

Note: this assumes that the primary key is composed of a single column.
Note2: this does NOT commit the transaction.
"""
cls.insert(conn, cond, args, **fields)
pkseq = '%s_%s_seq' % (cls.table, pk)
seq = cls.getsequence(conn, pkseq)
return cls.get(conn, **{pk: seq})

@classmethod
def update(cls, conn, cond=None, args=None, **fields):
"""
Convenience method that creates an encoder and executes an update
statement. Returns the encoder.
"""
enc = cls.encoder(**fields)
return enc.update(conn, cond, args)

@classmethod
def delete(cls, conn, cond=None, args=None):
"""
Convenience method that deletes rows with the given condition. WARNING:
if you do not specify any condition, this deletes all the rows in the
table! (just like SQL)
"""
if cond is None:
cond = ''
if args is None:
args = []

# Run the query.
assert conn
cursor = conn.cursor()
cursor.execute("DELETE FROM %s %s" % (cls.table, cond),
list(args))
return cursor



class MormError(Exception):
"""
Error happening in this module.
"""



class MormConv(object):
"""
Base class for all automated type converters.
"""
def from_python(self, value):
"""
Convert value from Python into a type suitable for insertion in a
database query.
"""
return value

def to_python(self, value):
"""
Convert value from the type given by the database connection into a
Python type.
"""
return value



# Encoding from the DBAPI-2.0 client interface.
dbapi_encoding = 'UTF-8'

class MormConvUnicode(MormConv):
"""
Conversion from a database-encoded string to the unicode type.
"""
def from_python(self, vuni):
if isinstance(vuni, str):
vuni = vuni.decode()
return vuni # Keep as unicode, DBAPI takes care of encoding properly.

def to_python(self, vstr):
if vstr is not None:
return vstr.decode(dbapi_encoding)

class MormConvString(MormConv):
"""
Conversion from a database-encoded string to a string in a specific encoding.
"""
# Default value for the desired encoding for the string.
encoding = 'ISO-8859-1'

def __init__(self, encoding=None):
MormConv.__init__(self)
if encoding:
self.encoding = encoding
self.sameenc = (encoding == dbapi_encoding)

def from_python(self, vuni):
if isinstance(vuni, str):
vuni = vuni.decode(self.encoding)
# Send as unicode, DBAPI takes care of encoding with the appropriate
# client encoding.
return vuni

def to_python(self, vstr):
if vstr is not None:
if self.sameenc:
return vstr
else:
return vstr.decode(dbapi_encoding).encode(self.encoding)



class MormEndecBase(object):
"""
Base class for classes that accept a list of tables.
"""
def __init__(self, tables):

# Accept multiple formats for tables list.
self.tables = []
if not isinstance(tables, (tuple, list)):
assert issubclass(tables, MormTable)
tables = (tables,)
for cls in tables:
assert issubclass(cls, MormTable)
self.tables = tuple(tables)
"""Tables is a list of tables that this decoder will use, in order. You
can also pass in a single table class, or a sequence of table"""
assert self.tables

def table(self):
return self.tables[0].tname()

def tablenames(self):
return ','.join(x.tname() for x in self.tables)



class MormDecoder(MormEndecBase):
"""
Decoder class that takes care of creating instances with appropriate
attributes for a specific row.
"""
def __init__(self, tables, desc):
MormEndecBase.__init__(self, tables)

if isinstance(desc, (tuple, list)):
colnames = desc
else:
assert desc is not None
colnames = [x[0] for x in desc.description]

assert colnames
self.colnames = colnames
"""List of column names to restrict decoding.."""

# Note: dotted notation inputs are ignored for now.
#
# if colnames is not None: # Remove dotted notation if present.
# self.colnames = [c.split('.')[-1] for c in colnames]
self.attrnames = dict((c, c.split('.')[-1]) for c in colnames)
assert len(self.attrnames) == len(self.colnames)

def cols(self):
"""
Return a comma-separated string of the column names, suitable for insertion in a query.
"""
return ', '.join(self.colnames)

def decode(self, row, obj=None, objcls=None):
"""
Decode a row.
"""
if len(self.colnames) != len(row):
raise MormError("Row has incorrect length for decoder.")

# Convert all the values right away. We assume that the query is
# minimal and that we're going to need to access all the values.
if obj is None:
if objcls is not None:
# Use the given class if present.
obj = objcls()
else:
# Otherwise look in the list of tables, one-by-one until we find
# an object class to use.
for table in self.tables:
if table.objcls is not None:
obj = table.objcls()
break
else:
# Otherwise just use the default
obj = MormObject()

for cname, cvalue in zip(self.colnames, row):
if '.' in cname:
# Get the table with the matching name and use the converter on
# this table if there is one.
comps = cname.split('.')
tablename, cname = comps[0], comps[-1]
for cls in self.tables:
if cls.tname() == tablename:
converter = cls.converters.get(cname, None)
if converter is not None:
cvalue = converter.to_python(cvalue)
break
else:
# Look in the table list for the first appropriate found
# converter.
for cls in self.tables:
converter = cls.converters.get(cname, None)
if converter is not None:
cvalue = converter.to_python(cvalue)
break

## setattr(obj, self.attrnames[cname], cvalue)
setattr(obj, cname, cvalue)
return obj

def iter(self, cursor, objcls=None):
"""
Create an iterator on the given cursor.
This also deals with the case where a cursor has no results.
"""
if cursor is None:
raise MormError("No cursor to iterate.")
return MormDecoderIterator(self, cursor, objcls)


#---------------------------------------------------------------------------

@staticmethod
def do_select(conn, tables, colnames=None, cond=None, condargs=None,
distinct=None):
"""
Guts of the select methods. You need to pass in a valid connection
'conn'. This returns a new cursor from the given connection.

Note that this method is limited to be able to select on a single table
only. If you want to select on multiple tables at once you will need to
do the select yourself.
"""
tablenames = ','.join(x.tname() for x in tables)

if colnames is None:
colnames = ('*',)

if cond is None:
cond = ''
if condargs is None:
condargs = []
else:
assert isinstance(condargs, (tuple, list, dict))

assert conn is not None

# Run the query.
cursor = conn.cursor()

distinct = distinct and 'DISTINCT' or ''
sql = "SELECT %s %s FROM %s %s" % (distinct, ', '.join(colnames),
tablenames, cond)
cursor.execute(sql, condargs)

return cursor



class MormDecoderIterator(object):
"""
Iterator for a decoder.
"""
def __init__(self, decoder, cursor, objcls=None):
self.decoder = decoder
self.cursor = cursor
self.objcls = objcls

def __len__(self):
return self.cursor.rowcount

def __iter__(self):
return self

def next(self, obj=None, objcls=None):
if self.cursor.rowcount == 0:
raise StopIteration

if objcls is None:
objcls = self.objcls

row = self.cursor.fetchone()
if row is None:
raise StopIteration
else:
return self.decoder.decode(row, obj, objcls)



class MormEncoder(MormEndecBase):
"""
Encoder class. This class converts and contains a set of arguments according
to the declared table conversions. This is mainly used to create INSERT or
UPDATE statements.
"""
def __init__(self, tables, fields):
MormEndecBase.__init__(self, tables)

self.colnames = []
"""Names of all the columns of the encoder."""

self.colvalues = []
"""Encoded values of all the fields of the encoder."""

# Set column names and values, converting if necessary.
for cname, cvalue in list(fields.items()):
self.colnames.append(cname)

# Apply converter to value if necessary
for cls in self.tables:
converter = cls.converters.get(cname, None)
if converter is not None:
cvalue = converter.from_python(cvalue)
break

self.colvalues.append(cvalue)

def cols(self):
return ', '.join(self.colnames)

def values(self):
"""
Returns the list of converted values.
This is useful to let DBAPI do the automatic quoting.
"""
return self.colvalues

def plhold(self):
"""
Returns a string for holding replacement values in the query string,
e.g.: %s, %s, %s
"""
return ', '.join(['%s'] * len(self.colvalues))

def set(self):
"""
Returns a string for holding 'set values' syntax in the query string,
e.g.: col1 = %s, col2 = %s, col3 = %s
"""
return ', '.join(('%s = %%s' % x) for x in self.colnames)

def insert(self, conn, cond=None, args=None):
"""
Execute a simple insert statement with the contained values. You can
only use this on a single table for now. Note: this does not commit the
connection.
"""
assert len(self.tables) == 1
if cond is None:
cond = ''
if args is None:
args = []

# We must be given a valid connection in 'conn'.
assert conn

# Run the query.
cursor = conn.cursor()

sql = ("INSERT INTO %s (%s) VALUES (%s) %s" %
(self.table(), self.cols(), self.plhold(), cond))
cursor.execute(sql, list(self.values()) + list(args))

return cursor

def update(self, conn, cond=None, args=None):
"""
Execute a simple update statement with the contained values. You can
only use this on a single table for now. Note: this does not commit the
connection. If you supply your own connection, we return the cursor
that we used for the query.
"""
assert len(self.tables) == 1
if cond is None:
cond = ''
if args is None:
args = []

# We must be given a valid connection in 'conn'.
assert conn

# Run the query.
cursor = conn.cursor()

sql = "UPDATE %s SET %s %s" % (self.table(), self.set(), cond)
cursor.execute(sql, list(self.values()) + list(args))

return cursor


+ 0
- 966
venv/lib/python3.10/site-packages/antipool.py

@@ -1,966 +0,0 @@
# Copyright (C) 2006 Martin Blais. All Rights Reserved.

"""
An implementation of a DBAPI-2.0 connection pooling system in a multi-threaded
environment.

Initialization
--------------

To use connection pooling, you must first create a connection pool object::

pool = ConnectionPool(dbapi,
database='test',
user='blais')
antipool.initpool(pool)

where 'dbapi' is the module that you want to use that implements the DBAPI-2.0
interface. You need only create a single instance of this object for your
process, and you could make the pool globally accessible.

Configuration
-------------

The connection pool has a few configuration options. See the constructor's
'options' parameter for details.

.. important::

Important note: By default, a connection is reserved exclusively for
read-only operations. If you are running your program in single-threaded
mode and your code is written properly by discriminating between RO and RW
operations with dbpool().connection() and dbpool().connection_ro(), two
connections will be created! Therefore, if you're running your program in a
single thread, you should always set the 'disable_ro' option to True, to
avoid the extra resource consumption. Single-threaded programs could do with
a single RW connection just fine (unless you're specifying the
'user_readonly' option, this makes no difference).
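
For instance, a single-threaded program could build its pool roughly like
this (a sketch; 'dbapi' and the connection parameters are the same as in the
initialization example above)::

opts = {'disable_ro': True, 'minconn': 2, 'maxconn': 10}
pool = ConnectionPool(dbapi, options=opts, database='test', user='blais')
antipool.initpool(pool)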


Acquiring Connections
---------------------

Then, when you want to get a connection to perform some operations on the
database, you call the connection() method and use it the usual DBAPI way::

conn = dbpool().connection()
cursor = conn.cursor()
...
conn.commit()


Read-Only Connections
---------------------

If the connection objects can be shared between threads, the connection pool
allows you to perform an optimization which consists in sharing the connection
between all the threads, for read-only operations. When you know that you will
not need to modify the database for a transaction, get your connection using the
connection_ro() method::

conn = dbpool().connection_ro()
cursor = conn.cursor()
...

Since this will not work for operations that write to the database, you should
NEVER perform inserts, deletes or updates using these special connections. We
do not check the SQL that gets executed, but we specifically do not provide a
commit() method on the connection wrapper so that your code blows up if you try
to commit, which will help you find bugs if you make mistakes with this.

Releasing Connections
---------------------

The connection objects that are provided by the pool are created on demand, and
a goal of the pool is to minimize the amount of resources needed by your
application. The connection objects will normally automatically be released to
the pool once they get collected by your Python interpreter. However, the
Python implementation that you are using may be keeping the connection objects
alive for some time after you have finished using them. Therefore, in order to
minimize the number of live connections at any time, you should always release
the connection objects with the release() method after you have finished using
them::

conn = dbpool().connection()
...
...
conn.release()

We recommend using a try-finally form to make it exception-safe::

conn = dbpool().connection()
try:
cursor = conn.cursor()
...
finally:
conn.release()

Note that if you forget to release the connections it does not create a leak; it
only causes a slightly less efficient use of the connection resources. No big
deal.

Using the 'with' statement
--------------------------
The read-write connection wrapper can also be used as a context manager: on a
normal exit it commits and releases the connection, and on an exception it
rolls back before releasing::

conn, cursor = dbpool().connection(1)
with conn:
...


Convenience for Single Operations with Anti-ORM
-----------------------------------------------

A convenience wrapper object exists for single operations: you can wrap your
antiorm tables with ConnOp(...) and call the same methods on them minus the
connection parameter. The calls automatically acquire and release a connection
object, and commit if relevant.
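
For example (a sketch; TestTable is the hypothetical antiorm table used in
that module's documentation)::

from antipool import ConnOp
people = ConnOp(TestTable)
people.insert(firstname=u'Adriana', lastname=u'Sousa')
rows = people.select_all('WHERE lastname = %s', (u'Sousa',))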


Forking
-------

If your process forks, the child must not keep using the connections that
were opened by the parent. Call dbpool().forget_connections() in the child
right after forking, so that fresh connections get created on demand.


Convenience Decorators
~~~~~~~~~~~~~~~~~~~~~~

There are also ``@connected`` and ``@connected_ro`` decorators that can be used
to add a 'conn' parameter to functions, in a spirit similar to ConnOp.
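
A sketch of the read-write variant (again assuming the hypothetical TestTable
from the antiorm documentation)::

from antipool import connected

@connected
def rename(person_id, lastname, conn=None):
    TestTable.update(conn, 'WHERE id = %s', (person_id,), lastname=lastname)

rename(2, u'Depardieu')  # a connection is acquired, committed and released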

Finalization
------------

On application exit, you should finalize the connection pool explicitly, to
close the database connections still present in the pool::

dbpool().finalize()

It will finalize itself automatically if you forget, but in the interpreter's
finalization stage, which happens in a partially destroyed environment. It is
always safer to finalize explicitly.

Testing
-------

To run a multi-threaded simulation program using this module, just run it
directly. The --debug option provides more verbose output of the connection
pool behaviour.

Supported Databases
-------------------

Currently, we have tested this module with the following databases:

* PostgreSQL (8.x)


"""

__author__ = 'Martin Blais <blais@furius.ca>'
__copyright__ = 'Copyright (C) 2006 Martin Blais. All Rights Reserved.'


# stdlib imports
import os, types, threading, gc, warnings
from datetime import datetime, timedelta


__all__ = ('ConnectionPool', 'Error', 'dbpool', 'ConnOp')


# Create an alias for Python 3.x compatibility
try:
xrange
except NameError:
xrange = range


def dbpool():
"""
Returns the unique database pool for this process. Most often there is only
a single pool per-process, so we provide this function as a global starting
point for getting connections. Use it like this:

from antipool import dbpool
...
conn = dbpool().connection()
...
"""
return _pool_instance

_pool_instance = None

def initpool(pool):
"""
Initialize the connection pool.
You must do this once before you start using the singleton.
"""
global _pool_instance
_pool_instance = pool

#-------------------------------------------------------------------------------
# Support for initializing from the command-line.
def addopts(parser):
"""
Add appropriate options on an optparse parser.
"""
parser.add_option('--database', '--db', action='store',
default=None,
help="Database name")
parser.add_option('--dbuser', action='store',
default=None,
help="Database user")
parser.add_option('--dbpassword', '--dbpass', action='store',
default=None,
help="Database password")
parser.add_option('--dbhost', action='store',
default='localhost',
help="Database hostname")
parser.add_option('--dbport', action='store', type='int',
default=5432,
help="Database port")

def initfromopts(dbapi, opts):
"""
Initialize a global connection pool using the parameters parsed from the
command-line options.
"""
params = {}
for pname, oname in (('database', 'database'),
('user', 'dbuser'),
('password', 'dbpassword'),
('host', 'dbhost'),
('port', 'dbport')):
pvalue = getattr(opts, oname, None)
if pvalue is not None:
params[pname] = pvalue

pool = ConnectionPool(dbapi, **params)
initpool(pool)



class ConnOp(object):
"""
Wrapper class that provides a temporary interface for tables, that
automatically fetches an appropriate connection from the antipool connection
pool, and that automatically releases or commits this connection.
"""

def __init__(self, table):
self.table = table
"""Table object that is being mapped."""

def _run_with_conn_ro(self, funname, *args, **kwds):
"""
Run a read-only operation using a read-only connection object from the
global antipool connection pool.
"""
fun = getattr(self.table, funname)

rv = None

conn = dbpool().connection_ro()
try:
try:
newargs = (conn,) + args
rv = fun(*newargs, **kwds)
except Exception:
conn.rollback()
raise
finally:
conn.release()
return rv

def _run_with_conn(self, funname, *args, **kwds):
"""
Run a read-write operation using a read-write connection object from the
global antipool connection pool.
"""
fun = getattr(self.table, funname)

rv = None

conn = dbpool().connection()
try:
try:
newargs = (conn,) + args
rv = fun(*newargs, **kwds)
except Exception:
conn.rollback()
raise
else:
# Automatically commit.
conn.commit()
finally:
conn.release()
return rv


# Read-only methods.

# Note: we do not provide select on purpose, since the cursor (the fetch
# context) must be maintained afterwards, to fetch the results.

def count(self, *args, **kwds):
return self._run_with_conn_ro('count', *args, **kwds)

def select_all(self, *args, **kwds):
return self._run_with_conn_ro('select_all', *args, **kwds)

def select_one(self, *args, **kwds):
return self._run_with_conn_ro('select_one', *args, **kwds)

def get(self, *args, **kwds):
return self._run_with_conn_ro('get', *args, **kwds)

def getsequence(self, *args, **kwds):
return self._run_with_conn_ro('getsequence', *args, **kwds)

# Read-write methods.

def insert(self, *args, **kwds):
return self._run_with_conn('insert', *args, **kwds)

def create(self, *args, **kwds):
return self._run_with_conn('create', *args, **kwds)

def update(self, *args, **kwds):
return self._run_with_conn('update', *args, **kwds)

def delete(self, *args, **kwds):
return self._run_with_conn('delete', *args, **kwds)


# Decorators

def connected_ro(fun):
"""
Decorator that fetches a connection and that outputs a database error
appropriately. This passes a connection as one of the keyword arguments
under the name 'conn'.
"""
def wfun(*args, **kwds):
conn = dbpool().connection_ro()
try:
assert 'conn' not in kwds
kwds['conn'] = conn
return fun(*args, **kwds)
finally:
conn.release()
return wfun

def connected(fun):
"""
Decorator, similar to connected_ro() but that passes a RW connection and
that commits automatically.

FIXME: we want to make the automatic commit optional.
FIXME: we would like to also ask for some cursors to be automatically passed
in.
"""
def wfun(*args, **kwds):
conn = dbpool().connection()
try:
assert 'conn' not in kwds
kwds['conn'] = conn
r = fun(*args, **kwds)
conn.commit()
return r
finally:
conn.release()
return wfun




class ConnectionPoolInterface(object):
"""
Interface for a connection pool. This is documentation for the public
interface that you are supposed to use.
"""
def module(self):
"""
Get access to the DBAPI-2.0 module. This is necessary for some of the
standard objects it provides, e.g. Binary().
"""

def connection(self, nbcursors=0, readonly=False):
"""
Acquire a connection for read and write operations.

As a convenience, additionally create a number of cursors and return
them along with the connection, for example::

conn, curs1, curs2 = dbpool.connection(2)

Invoke with readonly=True if you need a read-only connection
(alternatively, you can use the connection_ro() method below).
"""

def connection_ro(self, nbcursors=0):
"""
Acquire a connection for read-only operations.
See connection() for details.
"""

def finalize(self):
"""
Finalize the pool, which closes remaining open connections.
"""



class ConnectionPool(ConnectionPoolInterface):
"""
A pool of database connections that can be shared by a number of threads.
"""

_def_minconn = 5
"""The minimum number of connections to keep around."""

_def_maxconn = None
"""The maximum number of connections to ever allocate (None means that there
is no limit). When the maximum is reached, acquiring a new connection is a
blocking operation."""

_def_minkeepsecs = 5 # seconds
"""The minimum amount of seconds that we should keep connections around
for."""

_def_disable_rollback = False
"""Should we disable the rollback on released connections?"""

def __init__(self, dbapi, options=None, **params):
"""
'dbapi': the DBAPI-2.0 module interface for creating connections.
'minconn': the minimum number of connections to keep around.
'maxconn': the maximum allowed number of connections to the DB.
'debug': flag to enable printing debugging output.
'**params': connection parameters for creating a new connection.
"""
self.dbapi = dbapi
"""The DBAPI-2.0 module interface."""

self._params = params
if not params:
raise Error("You need to specify valid connection parameters in "
"order to creat4e a connection pool.")
"""The parameters for creating a connection."""

self._pool = []
self._pool_lock = threading.Condition(threading.RLock())
"""A pool of database connections and an associated lock for access."""

self._nbconn = 0
"""The total number read-write database connections that were handed
out. This does not include the RO connection, if it is created."""

self._roconn = None
self._roconn_lock = threading.Lock()
self._roconn_refs = 0
"""A connection for read-only access and an associated lock for
creation. We also store the number of references to it that were
handled to clients."""

if options is None:
options = {}

self._debug = options.pop('debug', False)
if self._debug:
assert hasattr(self._debug, 'write')
self._log_lock = threading.Lock()
"""Lock used to serialize debug output between threads."""

disable_ro = options.pop('disable_ro', False)
if not disable_ro and dbapi.threadsafety < 2:
# Note: Configure with disable_ro to remove this warning
# message.
warnings.warn(
"Warning: Your DBAPI module '%s' does not support sharing "
"connections between threads." % str(dbapi))

# Disable the RO connection by force.
disable_ro = True

if disable_ro:
# Disable creating the unique RO connection.
self.connection_ro = self._connection_ro_crippled
self._ro_shared = not disable_ro

self._minconn = options.pop('minconn', self._def_minconn)

self._maxconn = options.pop('maxconn', self._def_maxconn)
if self._maxconn is not None:
# Reserve one of the available connections for the RO connection.
if not self._ro_shared:
self._maxconn -= 1
assert self._maxconn > 0

self._minkeepsecs = options.pop('minkeepsecs', self._def_minkeepsecs)

self._disable_rollback = options.pop('disable_rollback',
self._def_disable_rollback)

self._user_ro = options.pop('user_readonly', None)
"""User for read-only connections. You might want to setup different
privileges for that user in your database configuration."""

self._debug_unreleased = options.pop('debug_unreleased', None)
assert (self._debug_unreleased is None or
isinstance(self._debug_unreleased, types.FunctionType))

"""Function to call when the connection wrappers are being closed as a
result of being collected. This is used to trigger some kind of check
when you forget to release some connections explicitly."""

self._isolation_level = options.pop('isolation_level', None)

def ro_shared(self):
"""
Returns true if the read-only connections are shared between the
threads.
"""
return self._ro_shared

def module(self):
"""
(See base class.)
"""
return self.dbapi

def _log(self, msg):
"""
Debugging information logging.
"""
if self._debug:
self._log_lock.acquire()
curthread = threading.currentThread()
self._debug.write(' [%s %s] %s\n' %
(curthread.getName(), os.getpid(), msg))
self._log_lock.release()

def _create_connection(self, read_only):
"""
Create a new connection to the database.
"""
self._log('Connection Create%s' % (read_only and ' (READ ONLY)' or ''))
params = self._params
if read_only and self._user_ro:
params = params.copy()
params['user'] = self._user_ro

newconn = self.dbapi.connect(*(), **params)

# Set the isolation level if specified in the options.
if self._isolation_level is not None:
newconn.set_isolation_level(self._isolation_level)
return newconn

def _close(self, conn):
"""
Close the given connection for the database.
"""
self._log('Connection Close')
return conn.close()

@staticmethod
def _add_cursors(conn_wrapper, nbcursors):
"""
Return an appropriate value depending on the number of cursors requested
for a connection wrapper.
"""
if nbcursors == 0:
return conn_wrapper
else:
r = [conn_wrapper]
for i in xrange(nbcursors):
r.append(conn_wrapper.cursor())
return r

def _get_connection_ro(self):
"""
Acquire a read-only connection.
"""
self._roconn_lock.acquire()
self._log('Acquire RO')
try:
if not self._roconn:
self._roconn = self._create_connection(True)
self._roconn_refs += 1
finally:
self._roconn_lock.release()
return self._roconn

def connection_ro(self, nbcursors=0):
"""
(See base class.)
"""
return self._add_cursors(
ConnectionWrapperRO(self._get_connection_ro(), self), nbcursors)

def _acquire(self):
"""
Acquire a connection from the pool, for read and write operations.

Note that if the maximum number of connections has been reached, this
becomes a blocking operation.
"""
self._pool_lock.acquire()
self._log('Acquire (begin) Pool: %d / Created: %s' %
(len(self._pool), self._nbconn))
try:
# Apply maximum number of connections constraint.
if self._maxconn is not None:
# Sanity check.
assert self._nbconn <= self._maxconn

while not self._pool and self._nbconn == self._maxconn:
# Block until a connection is released.
self._log('Acquire (wait) Pool: %d / Created: %s' %
(len(self._pool), self._nbconn))
self._pool_lock.wait()
self._log('Acquire (signaled) Pool: %d / Created: %s' %
(len(self._pool), self._nbconn))

# Assert that we have a connection in the pool or that we can
# create a new one if needed, i.e. what we waited for just
# before. (This is now a useless sanity check.)
assert self._pool or self._nbconn < self._maxconn

if self._pool:
conn, last_released = self._pool.pop()
else:
# Make sure that we never create a new connection if we have
# reached the maximum.
if self._maxconn is not None:
assert self._nbconn < self._maxconn

conn = self._create_connection(False)
self._nbconn += 1

self._log('Acquire (end ) Pool: %d / Created: %s' %
(len(self._pool), self._nbconn))
finally:
self._pool_lock.release()
return conn

def _connection_ro_crippled(self, nbcursors=0):
"""
Replacement for connection_ro() that actually uses the pool to get its
connections. This is used when the dbapi does not allow threads to
share a connection.
"""
conn = self._acquire()
return self._add_cursors(ConnectionWrapperCrippled(conn, self),
nbcursors)

def _get_connection(self):
"""
Acquire a read-write connection.
"""
return self._acquire()

def connection(self, nbcursors=0, readonly=False):
"""
(See base class.)
"""
if readonly:
return self.connection_ro(nbcursors)
return self._add_cursors(
ConnectionWrapper(self._get_connection(), self), nbcursors)

def _release_ro(self, conn):
"""
Release a reference to the read-only connection. You should not use
this directly, you should instead call release() or close() on the
connection object.
"""
self._roconn_lock.acquire()

try:
if conn is self._roconn:
assert self._roconn

self._roconn_refs -= 1
self._log('Release RO')

# Make sure a released connection is not blocking anything else, so
# rollback. Technically this should not block anything, since the
# only operations that are carried out on this connection are RO,
# but we won't risk a deadlock because the user made a programming
# error.
try:
if not self._disable_rollback:
conn.rollback()
except self.dbapi.Error:
# This connection is hosed somehow, we should ditch it.
self._log('Ditching hosed RO connection: %s' % conn)
self._roconn = None
self._roconn_refs = 0
else:
# Ignored the release of other hosed connections.
self._log('Hosed connection %s released after ditched.' % conn)
finally:
self._roconn_lock.release()

def _release(self, conn):
"""
Release a reference to a read-and-write connection.
"""
self._pool_lock.acquire()
try:
self._log('Release (begin) Pool: %d / Created: %s' %
(len(self._pool), self._nbconn))

# Make sure a released connection is not blocking anything else.
try:
if not self._disable_rollback:
conn.rollback()
except self.dbapi.Error:
# Oopsy, this connection is hosed somehow. We need to ditch it.
self._log('Ditching hosed connection: %s' % conn)
conn = None
self._nbconn -= 1
return

assert conn is not self._roconn # Sanity check.

self._pool.append( (conn, datetime.now()) )
self._scaledown()
assert self._pool or self._nbconn < self._maxconn
self._log('Release (notify) Pool: %d / Created: %s' %
(len(self._pool), self._nbconn))
self._pool_lock.notify()
self._log('Release (notified) Pool: %d / Created: %s' %
(len(self._pool), self._nbconn))

self._log('Release (end ) Pool: %d / Created: %s' %
(len(self._pool), self._nbconn))
finally:
self._pool_lock.release()

def _scaledown(self):
"""
Scale down the number of connections according to the following
heuristic: we want to keep a minimum number of extra connections in the
pool ready for usage. We delete all connections above that number if
they have last been used beyond a fixed timeout.
"""
self._pool_lock.acquire()
try:
self._log('Scaledown')

# Calculate a recent time limit beyond which we always keep the
# connections.
minkeepsecs = datetime.now() - timedelta(seconds=self._minkeepsecs)

# Calculate the number of connections that we can get rid of.
n = len(self._pool) - self._minconn
if n > 0:
filtered_pool = []
for poolitem in self._pool:
conn, last_released = poolitem
if n > 0 and last_released < minkeepsecs:
self._close(conn)
self._nbconn -= 1
n -= 1
else:
filtered_pool.append(poolitem)
self._pool = filtered_pool
finally:
self._pool_lock.release()

# Note: we could keep the pool sorted by last_released to minimize the
# scaledown time, so that the first items in the pool are always the
# oldest, the most likely to be deletable.

def finalize(self):
"""
Close all the open connections and finalize (prepare for reuse).
"""
# Make sure that all connections lying about are collected before we go
# on.
try:
gc.collect()
except (TypeError, AttributeError):
# We've detected that we're being called in an incomplete
# finalization state, we just bail out, leaving the connections
# to take care of themselves.
return

self._roconn_lock.acquire()
self._pool_lock.acquire()
try:
if not self._pool and not self._roconn:
assert self._nbconn == 0
return # Already finalized.

# Check that all the connections have been returned to us.
assert len(self._pool) == self._nbconn

assert self._roconn_refs == 0
if self._roconn is not None:
self._close(self._roconn)
self._roconn = None

# Release all the read-write pool's connections.
for conn, last_released in self._pool:
self._close(conn)

poolsize = len(self._pool)
self._pool = []

self._log('Finalize Pool: %d / Created: %s' %
(poolsize, self._nbconn))

# Reset statistics.
self._nbconn = 0
finally:
self._roconn_lock.release()
self._pool_lock.release()

def __del__(self):
"""
Destructor.
"""
self.finalize()

def getstats(self):
"""
Return internal statistics. This is used for producing graphs depicting
resource requirements over time. Returns the total number of
connections open (including the RO connection) and the current number of
connections held in the internal pool.
"""
total_conn = 0
self._roconn_lock.acquire()
try:
if self._roconn:
total_conn += 1
finally:
self._roconn_lock.release()

self._pool_lock.acquire()
total_conn += self._nbconn
try:
pool_size = len(self._pool)
finally:
self._pool_lock.release()

return total_conn, pool_size

def forget_connections(self):
"""
Forget all the existing connections and close the sockets. This MUST be
called from a child process right after forking.
"""
self._roconn_lock = threading.Lock()
self._pool_lock = threading.Condition(threading.RLock())

self._roconn = None
self._pool = []
self._nbconn = 0

## FIXME: todo, close the file descriptors (unix ::close()
## FIXME: continue this, you need to fix the test: test_fork.py




class ConnectionWrapperRO(object):
"""
A wrapper object that behaves like a database connection for read-only
operations. You cannot close() this explicitly, you should call release().

Important: you should always try to explicitly release these objects, in
order to minimize the number of open connections in the pool. If you do not
release explicitly, the pool has to keep the connection open. Here is the
preferred way to do this:

connection = dbpool().connection()
try:
# you code here
finally:
connection.release()

Note that this connection wrapper does not allow committing. It is meant
for read-only operations (i.e. SELECT). See class ConnectionWrapper for the
commit method.
"""
def __init__(self, conn, pool):
assert conn
self._conn = conn
self._connpool = pool

def __del__(self):
if self._conn:
unrel = self._connpool._debug_unreleased
if unrel:
unrel(self)
self.release()

def _getconn(self):
if self._conn is None:
raise Error("Error: Connection already closed.")
else:
return self._conn

def release(self):
self._release_impl(self._getconn())
self._connpool = self._conn = None

def _release_impl(self, conn):
self._connpool._release_ro(conn)

def cursor(self, *args, **kw):
return self._getconn().cursor(*args, **kw)

def commit(self):
raise Error("Error: You cannot commit on a read-only connection.")

def rollback(self):
return self._getconn().rollback()

# Support for the context object.

def __enter__(self):
return self

def __exit__(self, exc_type, exc_value, traceback):
self.release()
class ConnectionWrapperCrippled(ConnectionWrapperRO):
"""
A wrapper object that releases to the pool. It still does not provide a
commit() method however.
"""
def _release_impl(self, conn):
self._connpool._release(conn)

class ConnectionWrapper(ConnectionWrapperCrippled):
"""
A wrapper object that allows write operations and provides a commit()
method. See ConnectionWrapperRO for more details.
"""
def commit(self):
return self._getconn().commit()

# Support for the context object.

def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.commit()
else:
self.rollback()
self.release()


class Error(Exception):
"""
Error for connection wrappers.
"""



+ 0
- 1
venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/INSTALLER

@@ -1 +0,0 @@
pip

+ 0
- 20
venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/LICENSE

@@ -1,20 +0,0 @@
The MIT License (MIT)

Copyright (c) 2018 Alex Grönholm

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 0
- 105
venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/METADATA

@@ -1,105 +0,0 @@
Metadata-Version: 2.2
Name: anyio
Version: 4.9.0
Summary: High level compatibility layer for multiple asynchronous event loop implementations
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
License: MIT
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
Project-URL: Source code, https://github.com/agronholm/anyio
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Framework :: AnyIO
Classifier: Typing :: Typed
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11"
Requires-Dist: idna>=2.8
Requires-Dist: sniffio>=1.1
Requires-Dist: typing_extensions>=4.5; python_version < "3.13"
Provides-Extra: trio
Requires-Dist: trio>=0.26.1; extra == "trio"
Provides-Extra: test
Requires-Dist: anyio[trio]; extra == "test"
Requires-Dist: blockbuster>=1.5.23; extra == "test"
Requires-Dist: coverage[toml]>=7; extra == "test"
Requires-Dist: exceptiongroup>=1.2.0; extra == "test"
Requires-Dist: hypothesis>=4.0; extra == "test"
Requires-Dist: psutil>=5.9; extra == "test"
Requires-Dist: pytest>=7.0; extra == "test"
Requires-Dist: trustme; extra == "test"
Requires-Dist: truststore>=0.9.1; python_version >= "3.10" and extra == "test"
Requires-Dist: uvloop>=0.21; (platform_python_implementation == "CPython" and platform_system != "Windows" and python_version < "3.14") and extra == "test"
Provides-Extra: doc
Requires-Dist: packaging; extra == "doc"
Requires-Dist: Sphinx~=8.2; extra == "doc"
Requires-Dist: sphinx_rtd_theme; extra == "doc"
Requires-Dist: sphinx-autodoc-typehints>=1.2.0; extra == "doc"

.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
   :target: https://github.com/agronholm/anyio/actions/workflows/test.yml
   :alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
   :target: https://coveralls.io/github/agronholm/anyio?branch=master
   :alt: Code Coverage
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
   :target: https://anyio.readthedocs.io/en/latest/?badge=latest
   :alt: Documentation
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
   :target: https://gitter.im/python-trio/AnyIO
   :alt: Gitter chat

AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
with the native SC of trio itself.

Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full
refactoring necessary. It will blend in with the native libraries of your chosen backend.

Documentation
-------------

View full documentation at: https://anyio.readthedocs.io/

Features
--------

AnyIO offers the following functionality:

* Task groups (nurseries_ in trio terminology)
* High-level networking (TCP, UDP and UNIX sockets)

  * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
    3.8)
  * async/await style UDP sockets (unlike asyncio where you still have to use Transports and
    Protocols)

* A versatile API for byte streams and object streams
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
  streams)
* Worker threads
* Subprocesses
* Asynchronous file I/O (using worker threads)
* Signal handling

AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
It even works with the popular Hypothesis_ library.

.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _trio: https://github.com/python-trio/trio
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
.. _pytest: https://docs.pytest.org/en/latest/
.. _Hypothesis: https://hypothesis.works/
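A minimal sketch, assuming the standard AnyIO task-group API described above (not taken from the removed files): two tasks run concurrently and the group exits only when both have finished. It runs unchanged on the default asyncio backend.

import anyio

async def say(text, delay):
    await anyio.sleep(delay)
    print(text)

async def main():
    # start_soon() schedules each task; leaving the async with block waits for both.
    async with anyio.create_task_group() as tg:
        tg.start_soon(say, "world", 0.2)
        tg.start_soon(say, "hello", 0.1)

anyio.run(main)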

+0 -88  venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/RECORD

@@ -1,88 +0,0 @@
anyio-4.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
anyio-4.9.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
anyio-4.9.0.dist-info/METADATA,sha256=vvkWPXXTbrpTCFK7zdcYwQcSQhx6Q4qITM9t_PEQCrY,4682
anyio-4.9.0.dist-info/RECORD,,
anyio-4.9.0.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
anyio-4.9.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
anyio-4.9.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
anyio/__init__.py,sha256=t8bZuNXa5ncwXBaNKbv48BDgZt48RT_zCEtrnPmjNU8,4993
anyio/__pycache__/__init__.cpython-310.pyc,,
anyio/__pycache__/from_thread.cpython-310.pyc,,
anyio/__pycache__/lowlevel.cpython-310.pyc,,
anyio/__pycache__/pytest_plugin.cpython-310.pyc,,
anyio/__pycache__/to_interpreter.cpython-310.pyc,,
anyio/__pycache__/to_process.cpython-310.pyc,,
anyio/__pycache__/to_thread.cpython-310.pyc,,
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_backends/__pycache__/__init__.cpython-310.pyc,,
anyio/_backends/__pycache__/_asyncio.cpython-310.pyc,,
anyio/_backends/__pycache__/_trio.cpython-310.pyc,,
anyio/_backends/_asyncio.py,sha256=AT1oaTfCE-9YFxooMlvld2yDqY5U2A-ANMcBDh9eRfI,93455
anyio/_backends/_trio.py,sha256=HVfDqRGQ7Xj3JfTcYdgzmC7pZEplqU4NOO5kxNNSZnk,40429
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_core/__pycache__/__init__.cpython-310.pyc,,
anyio/_core/__pycache__/_asyncio_selector_thread.cpython-310.pyc,,
anyio/_core/__pycache__/_eventloop.cpython-310.pyc,,
anyio/_core/__pycache__/_exceptions.cpython-310.pyc,,
anyio/_core/__pycache__/_fileio.cpython-310.pyc,,
anyio/_core/__pycache__/_resources.cpython-310.pyc,,
anyio/_core/__pycache__/_signals.cpython-310.pyc,,
anyio/_core/__pycache__/_sockets.cpython-310.pyc,,
anyio/_core/__pycache__/_streams.cpython-310.pyc,,
anyio/_core/__pycache__/_subprocesses.cpython-310.pyc,,
anyio/_core/__pycache__/_synchronization.cpython-310.pyc,,
anyio/_core/__pycache__/_tasks.cpython-310.pyc,,
anyio/_core/__pycache__/_tempfile.cpython-310.pyc,,
anyio/_core/__pycache__/_testing.cpython-310.pyc,,
anyio/_core/__pycache__/_typedattr.cpython-310.pyc,,
anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626
anyio/_core/_eventloop.py,sha256=t_tAwBFPjF8jrZGjlJ6bbYy6KA3bjsbZxV9mvh9t1i0,4695
anyio/_core/_exceptions.py,sha256=RlPRlwastdmfDPoskdXNO6SI8_l3fclA2wtW6cokU9I,3503
anyio/_core/_fileio.py,sha256=qFZhkLIz0cGXluvih_vcPUTucgq8UFVgsTCtYbijZIg,23340
anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905
anyio/_core/_sockets.py,sha256=5Okc_UThGDEN9KCnsIhqWPRHBNuSy6b4NmG1i51TVF4,27150
anyio/_core/_streams.py,sha256=OnaKgoDD-FcMSwLvkoAUGP51sG2ZdRvMpxt9q2w1gYA,1804
anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047
anyio/_core/_synchronization.py,sha256=DwUh8Tl6cG_UMVC_GyzPoC_U9BpfDfjMl9SINSxcZN4,20320
anyio/_core/_tasks.py,sha256=f3CuWwo06cCZ6jaOv-JHFKWkgpgf2cvaF25Oh4augMA,4757
anyio/_core/_tempfile.py,sha256=s-_ucacXbxBH5Bo5eo65lN0lPwZQd5B8yNN_9nARpCM,19696
anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118
anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508
anyio/abc/__init__.py,sha256=c2OQbTCS_fQowviMXanLPh8m29ccwkXmpDr7uyNZYOo,2652
anyio/abc/__pycache__/__init__.cpython-310.pyc,,
anyio/abc/__pycache__/_eventloop.cpython-310.pyc,,
anyio/abc/__pycache__/_resources.cpython-310.pyc,,
anyio/abc/__pycache__/_sockets.cpython-310.pyc,,
anyio/abc/__pycache__/_streams.cpython-310.pyc,,
anyio/abc/__pycache__/_subprocesses.cpython-310.pyc,,
anyio/abc/__pycache__/_tasks.cpython-310.pyc,,
anyio/abc/__pycache__/_testing.cpython-310.pyc,,
anyio/abc/_eventloop.py,sha256=UmL8DZCvQTgxzmyBZcGm9kWj9VQY8BMWueLh5S8yWN4,9682
anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783
anyio/abc/_sockets.py,sha256=KhWtJxan8jpBXKwPaFeQzI4iRXdFaOIn0HXtDZnaO7U,6262
anyio/abc/_streams.py,sha256=He_JpkAW2g5veOzcUq0XsRC2nId_i35L-d8cs7Uj1ZQ,6598
anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067
anyio/abc/_tasks.py,sha256=yJWbMwowvqjlAX4oJ3l9Is1w-zwynr2lX1Z02AWJqsY,3080
anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821
anyio/from_thread.py,sha256=MbXHZpgM9wgsRkbGhMNMomEGYj7Y_QYq6a5BZ3c5Ev8,17478
anyio/lowlevel.py,sha256=nkgmW--SdxGVp0cmLUYazjkigveRm5HY7-gW8Bpp9oY,4169
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/pytest_plugin.py,sha256=qXNwk9Pa7hPQKWocgLl9qijqKGMkGzdH2wJa-jPkGUM,9375
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/streams/__pycache__/__init__.cpython-310.pyc,,
anyio/streams/__pycache__/buffered.cpython-310.pyc,,
anyio/streams/__pycache__/file.cpython-310.pyc,,
anyio/streams/__pycache__/memory.cpython-310.pyc,,
anyio/streams/__pycache__/stapled.cpython-310.pyc,,
anyio/streams/__pycache__/text.cpython-310.pyc,,
anyio/streams/__pycache__/tls.cpython-310.pyc,,
anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500
anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383
anyio/streams/memory.py,sha256=o1OVVx0OooteTTe2GytJreum93Ucuw5s4cAsr3X0-Ag,10560
anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302
anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094
anyio/streams/tls.py,sha256=HxzpVmUgo8SUSIBass_lvef1pAI1uRSrnysM3iEGzl4,13199
anyio/to_interpreter.py,sha256=UhuNCIucCRN7ZtyJg35Mlamzs1JpgDvK4xnL4TDWrAo,6527
anyio/to_process.py,sha256=ZvruelRM-HNmqDaql4sdNODg2QD_uSlwSCxnV4OhsfQ,9595
anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396

+0 -5  venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/WHEEL

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: setuptools (76.0.0)
Root-Is-Purelib: true
Tag: py3-none-any


+0 -2  venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/entry_points.txt

@@ -1,2 +0,0 @@
[pytest11]
anyio = anyio.pytest_plugin

+0 -1  venv/lib/python3.10/site-packages/anyio-4.9.0.dist-info/top_level.txt

@@ -1 +0,0 @@
anyio

+0 -85  venv/lib/python3.10/site-packages/anyio/__init__.py

@@ -1,85 +0,0 @@
from __future__ import annotations

from ._core._eventloop import current_time as current_time
from ._core._eventloop import get_all_backends as get_all_backends
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
from ._core._eventloop import run as run
from ._core._eventloop import sleep as sleep
from ._core._eventloop import sleep_forever as sleep_forever
from ._core._eventloop import sleep_until as sleep_until
from ._core._exceptions import BrokenResourceError as BrokenResourceError
from ._core._exceptions import BrokenWorkerIntepreter as BrokenWorkerIntepreter
from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess
from ._core._exceptions import BusyResourceError as BusyResourceError
from ._core._exceptions import ClosedResourceError as ClosedResourceError
from ._core._exceptions import DelimiterNotFound as DelimiterNotFound
from ._core._exceptions import EndOfStream as EndOfStream
from ._core._exceptions import IncompleteRead as IncompleteRead
from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError
from ._core._exceptions import WouldBlock as WouldBlock
from ._core._fileio import AsyncFile as AsyncFile
from ._core._fileio import Path as Path
from ._core._fileio import open_file as open_file
from ._core._fileio import wrap_file as wrap_file
from ._core._resources import aclose_forcefully as aclose_forcefully
from ._core._signals import open_signal_receiver as open_signal_receiver
from ._core._sockets import connect_tcp as connect_tcp
from ._core._sockets import connect_unix as connect_unix
from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket
from ._core._sockets import (
    create_connected_unix_datagram_socket as create_connected_unix_datagram_socket,
)
from ._core._sockets import create_tcp_listener as create_tcp_listener
from ._core._sockets import create_udp_socket as create_udp_socket
from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket
from ._core._sockets import create_unix_listener as create_unix_listener
from ._core._sockets import getaddrinfo as getaddrinfo
from ._core._sockets import getnameinfo as getnameinfo
from ._core._sockets import wait_readable as wait_readable
from ._core._sockets import wait_socket_readable as wait_socket_readable
from ._core._sockets import wait_socket_writable as wait_socket_writable
from ._core._sockets import wait_writable as wait_writable
from ._core._streams import create_memory_object_stream as create_memory_object_stream
from ._core._subprocesses import open_process as open_process
from ._core._subprocesses import run_process as run_process
from ._core._synchronization import CapacityLimiter as CapacityLimiter
from ._core._synchronization import (
    CapacityLimiterStatistics as CapacityLimiterStatistics,
)
from ._core._synchronization import Condition as Condition
from ._core._synchronization import ConditionStatistics as ConditionStatistics
from ._core._synchronization import Event as Event
from ._core._synchronization import EventStatistics as EventStatistics
from ._core._synchronization import Lock as Lock
from ._core._synchronization import LockStatistics as LockStatistics
from ._core._synchronization import ResourceGuard as ResourceGuard
from ._core._synchronization import Semaphore as Semaphore
from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics
from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED
from ._core._tasks import CancelScope as CancelScope
from ._core._tasks import create_task_group as create_task_group
from ._core._tasks import current_effective_deadline as current_effective_deadline
from ._core._tasks import fail_after as fail_after
from ._core._tasks import move_on_after as move_on_after
from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile
from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile
from ._core._tempfile import TemporaryDirectory as TemporaryDirectory
from ._core._tempfile import TemporaryFile as TemporaryFile
from ._core._tempfile import gettempdir as gettempdir
from ._core._tempfile import gettempdirb as gettempdirb
from ._core._tempfile import mkdtemp as mkdtemp
from ._core._tempfile import mkstemp as mkstemp
from ._core._testing import TaskInfo as TaskInfo
from ._core._testing import get_current_task as get_current_task
from ._core._testing import get_running_tasks as get_running_tasks
from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked
from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider
from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
from ._core._typedattr import typed_attribute as typed_attribute

# Re-export imports so they look like they live directly in this package
for __value in list(locals().values()):
    if getattr(__value, "__module__", "").startswith("anyio."):
        __value.__module__ = __name__

del __value

BIN  venv/lib/python3.10/site-packages/anyio/__pycache__/__init__.cpython-310.pyc


Some files were not shown because too many files were changed in this diff.
