first commit
commit 83702464be
@ -0,0 +1,2 @@
__pycache__
.DS_Store
@ -0,0 +1,324 @@
import os
from os import remove
import zipfile

import pandas as pd
import shutil

from database.db import engine

from sqlalchemy import desc, text
from sqlalchemy.orm import Session
from sqlalchemy.sql import exists

from models.carga_gtfs import CargaGtfs
from models.agency import Agency
from models.calendar import Calendar
from models.route import Route
from models.shape_refence import ShapeReference
from models.shapes import Shapes
from models.stop_times import StopTimes
from models.stops import Stop
from models.trip import Trip

def actualizar_estado(id_carga_gtfs: int, estado: str, mensaje: str, db: Session):
    # Record the current status and message of the GTFS load in carga_gtfs.
    zip = db.query(CargaGtfs).filter(CargaGtfs.id_carga_gtfs == id_carga_gtfs).first()
    zip.estado = estado
    zip.mensaje = mensaje
    db.commit()


def verificar_columnas(dataframe, columnas_necesarias, archivo, zip, db):
    # Keep only the required GTFS columns; abort the load if any is missing.
    columnas_sobrantes = [columna for columna in dataframe.columns if columna not in columnas_necesarias]
    columnas_faltantes = [columna for columna in columnas_necesarias if columna not in dataframe.columns]

    dataframe_filtrado = dataframe.drop(columnas_sobrantes, axis=1)

    if len(columnas_faltantes) > 0:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje=f"Columnas faltantes en archivo {archivo}, por favor asegúrese de que el archivo contenga las siguientes columnas: {', '.join(columnas_faltantes)}", db=db)
        raise ValueError(f"Columnas faltantes en archivo {archivo}, por favor asegúrese de que el archivo contenga las siguientes columnas: {', '.join(columnas_faltantes)}")
    else:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje=f"Archivo '{archivo}' leído y comprobado que contenga todas las columnas necesarias, siguiendo proceso...", db=db)
        return dataframe_filtrado


def cargar_datos(df, zip, tabla, db, engine):
    # Bulk-insert the dataframe into its table and record the outcome.
    # On failure the error is stored in carga_gtfs and processing continues.
    try:
        df.to_sql(tabla, engine, if_exists='append', index=False)
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje=f"Archivo '{tabla}.txt' ha sido cargado a la tabla, siguiendo proceso...", db=db)
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje=f"Se presentaron problemas con el archivo '{tabla}.txt' al momento de ingresar los datos a la tabla.", db=db)
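
# Possible speed-up for large feeds (a sketch, not wired into the pipeline):
# pandas.DataFrame.to_sql accepts chunksize and method parameters, so a very
# large file such as stop_times.txt can be inserted in batched multi-row
# INSERTs. cargar_datos_por_lotes is a hypothetical helper, not referenced
# elsewhere in this commit.
def cargar_datos_por_lotes(df, tabla, engine):
    # Same contract as cargar_datos above, minus the status bookkeeping.
    df.to_sql(tabla, engine, if_exists='append', index=False,
              chunksize=10_000, method='multi')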

def extraer_zip_futuro(zip, zip_ruta, db):
    # Identical to extraer_zip below; kept as a separate entry point.
    with zipfile.ZipFile(zip_ruta, 'r') as archivo_zip:
        archivo_zip.extractall('txts/')
    actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje="Extrayendo archivos del último ZIP ingresado.", db=db)


def extraer_zip(zip, zip_ruta, db):
    # Unpack the uploaded GTFS ZIP into the txts/ working directory.
    with zipfile.ZipFile(zip_ruta, 'r') as archivo_zip:
        archivo_zip.extractall('txts/')
    actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje="Extrayendo archivos del último ZIP ingresado.", db=db)


def identificar_zip(db: Session):
    # The most recent carga_gtfs row identifies the ZIP to process.
    zip = db.query(CargaGtfs).order_by(desc('fecha')).first()
    return zip


def inspeccionar_txts(zip, almacen_txts: str, zip_ruta: str, db: Session):
    # Check that every expected GTFS file arrived; clean up and abort if not.
    archivos_en_txt = os.listdir(almacen_txts)
    archivos_esperados = ['agency.txt', 'calendar.txt', 'routes.txt', 'stop_times.txt', 'stops.txt', 'trips.txt', 'shapes.txt']
    archivos_faltantes = [archivo for archivo in archivos_esperados if archivo not in archivos_en_txt]

    if len(archivos_faltantes) > 0:
        for archivo in archivos_en_txt:
            archivo_ruta = os.path.join(almacen_txts, archivo)
            os.remove(archivo_ruta)
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje="Archivos Faltantes, por favor ingrese los siguientes archivos: " + ",".join(archivos_faltantes), db=db)

        remove(zip_ruta)
        raise ValueError("Archivos Faltantes, por favor ingrese los siguientes archivos: " + ",".join(archivos_faltantes))

    # Drop any unexpected extra files so only the seven GTFS tables remain.
    for archivo in archivos_en_txt:
        if archivo not in archivos_esperados:
            os.remove(os.path.join(almacen_txts, archivo))

    actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje="Total de archivos esperados, siguiendo proceso...", db=db)

def transformar_txts(zip, db: Session):
    # Read each GTFS text file into a dataframe, validate its columns and tag
    # every row with the id of the current load.
    id_carga_gtfs = zip.id_carga_gtfs

    try:
        df_agency = pd.read_csv('txts/agency.txt', dtype=str)
        columnas_necesarias_agency = ['agency_id', 'agency_name', 'agency_url', 'agency_timezone', 'agency_lang', 'agency_phone', 'agency_fare_url']
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje="Se presentaron problemas al momento de transformar el archivo agency.txt a dataframe", db=db)
        raise  # without this, the lines below would fail on an undefined dataframe
    df_agency = verificar_columnas(df_agency, columnas_necesarias_agency, 'agency.txt', zip, db)
    df_agency['id_carga_gtfs'] = id_carga_gtfs

    try:
        df_calendar = pd.read_csv('txts/calendar.txt', dtype=str)
        columnas_necesarias_calendar = ['service_id', 'start_date', 'end_date', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje="Se presentaron problemas al momento de transformar el archivo calendar.txt a dataframe", db=db)
        raise
    df_calendar = verificar_columnas(df_calendar, columnas_necesarias_calendar, 'calendar.txt', zip, db)
    df_calendar['id_carga_gtfs'] = id_carga_gtfs

    try:
        df_routes = pd.read_csv('txts/routes.txt', dtype=str)
        columnas_necesarias_route = ['route_id', 'agency_id', 'route_short_name', 'route_long_name', 'route_desc', 'route_type', 'route_url', 'route_color', 'route_text_color']
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje="Se presentaron problemas al momento de transformar el archivo routes.txt a dataframe", db=db)
        raise
    df_routes = verificar_columnas(df_routes, columnas_necesarias_route, 'routes.txt', zip, db)
    df_routes['id_carga_gtfs'] = id_carga_gtfs
    # Because every column is read as str, route_id has to be cast back to int
    # so it matches the column type in the database.
    df_routes['route_id'] = df_routes['route_id'].astype(int)

    try:
        df_stop_times = pd.read_csv('txts/stop_times.txt', dtype=str)
        columnas_necesarias_stop_times = ['trip_id', 'arrival_time', 'departure_time', 'stop_id', 'stop_sequence', 'stop_headsign', 'pickup_type', 'drop_off_type', 'timepoint']
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje="Se presentaron problemas al momento de transformar el archivo stop_times.txt a dataframe", db=db)
        raise
    df_stop_times = verificar_columnas(df_stop_times, columnas_necesarias_stop_times, 'stop_times.txt', zip, db)
    df_stop_times['id_carga_gtfs'] = id_carga_gtfs

    try:
        df_stops = pd.read_csv('txts/stops.txt', dtype=str)
        columnas_necesarias_stops = ['stop_id', 'stop_code', 'stop_name', 'stop_desc', 'stop_lat', 'stop_lon', 'zone_id', 'stop_url', 'location_type', 'parent_station', 'wheelchair_boarding']
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje="Se presentaron problemas al momento de transformar el archivo stops.txt a dataframe", db=db)
        raise
    df_stops = verificar_columnas(df_stops, columnas_necesarias_stops, 'stops.txt', zip, db)
    df_stops['id_carga_gtfs'] = id_carga_gtfs
    df_stops['stop_id_interno'] = range(1, len(df_stops) + 1)

    try:
        df_trips = pd.read_csv('txts/trips.txt', dtype=str)
        columnas_necesarias_trips = ['route_id', 'service_id', 'trip_id', 'trip_headsign', 'trip_short_name', 'direction_id', 'block_id', 'shape_id', 'wheelchair_accessible', 'bikes_allowed']
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje="Se presentaron problemas al momento de transformar el archivo trips.txt a dataframe", db=db)
        raise
    df_trips = verificar_columnas(df_trips, columnas_necesarias_trips, 'trips.txt', zip, db)
    df_trips['id_carga_gtfs'] = id_carga_gtfs
    df_trips.rename(columns={'shape_id': 'shape_reference_id'}, inplace=True)
    # Same cast as for routes: route_id must be int to match the database.
    df_trips['route_id'] = df_trips['route_id'].astype(int)

    try:
        df_shapes = pd.read_csv('txts/shapes.txt', dtype=str)
        columnas_necesarias_shapes = ['shape_id', 'shape_pt_sequence', 'shape_pt_lat', 'shape_pt_lon']
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje="Se presentaron problemas al momento de transformar el archivo shapes.txt a dataframe", db=db)
        raise
    df_shapes = verificar_columnas(df_shapes, columnas_necesarias_shapes, 'shapes.txt', zip, db)
    df_shapes['id_carga_gtfs'] = id_carga_gtfs

    # Build the dataframe for the shape_reference table: one row per distinct shape_id.
    df_shapes_reference = df_shapes[['shape_id']].drop_duplicates().reset_index(drop=True)
    df_shapes_reference['id_carga_gtfs'] = id_carga_gtfs

    return df_agency, df_calendar, df_routes, df_stop_times, df_stops, df_trips, df_shapes, df_shapes_reference

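
# The eight try/except blocks in transformar_txts all follow the same shape;
# a helper like this could collapse them (a sketch only; leer_txt is
# hypothetical and not called anywhere in this commit):
def leer_txt(nombre, columnas, zip, db):
    try:
        df = pd.read_csv(f'txts/{nombre}', dtype=str)
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje=f"Se presentaron problemas al momento de transformar el archivo {nombre} a dataframe", db=db)
        raise
    df = verificar_columnas(df, columnas, nombre, zip, db)
    df['id_carga_gtfs'] = zip.id_carga_gtfs
    return df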

def verificar_registros_bd(db: Session):
    tablas = [Agency, Calendar, Route, ShapeReference, Shapes, StopTimes, Stop, Trip]
    for tabla in tablas:
        tiene_registros = db.query(exists().where(tabla.id_carga_gtfs.isnot(None))).scalar()
        if tiene_registros:
            return False  # At least one table has rows: the database is not empty.
    return True  # No table has rows: the database is empty.

def primera_carga(zip, db: Session, engine, df_agency, df_calendar, df_routes, df_stop_times, df_stops, df_trips, df_shapes, df_shapes_reference):
    # Insertion order respects the foreign keys between the tables.
    cargar_datos(df_agency, zip, 'agency', db, engine)
    cargar_datos(df_routes, zip, 'route', db, engine)
    cargar_datos(df_calendar, zip, 'calendar', db, engine)
    cargar_datos(df_stops, zip, 'stop', db, engine)
    cargar_datos(df_shapes_reference, zip, 'shape_reference', db, engine)
    cargar_datos(df_shapes, zip, 'shapes', db, engine)
    cargar_datos(df_trips, zip, 'trip', db, engine)
    cargar_datos(df_stop_times, zip, 'stop_times', db, engine)

def extraer_db(engine):
    # Pull the current contents of every GTFS table back into dataframes.
    with engine.connect() as connection:
        df_db_agency = pd.read_sql_table('agency', con=connection)
        df_db_routes = pd.read_sql_table('route', con=connection)
        df_db_calendar = pd.read_sql_table('calendar', con=connection)
        df_db_stops = pd.read_sql_table('stop', con=connection)
        df_db_shapes_reference = pd.read_sql_table('shape_reference', con=connection)
        df_db_shapes = pd.read_sql_table('shapes', con=connection)
        df_db_trips = pd.read_sql_table('trip', con=connection)
        df_db_stop_times = pd.read_sql_table('stop_times', con=connection)

    return df_db_agency, df_db_routes, df_db_calendar, df_db_stops, df_db_shapes_reference, df_db_shapes, df_db_trips, df_db_stop_times

def eliminar_registros_antiguos(session, df_db, df, modelo, tabla):
    # ORM-based variant of the delete helpers below; not referenced elsewhere
    # in this commit.
    comparar = df_db.merge(df, indicator=True, how='outer')
    registros_antiguos = comparar.loc[lambda x: x['_merge'] == 'left_only'].drop(columns='_merge')
    for i, row in registros_antiguos.iterrows():
        # row[0] assumes the key value sits in the first column of the frame.
        session.query(modelo).filter_by(**{modelo.__table__.primary_key.columns.keys()[0]: row[0]}).delete()
    session.commit()

def eliminar_registros(zip, db, df_db, df, id_tabla, tabla):
    # Compare the table contents against the incoming file and delete the
    # rows that no longer appear in it.
    df_db_sin_id = df_db.drop(columns='id_carga_gtfs')
    df_sin_id = df.drop(columns='id_carga_gtfs')

    comparar = df_db_sin_id.merge(df_sin_id, indicator=True, how='outer')
    registros_antiguos = comparar.loc[lambda x: x['_merge'] == 'left_only'].copy()

    registros_antiguos['id_carga_gtfs'] = df_db.loc[registros_antiguos.index, 'id_carga_gtfs']
    registros_antiguos = registros_antiguos.drop(columns='_merge')
    try:
        registros_a_eliminar = registros_antiguos[id_tabla].tolist()

        if registros_a_eliminar:
            query = text(f"DELETE FROM {tabla} WHERE {id_tabla} IN ({', '.join(map(repr, registros_a_eliminar))})")
            db.execute(query)
            actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje=f"Se eliminaron los registros obsoletos que no se encontraban en el dataframe '{tabla}' entrante.", db=db)
        else:
            actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje=f"No se encontraron registros obsoletos para eliminar en la tabla '{tabla}'.", db=db)

    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje=f"Se presentaron problemas al momento de eliminar los registros que no se encontraban en el dataframe '{tabla}' entrante.", db=db)
    return registros_antiguos

def eliminar_registros_stop_time(zip, db, df_db, df, pk1, pk2, tabla):
    # Same comparison as eliminar_registros, but stop_times has a composite
    # primary key (trip_id, stop_id), so rows are deleted by key pairs.
    df_db_sin_id = df_db.drop(columns='id_carga_gtfs')
    df_sin_id = df.drop(columns='id_carga_gtfs')

    comparar = df_db_sin_id.merge(df_sin_id, indicator=True, how='outer')
    registros_antiguos = comparar.loc[lambda x: x['_merge'] == 'left_only'].copy()

    registros_antiguos['id_carga_gtfs'] = df_db.loc[registros_antiguos.index, 'id_carga_gtfs']
    registros_antiguos = registros_antiguos.drop(columns='_merge')
    registros_a_eliminar = registros_antiguos[[pk1, pk2]].values.tolist()

    try:
        if registros_a_eliminar:
            registros_a_eliminar_str = ', '.join(f"('{r[0]}', '{r[1]}')" for r in registros_a_eliminar)
            query = text(f"DELETE FROM {tabla} WHERE ({pk1}, {pk2}) IN ({registros_a_eliminar_str})")
            db.execute(query)
            actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje=f"Se eliminaron los registros obsoletos de la tabla '{tabla}'.", db=db)
        else:
            actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje=f"No se encontraron registros obsoletos para eliminar en la tabla '{tabla}'.", db=db)

    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje=f"Hubo un error al eliminar los registros obsoletos de la tabla '{tabla}'.", db=db)
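
# Both delete helpers above interpolate values straight into the SQL string.
# A safer single-key variant (a sketch; eliminar_por_ids is hypothetical and
# not called anywhere in this commit) uses SQLAlchemy's expanding bindparam
# so the driver quotes the values:
from sqlalchemy import bindparam

def eliminar_por_ids(db, tabla, id_tabla, ids):
    # Table and column names must still come from trusted call sites only.
    query = text(f"DELETE FROM {tabla} WHERE {id_tabla} IN :ids").bindparams(
        bindparam('ids', expanding=True))
    db.execute(query, {'ids': ids})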

def agregar_registros(zip, db, df_db, df, tabla):
    # Rows present in the incoming file but not in the table are appended.
    df_db_sin_id = df_db.drop(columns='id_carga_gtfs')
    df_sin_id = df.drop(columns='id_carga_gtfs')
    comparar = df_db_sin_id.merge(df_sin_id, indicator=True, how='outer')

    registros_nuevos = comparar.loc[lambda x: x['_merge'] == 'right_only'].copy()

    registros_nuevos['id_carga_gtfs'] = df.loc[registros_nuevos.index]['id_carga_gtfs']
    registros_nuevos = registros_nuevos.drop(columns='_merge')

    try:
        # Uses the module-level engine imported from database.db.
        registros_nuevos.to_sql(tabla, engine, if_exists='append', index=False)
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje=f"Se agregaron los registros nuevos en la tabla de '{tabla}'.", db=db)
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje=f"Se presentaron problemas al momento de agregar registros nuevos en la tabla de '{tabla}'.", db=db)

    return registros_nuevos

def agregar_registros_stops(zip, db, df_db, df, tabla):
    # Like agregar_registros, but new stop rows also need a fresh consecutive
    # stop_id_interno, continuing from the highest value already stored.
    df_db_sin_id = df_db.drop(columns=['id_carga_gtfs', 'stop_id_interno'])
    df_sin_id = df.drop(columns='id_carga_gtfs')

    comparar = df_db_sin_id.merge(df_sin_id, on=list(df_db_sin_id.columns), indicator=True, how='outer')
    registros_nuevos = comparar.loc[lambda x: x['_merge'] == 'right_only'].copy()
    registros_nuevos['id_carga_gtfs'] = df.loc[registros_nuevos.index]['id_carga_gtfs']
    registros_nuevos = registros_nuevos.drop(columns='_merge')

    max_existing_id = df_db['stop_id_interno'].max()
    registros_nuevos['stop_id_interno'] = range(max_existing_id + 1, max_existing_id + 1 + len(registros_nuevos))

    try:
        registros_nuevos.to_sql(tabla, engine, if_exists='append', index=False)
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Procesando", mensaje=f"Se agregaron los registros nuevos en la tabla de '{tabla}'.", db=db)
    except Exception:
        actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Error", mensaje=f"Se presentaron problemas al momento de agregar registros nuevos en la tabla de '{tabla}'.", db=db)

def verificar_registros_antiguos(zip, db: Session, df_agency, df_calendar, df_routes, df_stop_times, df_stops, df_trips, df_shapes, df_shapes_reference, df_db_agency, df_db_routes, df_db_calendar, df_db_stops, df_db_shapes_reference, df_db_shapes, df_db_trips, df_db_stop_times):
    # Deletion order is the reverse of the foreign-key dependencies.
    eliminar_registros_stop_time(zip, db, df_db_stop_times, df_stop_times, 'trip_id', 'stop_id', 'stop_times')
    eliminar_registros(zip, db, df_db_trips, df_trips, 'trip_id', 'trip')
    eliminar_registros(zip, db, df_db_shapes, df_shapes, 'shape_id', 'shapes')
    eliminar_registros(zip, db, df_db_shapes_reference, df_shapes_reference, 'shape_id', 'shape_reference')
    eliminar_registros(zip, db, df_db_calendar, df_calendar, 'service_id', 'calendar')
    eliminar_registros(zip, db, df_db_routes, df_routes, 'route_id', 'route')
    eliminar_registros(zip, db, df_db_agency, df_agency, 'agency_id', 'agency')
    eliminar_registros(zip, db, df_db_stops, df_stops, 'stop_id', 'stop')


def verificar_registros_nuevos(zip, db: Session, df_agency, df_calendar, df_routes, df_stop_times, df_stops, df_trips, df_shapes, df_shapes_reference, df_db_agency, df_db_routes, df_db_calendar, df_db_stops, df_db_shapes_reference, df_db_shapes, df_db_trips, df_db_stop_times):
    # Insertion order mirrors primera_carga, respecting the foreign keys.
    agregar_registros(zip, db, df_db_agency, df_agency, 'agency')
    agregar_registros(zip, db, df_db_routes, df_routes, 'route')
    agregar_registros(zip, db, df_db_calendar, df_calendar, 'calendar')
    agregar_registros(zip, db, df_db_shapes_reference, df_shapes_reference, 'shape_reference')
    agregar_registros(zip, db, df_db_shapes, df_shapes, 'shapes')
    agregar_registros(zip, db, df_db_trips, df_trips, 'trip')
    agregar_registros_stops(zip, db, df_db_stops, df_stops, 'stop')
    agregar_registros(zip, db, df_db_stop_times, df_stop_times, 'stop_times')

def terminar_carga(zip, db, zips_procesados):
    # Re-zip the cleaned txts/ directory into the processed-ZIPs folder.
    shutil.make_archive(zips_procesados + zip.zip + '_procesado', 'zip', 'txts')
    actualizar_estado(id_carga_gtfs=zip.id_carga_gtfs, estado="Terminado", mensaje='Nuevo archivo ZIP generado, contiene únicamente archivos esperados', db=db)

@ -0,0 +1,12 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base
from sqlalchemy.ext.declarative import DeferredReflection

# Database configuration
DATABASE_URL = "postgresql://postgres:5314806Jair@localhost:5432/fastApi_gtfs"
engine = create_engine(DATABASE_URL)

# Base class for the models, built on DeferredReflection so table definitions
# are completed from the live database via Base.prepare(engine).
Base = declarative_base(cls=DeferredReflection)
Base.metadata.bind = engine
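
# The URL above commits credentials to source control. A common alternative
# (a sketch, assuming a DATABASE_URL environment variable is provided by the
# deployment) would be:
#
#   import os
#   DATABASE_URL = os.environ.get(
#       "DATABASE_URL",
#       "postgresql://postgres:5314806Jair@localhost:5432/fastApi_gtfs")
#   engine = create_engine(DATABASE_URL)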
@ -0,0 +1,6 @@
from fastapi import FastAPI
from router.router import carga_gtfs

app = FastAPI()

app.include_router(carga_gtfs)
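
# To serve the app locally one would typically run something like:
#   uvicorn main:app --reload
# (the uvicorn entry point is present in venv/bin below).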
@ -0,0 +1,18 @@
from sqlalchemy import ForeignKey, Column, String, Integer
from database.db import Base


class Agency(Base):
    __tablename__ = 'agency'

    id_carga_gtfs = Column(Integer, ForeignKey('carga_gtfs.id_carga_gtfs'), nullable=True)
    agency_id = Column(String(255), primary_key=True)
    agency_name = Column(String(255))
    agency_url = Column(String(255))
    agency_timezone = Column(String(50))
    agency_lang = Column(String(10))
    agency_phone = Column(String(50))
    agency_fare_url = Column(String(255))

    def __repr__(self):
        return f"{self.agency_name}"

@ -0,0 +1,21 @@
from sqlalchemy import ForeignKey, Column, String, Integer
from database.db import Base


class Calendar(Base):
    __tablename__ = 'calendar'

    id_carga_gtfs = Column(Integer, ForeignKey('carga_gtfs.id_carga_gtfs'), nullable=False)
    service_id = Column(String(50), primary_key=True)
    start_date = Column(String(50))
    end_date = Column(String(50))
    monday = Column(String)
    tuesday = Column(String)
    wednesday = Column(String)
    thursday = Column(String)
    friday = Column(String)
    saturday = Column(String)
    sunday = Column(String)

    def __repr__(self):
        return self.service_id

@ -0,0 +1,15 @@
from sqlalchemy import DateTime, Column, String, Integer, func
from database.db import Base


class CargaGtfs(Base):
    __tablename__ = 'carga_gtfs'

    id_carga_gtfs = Column(Integer, primary_key=True, autoincrement=True)
    id_usuario = Column(String(255))
    zip = Column(String(255))
    fecha = Column(DateTime, server_default=func.now())
    estado = Column(String(50))
    mensaje = Column(String(255))

    def __repr__(self):
        return f"{self.id_usuario}, {self.zip}, {self.fecha}, {self.estado}, {self.mensaje}"

@ -0,0 +1,21 @@
from sqlalchemy import ForeignKey, Column, String, Integer
from database.db import Base


class Route(Base):
    __tablename__ = 'route'

    id_carga_gtfs = Column(Integer, ForeignKey('carga_gtfs.id_carga_gtfs'), nullable=False)
    route_id = Column(Integer, primary_key=True)
    agency_id = Column(String(255), ForeignKey('agency.agency_id'), nullable=False)
    route_short_name = Column(String(255))
    route_long_name = Column(String(255))
    route_desc = Column(String(255))
    route_type = Column(String(50))
    route_url = Column(String(255))
    route_color = Column(String(50))
    route_text_color = Column(String(50))

    def __repr__(self):
        return self.route_short_name

@ -0,0 +1,12 @@
from sqlalchemy import ForeignKey, Column, String, Integer
from database.db import Base


class ShapeReference(Base):
    __tablename__ = 'shape_reference'

    id_carga_gtfs = Column(Integer, ForeignKey('carga_gtfs.id_carga_gtfs'), nullable=False)
    shape_id = Column(String(50), primary_key=True)

    def __repr__(self):
        return f"Shape ID: {self.shape_id}"

@ -0,0 +1,15 @@
from sqlalchemy import ForeignKey, Column, String, Integer
from database.db import Base


class Shapes(Base):
    __tablename__ = 'shapes'

    id_carga_gtfs = Column(Integer, ForeignKey('carga_gtfs.id_carga_gtfs'), nullable=False)
    shape_id = Column(String(50), ForeignKey('shape_reference.shape_id'), primary_key=True)
    shape_pt_lat = Column(String)
    shape_pt_lon = Column(String)
    shape_pt_sequence = Column(String, primary_key=True)

    def __repr__(self):
        return f"Shape ID: {self.shape_id}, Lat: {self.shape_pt_lat}, Lon: {self.shape_pt_lon}"

@ -0,0 +1,21 @@
from sqlalchemy import ForeignKey, Column, String, Integer
from database.db import Base


class StopTimes(Base):
    __tablename__ = 'stop_times'

    id_carga_gtfs = Column(Integer, ForeignKey('carga_gtfs.id_carga_gtfs'), nullable=False)
    trip_id = Column(String(50), ForeignKey('trip.trip_id'), nullable=False, primary_key=True)
    arrival_time = Column(String(50))
    departure_time = Column(String(50))
    stop_id = Column(String(50), ForeignKey('stop.stop_id'), nullable=False, primary_key=True)
    stop_sequence = Column(String(50))
    stop_headsign = Column(String(255))
    pickup_type = Column(String(50))
    drop_off_type = Column(String(50))
    timepoint = Column(String(50))

    def __repr__(self):
        return f"Trip ID: {self.trip_id}, Stop ID: {self.stop_id}"

@ -0,0 +1,23 @@
from sqlalchemy import ForeignKey, Column, String, Integer
from database.db import Base


class Stop(Base):
    __tablename__ = 'stop'

    id_carga_gtfs = Column(Integer, ForeignKey('carga_gtfs.id_carga_gtfs'), nullable=False)
    stop_id_interno = Column(Integer)
    stop_id = Column(String(50), primary_key=True)
    stop_code = Column(String(50))
    stop_name = Column(String(255))
    stop_desc = Column(String(255))
    stop_lat = Column(String(50))
    stop_lon = Column(String(50))
    zone_id = Column(String(50))
    stop_url = Column(String(255))
    location_type = Column(String(50))
    parent_station = Column(String(50))
    wheelchair_boarding = Column(String(50))

    def __repr__(self):
        return self.stop_name

@ -0,0 +1,21 @@
from sqlalchemy import ForeignKey, Column, String, Integer
from database.db import Base


class Trip(Base):
    __tablename__ = 'trip'

    id_carga_gtfs = Column(Integer, ForeignKey('carga_gtfs.id_carga_gtfs'), nullable=False)
    route_id = Column(Integer, ForeignKey('route.route_id'), nullable=False)
    service_id = Column(String(50), ForeignKey('calendar.service_id'), nullable=False)
    shape_reference_id = Column(String(50), ForeignKey('shape_reference.shape_id'), nullable=False)
    trip_id = Column(String(50), primary_key=True)
    trip_headsign = Column(String(255))
    trip_short_name = Column(String(255))
    direction_id = Column(String(50))
    block_id = Column(String(50))
    wheelchair_accessible = Column(String(50))
    bikes_allowed = Column(String(50))

    def __repr__(self):
        return f"Trip ID: {self.trip_id}, Route ID: {self.route_id}, Service ID: {self.service_id}"
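
# Reminder: because Base is built on DeferredReflection (database/db.py),
# these model classes are completed against the live schema only after
# Base.prepare(engine) runs; router/router.py does that at import time. A
# minimal standalone session would look like this sketch:
#
#   from database.db import engine, Base
#   from models.trip import Trip
#   from sqlalchemy.orm import Session
#
#   Base.prepare(engine)
#   with Session(engine) as session:
#       print(session.query(Trip).count())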
@ -0,0 +1,94 @@
from fastapi import APIRouter, Depends
from sqlalchemy.orm import sessionmaker, Session
from database.db import engine, Base
from carga.procesos import extraer_zip, identificar_zip, inspeccionar_txts, transformar_txts, verificar_registros_bd, primera_carga, extraer_db, verificar_registros_antiguos, verificar_registros_nuevos, terminar_carga

# Complete the DeferredReflection models against the live database.
Base.prepare(engine)

# One session factory for the module; get_db hands out one session per request.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


almacen_zips = 'zip/'
almacen_txts = 'txts/'
zips_procesados = 'zips_procesados/'

carga_gtfs = APIRouter()


@carga_gtfs.get("/")
def root():
    return "Página principal. Use /docs en la URL para probar la carga."


@carga_gtfs.post("/carga")
async def carga(db: Session = Depends(get_db)):
    # 1. Locate the most recent upload and unpack it into txts/.
    zip_info = identificar_zip(db=db)
    zip_ruta = almacen_zips + zip_info.zip + ".zip"

    extraer_zip(zip=zip_info, zip_ruta=zip_ruta, db=db)
    inspeccionar_txts(zip=zip_info, almacen_txts=almacen_txts, zip_ruta=zip_ruta, db=db)

    # 2. Turn the seven GTFS files into validated dataframes.
    df_agency, df_calendar, df_routes, df_stop_times, df_stops, df_trips, df_shapes, df_shapes_reference = transformar_txts(zip=zip_info, db=db)

    # 3. First load goes straight in; later loads are diffed against the database.
    bd_vacia = verificar_registros_bd(db=db)
    if bd_vacia:
        primera_carga(zip=zip_info,
                      engine=engine,
                      df_agency=df_agency,
                      df_calendar=df_calendar,
                      df_routes=df_routes,
                      df_stop_times=df_stop_times,
                      df_stops=df_stops,
                      df_trips=df_trips,
                      df_shapes=df_shapes,
                      df_shapes_reference=df_shapes_reference,
                      db=db)
    else:
        df_db_agency, df_db_routes, df_db_calendar, df_db_stops, df_db_shapes_reference, df_db_shapes, df_db_trips, df_db_stop_times = extraer_db(engine=engine)

        verificar_registros_antiguos(zip=zip_info,
                                     df_agency=df_agency,
                                     df_calendar=df_calendar,
                                     df_routes=df_routes,
                                     df_stop_times=df_stop_times,
                                     df_stops=df_stops,
                                     df_trips=df_trips,
                                     df_shapes=df_shapes,
                                     df_shapes_reference=df_shapes_reference,
                                     df_db_agency=df_db_agency,
                                     df_db_routes=df_db_routes,
                                     df_db_calendar=df_db_calendar,
                                     df_db_stops=df_db_stops,
                                     df_db_shapes_reference=df_db_shapes_reference,
                                     df_db_shapes=df_db_shapes,
                                     df_db_trips=df_db_trips,
                                     df_db_stop_times=df_db_stop_times,
                                     db=db)

        verificar_registros_nuevos(zip=zip_info,
                                   df_agency=df_agency,
                                   df_calendar=df_calendar,
                                   df_routes=df_routes,
                                   df_stop_times=df_stop_times,
                                   df_stops=df_stops,
                                   df_trips=df_trips,
                                   df_shapes=df_shapes,
                                   df_shapes_reference=df_shapes_reference,
                                   df_db_agency=df_db_agency,
                                   df_db_routes=df_db_routes,
                                   df_db_calendar=df_db_calendar,
                                   df_db_stops=df_db_stops,
                                   df_db_shapes_reference=df_db_shapes_reference,
                                   df_db_shapes=df_db_shapes,
                                   df_db_trips=df_db_trips,
                                   df_db_stop_times=df_db_stop_times,
                                   db=db)

    terminar_carga(zip=zip_info, zips_procesados=zips_procesados, db=db)

    return "Proceso Finalizado"
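
# A quick way to exercise the endpoint without a browser (a sketch, using
# FastAPI's bundled test client; it assumes a pending carga_gtfs row and the
# matching ZIP already exist under zip/):
#
#   from fastapi.testclient import TestClient
#   from main import app
#
#   client = TestClient(app)
#   respuesta = client.post("/carga")
#   print(respuesta.status_code, respuesta.json())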
@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.

.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.

.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.

.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').

.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.

.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.

.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.

.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.

.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:

PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser

For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170

#>
Param(
    [Parameter(Mandatory = $false)]
    [String]
    $VenvDir,
    [Parameter(Mandatory = $false)]
    [String]
    $Prompt
)

<# Function declarations --------------------------------------------------- #>

<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.

.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.

#>
function global:deactivate ([switch]$NonDestructive) {
    # Revert to original values

    # The prior prompt:
    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
    }

    # The prior PYTHONHOME:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
    }

    # The prior PATH:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
    }

    # Just remove the VIRTUAL_ENV altogether:
    if (Test-Path -Path Env:VIRTUAL_ENV) {
        Remove-Item -Path env:VIRTUAL_ENV
    }

    # Just remove VIRTUAL_ENV_PROMPT altogether.
    if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
        Remove-Item -Path env:VIRTUAL_ENV_PROMPT
    }

    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
    }

    # Leave deactivate function in the global namespace if requested:
    if (-not $NonDestructive) {
        Remove-Item -Path function:deactivate
    }
}

<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.

For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.

If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.

.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
    [String]
    $ConfigDir
) {
    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"

    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue

    # An empty map will be returned if no config file is found.
    $pyvenvConfig = @{ }

    if ($pyvenvConfigPath) {

        Write-Verbose "File exists, parse `key = value` lines"
        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath

        $pyvenvConfigContent | ForEach-Object {
            $keyval = $PSItem -split "\s*=\s*", 2
            if ($keyval[0] -and $keyval[1]) {
                $val = $keyval[1]

                # Remove extraneous quotations around a string value.
                if ("'""".Contains($val.Substring(0, 1))) {
                    $val = $val.Substring(1, $val.Length - 2)
                }

                $pyvenvConfig[$keyval[0]] = $val
                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
            }
        }
    }
    return $pyvenvConfig
}


<# Begin Activate script --------------------------------------------------- #>

# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath

Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"

# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
    Write-Verbose "VenvDir=$VenvDir"
}

# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir

# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
    Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
        $Prompt = $pyvenvCfg['prompt'];
    }
    else {
        Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
        $Prompt = Split-Path -Path $venvDir -Leaf
    }
}

Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"

# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive

# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir

if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {

    Write-Verbose "Setting prompt to '$Prompt'"

    # Set the prompt to include the env name
    # Make sure _OLD_VIRTUAL_PROMPT is global
    function global:_OLD_VIRTUAL_PROMPT { "" }
    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt

    function global:prompt {
        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
        _OLD_VIRTUAL_PROMPT
    }
    $env:VIRTUAL_ENV_PROMPT = $Prompt
}

# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
    Remove-Item -Path Env:PYTHONHOME
}

# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
@ -0,0 +1,69 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r 2> /dev/null
    fi

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    unset VIRTUAL_ENV_PROMPT
    if [ ! "${1:-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    PS1="(venv) ${PS1:-}"
    export PS1
    VIRTUAL_ENV_PROMPT="(venv) "
    export VIRTUAL_ENV_PROMPT
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r 2> /dev/null
fi
@ -0,0 +1,26 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    set prompt = "(venv) $prompt"
    setenv VIRTUAL_ENV_PROMPT "(venv) "
endif

alias pydoc python -m pydoc

rehash
@ -0,0 +1,66 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/); you cannot run it directly.

function deactivate -d "Exit virtual environment and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
    end

    set -e VIRTUAL_ENV
    set -e VIRTUAL_ENV_PROMPT
    if test "$argv[1]" != "nondestructive"
        # Self-destruct!
        functions -e deactivate
    end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV "/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# Unset PYTHONHOME if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # Save the current fish_prompt function as the function _old_fish_prompt.
    functions -c fish_prompt _old_fish_prompt

    # With the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command.
        set -l old_status $status

        # Output the venv prompt; color taken from the blue of the Python logo.
        printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal)

        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        # Output the original/"old" prompt.
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
    set -gx VIRTUAL_ENV_PROMPT "(venv) "
end
@ -0,0 +1,8 @@
#!/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@ -0,0 +1,8 @@
#!/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@ -0,0 +1,8 @@
#!/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@ -0,0 +1,8 @@
#!/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@ -0,0 +1,8 @@
#!/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@ -0,0 +1,8 @@
#!/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@ -0,0 +1 @@
python3

@ -0,0 +1 @@
/Users/abner/.pyenv/versions/3.10.1/bin/python3

@ -0,0 +1 @@
python3

@ -0,0 +1,8 @@
#!/Users/abner/Desktop/proyecto_titulo/cargaAPI/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from uvicorn.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@ -0,0 +1 @@
pip
@ -0,0 +1,19 @@
|
||||||
|
Copyright 2005-2023 SQLAlchemy authors and contributors <see AUTHORS file>.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||||
|
of the Software, and to permit persons to whom the Software is furnished to do
|
||||||
|
so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
|
@ -0,0 +1,238 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 2.0.19
Summary: Database Abstraction Library
Home-page: https://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: typing-extensions (>=4.2.0)
Requires-Dist: greenlet (!=0.4.17) ; platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))
Requires-Dist: importlib-metadata ; python_version < "3.8"
Provides-Extra: aiomysql
Requires-Dist: greenlet (!=0.4.17) ; extra == 'aiomysql'
Requires-Dist: aiomysql ; extra == 'aiomysql'
Provides-Extra: aiosqlite
Requires-Dist: greenlet (!=0.4.17) ; extra == 'aiosqlite'
Requires-Dist: aiosqlite ; extra == 'aiosqlite'
Requires-Dist: typing-extensions (!=3.10.0.1) ; extra == 'aiosqlite'
Provides-Extra: asyncio
Requires-Dist: greenlet (!=0.4.17) ; extra == 'asyncio'
Provides-Extra: asyncmy
Requires-Dist: greenlet (!=0.4.17) ; extra == 'asyncmy'
Requires-Dist: asyncmy (!=0.2.4,!=0.2.6,>=0.2.3) ; extra == 'asyncmy'
Provides-Extra: mariadb_connector
Requires-Dist: mariadb (!=1.1.2,!=1.1.5,>=1.0.1) ; extra == 'mariadb_connector'
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mypy
Requires-Dist: mypy (>=0.910) ; extra == 'mypy'
Provides-Extra: mysql
Requires-Dist: mysqlclient (>=1.4.0) ; extra == 'mysql'
Provides-Extra: mysql_connector
Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
Provides-Extra: oracle
Requires-Dist: cx-oracle (>=7) ; extra == 'oracle'
Provides-Extra: oracle_oracledb
Requires-Dist: oracledb (>=1.0.1) ; extra == 'oracle_oracledb'
Provides-Extra: postgresql
Requires-Dist: psycopg2 (>=2.7) ; extra == 'postgresql'
Provides-Extra: postgresql_asyncpg
Requires-Dist: greenlet (!=0.4.17) ; extra == 'postgresql_asyncpg'
Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 (>=1.29.1) ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg
Requires-Dist: psycopg (>=3.0.7) ; extra == 'postgresql_psycopg'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: postgresql_psycopgbinary
Requires-Dist: psycopg[binary] (>=3.0.7) ; extra == 'postgresql_psycopgbinary'
Provides-Extra: pymysql
Requires-Dist: pymysql ; extra == 'pymysql'
Provides-Extra: sqlcipher
Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher'

SQLAlchemy
==========

|PyPI| |Python| |Downloads|

.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
    :target: https://pypi.org/project/sqlalchemy
    :alt: PyPI

.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
    :target: https://pypi.org/project/sqlalchemy
    :alt: PyPI - Python Version

.. |Downloads| image:: https://img.shields.io/pypi/dm/sqlalchemy
    :target: https://pypi.org/project/sqlalchemy
    :alt: PyPI - Downloads


The Python SQL Toolkit and Object Relational Mapper

Introduction
-------------

SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.

Major SQLAlchemy features include:

* An industrial strength ORM, built
  from the core on the identity map, unit of work,
  and data mapper patterns. These patterns
  allow transparent persistence of objects
  using a declarative configuration system.
  Domain models
  can be constructed and manipulated naturally,
  and changes are synchronized with the
  current transaction automatically.
* A relationally-oriented query system, exposing
  the full range of SQL's capabilities
  explicitly, including joins, subqueries,
  correlation, and most everything else,
  in terms of the object model.
  Writing queries with the ORM uses the same
  techniques of relational composition you use
  when writing SQL. While you can drop into
  literal SQL at any time, it's virtually never
  needed.
* A comprehensive and flexible system
  of eager loading for related collections and objects.
  Collections are cached within a session,
  and can be loaded on individual access, all
  at once using joins, or by query per collection
  across the full result set.
* A Core SQL construction system and DBAPI
  interaction layer. The SQLAlchemy Core is
  separate from the ORM and is a full database
  abstraction layer in its own right, and includes
  an extensible Python-based SQL expression
  language, schema metadata, connection pooling,
  type coercion, and custom types.
* All primary and foreign key constraints are
  assumed to be composite and natural. Surrogate
  integer primary keys are of course still the
  norm, but SQLAlchemy never assumes or hardcodes
  to this model.
* Database introspection and generation. Database
  schemas can be "reflected" in one step into
  Python structures representing database metadata;
  those same structures can then generate
  CREATE statements right back out - all within
  the Core, independent of the ORM (see the
  sketch after this list).
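
A brief, illustrative sketch of the features above, assuming an
in-memory SQLite database (the ``user`` table and its columns are
invented for this example)::

    from sqlalchemy import (
        Column, Integer, MetaData, String, Table,
        create_engine, insert, select,
    )

    # illustrative sketch only; names are invented for the example
    engine = create_engine("sqlite+pysqlite:///:memory:")

    # schema metadata as plain Python structures
    metadata = MetaData()
    user = Table(
        "user", metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )
    metadata.create_all(engine)  # emits CREATE TABLE for the target backend

    with engine.connect() as conn:
        conn.execute(insert(user), [{"name": "spongebob"}])
        conn.commit()
        # select() renders to backend-appropriate SQL with bound parameters
        for row in conn.execute(select(user.c.name)):
            print(row.name)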

SQLAlchemy's philosophy:

* SQL databases behave less and less like object
  collections the more size and performance start to
  matter; object collections behave less and less like
  tables and rows the more abstraction starts to matter.
  SQLAlchemy aims to accommodate both of these
  principles.
* An ORM doesn't need to hide the "R". A relational
  database provides rich, set-based functionality
  that should be fully exposed. SQLAlchemy's
  ORM provides an open-ended set of patterns
  that allow a developer to construct a custom
  mediation layer between a domain model and
  a relational schema, turning the so-called
  "object relational impedance" issue into
  a distant memory.
* The developer, in all cases, makes all decisions
  regarding the design, structure, and naming conventions
  of both the object model as well as the relational
  schema. SQLAlchemy only provides the means
  to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
  "the ORM generated a bad query" - you
  retain full control over the structure of
  queries, including how joins are organized,
  how subqueries and correlation is used, what
  columns are requested. Everything SQLAlchemy
  does is ultimately the result of a developer-initiated
  decision.
* Don't use an ORM if the problem doesn't need one.
  SQLAlchemy consists of a Core and separate ORM
  component. The Core offers a full SQL expression
  language that allows Pythonic construction
  of SQL constructs that render directly to SQL
  strings for a target database, returning
  result sets that are essentially enhanced DBAPI
  cursors.
* Transactions should be the norm. With SQLAlchemy's
  ORM, nothing goes to permanent storage until
  commit() is called. SQLAlchemy encourages applications
  to create a consistent means of delineating
  the start and end of a series of operations.
* Never render a literal value in a SQL statement.
  Bound parameters are used to the greatest degree
  possible, allowing query optimizers to cache
  query plans effectively and making SQL injection
  attacks a non-issue (see the sketch after this
  list).
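
A small sketch of the bound-parameter point, reusing the ``engine``
from the sketch above (the query and the ``:uid`` parameter name are
invented for this example)::

    from sqlalchemy import text

    # the Python value never appears in the SQL string itself;
    # it travels separately as a bound parameter
    stmt = text("SELECT name FROM user WHERE id = :uid")
    with engine.connect() as conn:
        print(conn.execute(stmt, {"uid": 1}).scalar())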

Documentation
-------------

Latest documentation is at:

https://www.sqlalchemy.org/docs/

Installation / Requirements
---------------------------

Full documentation for installation is at
`Installation <https://www.sqlalchemy.org/docs/intro.html#installation>`_.

Getting Help / Development / Bug reporting
------------------------------------------

Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/support.html>`_.

Code of Conduct
---------------

Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.

License
-------

SQLAlchemy is distributed under the `MIT license
<https://www.opensource.org/licenses/mit-license.php>`_.
@ -0,0 +1,524 @@
SQLAlchemy-2.0.19.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
SQLAlchemy-2.0.19.dist-info/LICENSE,sha256=2lSTeluT1aC-5eJXO8vhkzf93qCSeV_mFXLrv3tNdIU,1100
SQLAlchemy-2.0.19.dist-info/METADATA,sha256=y3sAg2Sqa0TmfrUKPz3F5CEbUiEYNKbMEP2LIp9H04I,9450
SQLAlchemy-2.0.19.dist-info/RECORD,,
SQLAlchemy-2.0.19.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
SQLAlchemy-2.0.19.dist-info/WHEEL,sha256=YkgppTG7ykOlXc_zwnXV9-VBXF-Nf0nL_gt0JCpKtEA,110
SQLAlchemy-2.0.19.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
sqlalchemy/__init__.py,sha256=piFrag5YJurNau3gLucpgZDH-oX3V174hsizS7exViw,12626
sqlalchemy/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/__pycache__/events.cpython-310.pyc,,
sqlalchemy/__pycache__/exc.cpython-310.pyc,,
sqlalchemy/__pycache__/inspection.cpython-310.pyc,,
sqlalchemy/__pycache__/log.cpython-310.pyc,,
sqlalchemy/__pycache__/schema.cpython-310.pyc,,
sqlalchemy/__pycache__/types.cpython-310.pyc,,
sqlalchemy/connectors/__init__.py,sha256=uKUYWQoXyleIyjWBuh7gzgnazJokx3DaasKJbFOfQGA,476
sqlalchemy/connectors/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/connectors/__pycache__/pyodbc.cpython-310.pyc,,
sqlalchemy/connectors/pyodbc.py,sha256=FiOJGpgYZ3mNQOP5th-7IZdY0ro1eUGpYX2gBiGPKTE,8483
sqlalchemy/cyextension/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/cyextension/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/cyextension/collections.cpython-310-darwin.so,sha256=nITBCzFtm2TllPt1CLKZuLujEDjGWmlY0JexswhQFcc,214222
sqlalchemy/cyextension/collections.pyx,sha256=KDI5QTOyYz9gDl-3d7MbGMA0Kc-wxpJqnLmCaUmQy2U,12323
sqlalchemy/cyextension/immutabledict.cpython-310-darwin.so,sha256=bAdyfKQyjX8lPe50AraKsy5AvOHnlHoczeFhIhLiWXg,106064
sqlalchemy/cyextension/immutabledict.pxd,sha256=oc8BbnQwDg7pWAdThB-fzu8s9_ViOe1Ds-8T0r0POjI,41
sqlalchemy/cyextension/immutabledict.pyx,sha256=aQJPZKjcqbO8jHDqpC9F-v-ew2qAjUscc5CntaheZUk,3285
sqlalchemy/cyextension/processors.cpython-310-darwin.so,sha256=UdomFuN3Yt_qijaHd0fKmnMGjzudg8hfLal9a6FUfoI,80381
sqlalchemy/cyextension/processors.pyx,sha256=0swFIBdR19x1kPRe-dijBaLW898AhH6QJizbv4ho9pk,1545
sqlalchemy/cyextension/resultproxy.cpython-310-darwin.so,sha256=7GQvpN0krQpJS-iBppVwkB016mmxGke-sxSjXLJxe_I,83982
sqlalchemy/cyextension/resultproxy.pyx,sha256=cDtMjLTdC47g7cME369NSOCck3JwG2jwZ6j25no3_gw,2477
sqlalchemy/cyextension/util.cpython-310-darwin.so,sha256=nJ6__AF3Q8aet44gxiynYR_4K6LkXHiZNzaz1yUb9t0,102407
sqlalchemy/cyextension/util.pyx,sha256=lv03p63oVn23jLhMI4_RYGewUnJfh-4FkrNMEFL7A3Y,2289
sqlalchemy/dialects/__init__.py,sha256=-Tvtzn65H8RsXHCcXP2_WKy7pBR1blaWVw0fvsNlryA,1786
sqlalchemy/dialects/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/__pycache__/_typing.cpython-310.pyc,,
sqlalchemy/dialects/_typing.py,sha256=P2ML2o4b_bWAAy3zbdoUjx3vXsMNwpiOblef8ThCxlM,648
sqlalchemy/dialects/mssql/__init__.py,sha256=cHfYkDNsBKHrhaRVNdz7HFnYKAV4mQogP3sZdB5tUdA,1841
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/json.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-310.pyc,,
sqlalchemy/dialects/mssql/base.py,sha256=PomFD_-YBulSIwO9zoDCTfJTjPCUhYVtDH9BFA-4Bvg,133267
sqlalchemy/dialects/mssql/information_schema.py,sha256=vVsIan_fwm2HmU13SMoBP4QgrSJ52itHgr8dOXu5INk,8067
sqlalchemy/dialects/mssql/json.py,sha256=B0m6H08CKuk-yomDHcCwfQbVuVN2WLufuVueA_qb1NQ,4573
sqlalchemy/dialects/mssql/provision.py,sha256=qNslme0Z62slaOiU1o1eOckJ6UaYUgbcRRfnR1N2lzw,4996
sqlalchemy/dialects/mssql/pymssql.py,sha256=BfJp9t-IQabqWXySJBmP9pwNTWnJqbjA2jJM9M4XeWc,4029
sqlalchemy/dialects/mssql/pyodbc.py,sha256=Z-JnUrk42pq7cyyz6HUVu_9NRcAE9uIcACw3Bhic40Q,26707
sqlalchemy/dialects/mysql/__init__.py,sha256=btLABiNnmbWt9ziW-XgVWEB1qHWQcSFz7zxZNw4m_LY,2144
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/expression.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/json.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/types.cpython-310.pyc,,
sqlalchemy/dialects/mysql/aiomysql.py,sha256=9aMkAAqn-cs1mvxaybTWnmuxEzdvcSfBv0Aei7gWez4,9595
sqlalchemy/dialects/mysql/asyncmy.py,sha256=U-0-BHMkVnYe19TkhIzdHe7s6ZkYJVJ6AUPQzWjOUXo,9866
sqlalchemy/dialects/mysql/base.py,sha256=9qDLsLPrAZr5AjFvvwd4BeLtyyDqGKnqwW_0oYjou4M,119088
sqlalchemy/dialects/mysql/cymysql.py,sha256=5CQVJAlqQ3pT4IDGSQJH2hCzj-EWjUitA21MLqJwEEs,2291
sqlalchemy/dialects/mysql/dml.py,sha256=qw0ZweHbMsbNyVSfC17HqylCnf7XAuIjtgofiWABT8k,7636
sqlalchemy/dialects/mysql/enumerated.py,sha256=1L2J2wT6nQEmRS4z-jzZpoi44IqIaHgBRZZB9m55czo,8439
sqlalchemy/dialects/mysql/expression.py,sha256=WW5G2XPwqJfXjuzHBt4BRP0pCLcPJkPD1mvZX1g0JL0,4066
sqlalchemy/dialects/mysql/json.py,sha256=JlSFBAHhJ9JmV-3azH80xkLgeh7g6A6DVyNVCNZiKPU,2260
sqlalchemy/dialects/mysql/mariadb.py,sha256=Sugyngvo6j6SfFFuJ23rYeFWEPdZ9Ji9guElsk_1WSQ,844
sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=p5c4Mfpzz3eQwJu4z4ddRHJZftdmUOL6wSIBHFXqAcU,7466
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=5glmkPhD_KP-Mci8ZXBr4yzqH1MDfzCJ9F_kZNyXcGo,5666
sqlalchemy/dialects/mysql/mysqldb.py,sha256=dPXGcjZtvMx0CoH6530kDNginDE5YfGgxWUz0L28_0A,9654
sqlalchemy/dialects/mysql/provision.py,sha256=uPT6-BIoP_12XLmWAza1TDFNhOVVJ3rmQoMH7nvh-Vg,3226
sqlalchemy/dialects/mysql/pymysql.py,sha256=gLaQkaTU-RISblBiOlxvx5Kdumq04NI_p4fpVKXTRuQ,2944
sqlalchemy/dialects/mysql/pyodbc.py,sha256=mkOvumrxpmAi6noZlkaTVKz2F7G5vLh2vx0cZSn9VTA,4288
sqlalchemy/dialects/mysql/reflection.py,sha256=IWf3wte2XMrBwRVCUyXnIFNrP8iDdreygnlNK3PdRrk,22526
sqlalchemy/dialects/mysql/reserved_words.py,sha256=DsPHsW3vwOrvU7bv3Nbfact2Z_jyZ9xUTT-mdeQvqxo,9145
sqlalchemy/dialects/mysql/types.py,sha256=i8DpRkOL1QhPErZ25AmCQOuFLciWhdjNL3I0CeHEhdY,24258
sqlalchemy/dialects/oracle/__init__.py,sha256=HcAB9tvX7uAVHDMd2pWXKVFdcCwwjLWCXHMXgYU1EWY,1306
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/types.cpython-310.pyc,,
sqlalchemy/dialects/oracle/base.py,sha256=3u4iq8MkjaoMl2EhAJJfG-_tciA8lmpiFE9l1Kki3yk,117851
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=Tno0OgmlxoSMRw9iTO5Gv47oSkOddXu1ZIphPlYSrbs,55101
sqlalchemy/dialects/oracle/dictionary.py,sha256=iUoyFEFM8z0sfVWR2n_nnre14kaQkV_syKO0R5Dos4M,19487
sqlalchemy/dialects/oracle/oracledb.py,sha256=_-fUQ94xai80B7v9WLVGoGDIv8u54nVspBdyGEyI76g,3457
sqlalchemy/dialects/oracle/provision.py,sha256=5cvIc3yTWxz4AIRYxcesbRJ1Ft-zT9GauQ911yPnN2o,8055
sqlalchemy/dialects/oracle/types.py,sha256=IhDjnE7m98jYBOp28BOull4QOaSTacNUNAOVryNFfpU,7481
sqlalchemy/dialects/postgresql/__init__.py,sha256=bZEPsLbRtB7s6TMQAHCIzKBgkxUa3eDXvCkeARua37E,3734
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/types.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=U3aWzbKD3VOj6Z6r-4IsIQmtjGGIB4RDZH6NXfd8Xz0,5655
sqlalchemy/dialects/postgresql/array.py,sha256=tLyU9GDAeIypNhjTuFQUYbaTeijVM1VVJS6UdzzXXn4,13682
sqlalchemy/dialects/postgresql/asyncpg.py,sha256=MhErK-QyxFUz01-8hH7E0uVHq6-4qwbbRvZciNI9fek,39350
sqlalchemy/dialects/postgresql/base.py,sha256=4JnP_ayMDvxNgAqBGQQCRTRGdj2-fjhnMWGR0HD5v6U,175403
sqlalchemy/dialects/postgresql/dml.py,sha256=_He69efdpDA5gGmBsE7Lo4ViSi3QnR38BiFmrR1tw6k,11203
sqlalchemy/dialects/postgresql/ext.py,sha256=oPP22Pq-n2lMmQ8ahifYmsmzRhSiSv1RV-xrTT0gycw,16253
sqlalchemy/dialects/postgresql/hstore.py,sha256=q5x0npbAMI8cdRFGTMwLoWFj9P1G9DUkw5OEUCfTXpI,11532
sqlalchemy/dialects/postgresql/json.py,sha256=panGtnEbcirQDy4yR2huWydFqa_Kmv8xhpLyf-SSRWE,11203
sqlalchemy/dialects/postgresql/named_types.py,sha256=zNoHsP3nVq5xxA7SOQ6LLDwYZEHFciZ-nDjw_I9f_G0,17092
sqlalchemy/dialects/postgresql/operators.py,sha256=MB40xq1124OnhUzkvtbnTmxEiey0VxMOYyznF96wwhI,2799
sqlalchemy/dialects/postgresql/pg8000.py,sha256=w6pJ3LaIKWmnwvB0Pr1aTJX5OKNtG5RNClVfkE019vU,18620
sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=0lLnIgxfCrqkx_LNijMxo0trNLsodcd8KwretZIj4uM,8875
sqlalchemy/dialects/postgresql/provision.py,sha256=oxyAzs8_PhuK0ChivXC3l2Nldih3_HKffvGsZqD8XWI,5509
sqlalchemy/dialects/postgresql/psycopg.py,sha256=YMubzQHMYN1By8QJScIPb_PwNiACv6srddQ6nX6WltQ,22238
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=busajXotATUTvm0Mh9YgXGEPoqHoOgxhWa5ZvPKP1vc,31603
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=2EOuDwBetfvelcPoTzSwOHe6X8lTwaYH7znNzXJt9eM,1739
sqlalchemy/dialects/postgresql/ranges.py,sha256=EebUBCU6aHVXG5_P_93ECA14Au557zkTtodCq3wC-WA,30260
sqlalchemy/dialects/postgresql/types.py,sha256=-T0m_4ukrsUfM_Z49Y0kGLeUbk7ZVyhqr0QLQ6oyOrw,6938
sqlalchemy/dialects/sqlite/__init__.py,sha256=wnZ9vtfm0QXmth1jiGiubFgRiKxIoQoNthb1bp4FhCs,1173
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=T8Vq-H-7tgIW24JOl9_KAHfqwhA6edemuByt9SJfNxQ,10897
sqlalchemy/dialects/sqlite/base.py,sha256=8htJLMebsP_yTJKub4qUJD0SgWu_4sKiRHbGhoR-Qow,96211
sqlalchemy/dialects/sqlite/dml.py,sha256=PYESBj8Ip7bGs_Fi7QjbWLXLnU9a-SbP96JZiUoZNHg,8434
sqlalchemy/dialects/sqlite/json.py,sha256=XFPwSdNx0DxDfxDZn7rmGGqsAgL4vpJbjjGaA73WruQ,2533
sqlalchemy/dialects/sqlite/provision.py,sha256=O4JDoybdb2RBblXErEVPE2P_5xHab927BQItJa203zU,5383
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=_JuOCoic--ehAGkCgnwUUKKTs6xYoBGag4Y_WkQUDwU,5347
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=xBg6DKqvml5cCGxVSAQxR1dcMvso8q4uyXs2m4WLzz0,27891
sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239
sqlalchemy/engine/__init__.py,sha256=fJCAl5P7JH9iwjuWo72_3LOIzWWhTnvXqzpAmm_T0fY,2818
sqlalchemy/engine/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/_py_processors.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/_py_row.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/_py_util.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/base.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/characteristics.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/create.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/cursor.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/default.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/events.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/interfaces.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/mock.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/processors.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/reflection.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/result.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/row.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/strategies.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/url.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/util.cpython-310.pyc,,
sqlalchemy/engine/_py_processors.py,sha256=RSVKm9YppSBDSCEi8xvbZdRCP9EsCYfbyEg9iDCMCiI,3744
sqlalchemy/engine/_py_row.py,sha256=Zdta0JGa7V2aV04L7nzXUEp-H1gpresKyBlneQu60pk,3549
sqlalchemy/engine/_py_util.py,sha256=5m3MZbEqnUwP5kK_ghisFpzcXgBwSxTSkBEFB6afiD8,2245
sqlalchemy/engine/base.py,sha256=5-NFJjjNrr2SpX9FGd0DC5pDiUiZWup0HyXvP1SOFgU,122148
sqlalchemy/engine/characteristics.py,sha256=YvMgrUVAt3wsSiQ0K8l44yBjFlMK3MGajxhg50t5yFM,2344
sqlalchemy/engine/create.py,sha256=8372TLpy4FOAIZ9WmuNkx1v9DPgwpoCAH9P7LNXZCwY,32629
sqlalchemy/engine/cursor.py,sha256=YChnviiirHFSqwRhgcPfFAyqhu8HzLxwuQg-7J7v9Ao,74396
sqlalchemy/engine/default.py,sha256=o-fQq0DRI0lUe1HJWQi4XCaN386CCkisasGrG2gttys,83620
sqlalchemy/engine/events.py,sha256=9BfbZ7uyaC3rOthu1l0fGhLLE0skTtahhy2NedS-DgA,37424
sqlalchemy/engine/interfaces.py,sha256=wA8ITEkula8nFTCEo6pdt4FrG-1lPqeSIUJzfthnFj4,112828
sqlalchemy/engine/mock.py,sha256=NHYKxPsXZD4AME3Q6Z9gPddxiVshpFldDh9MrBk6DeQ,4195
sqlalchemy/engine/processors.py,sha256=ENN6XwndxJPW-aXPu_3NzAZsy5SvNznHoa1Qn29ERAw,2383
sqlalchemy/engine/reflection.py,sha256=0LTHyTbQAQ2V236_ljAbKnnzb7XpxVW244NbGgnfK0Q,75274
sqlalchemy/engine/result.py,sha256=oQZ73Dj17XVosJicp-srl0oJhADS_YOzUL9XFSAb12E,77873
sqlalchemy/engine/row.py,sha256=doiXKaUI6s6OkfqPIwNyTPLllxJfR8HYgEI8ve9VYe0,11955
sqlalchemy/engine/strategies.py,sha256=HjCj_FHQOgkkhhtnVmcOEuHI_cftNo3P0hN5zkhZvDc,442
sqlalchemy/engine/url.py,sha256=_WNE7ia0JIPRc1PLY_jSA3F7bB5kp1gzuzkc5eoKviA,30694
sqlalchemy/engine/util.py,sha256=Y5euVW6-DGJaxIgUachA2n1aiqm2M3cB-tCG2joRVt4,5683
sqlalchemy/event/__init__.py,sha256=CSBMp0yu5joTC6tWvx40B4p87N7oGKxC-ZLx2ULKUnQ,997
sqlalchemy/event/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/event/__pycache__/api.cpython-310.pyc,,
sqlalchemy/event/__pycache__/attr.cpython-310.pyc,,
sqlalchemy/event/__pycache__/base.cpython-310.pyc,,
sqlalchemy/event/__pycache__/legacy.cpython-310.pyc,,
sqlalchemy/event/__pycache__/registry.cpython-310.pyc,,
sqlalchemy/event/api.py,sha256=nQAvPK1jrLpmu8aKCUtc-vYWcIuG-1FgAtp3GRkfIiI,8227
sqlalchemy/event/attr.py,sha256=NMe_sPQTju2PE-f68C8TcKJGW-Gxyi1CLXumAmE368Y,20438
sqlalchemy/event/base.py,sha256=ECHisETYKRpHGzdhwxbGNnfFjCfD3OKKN9_A8xQsDSM,14996
sqlalchemy/event/legacy.py,sha256=OpPqE64xk1OYjLW1scvc6iijhoa5GZJt5f7-beWhgOc,8211
sqlalchemy/event/registry.py,sha256=XFjAAmV8vjHfdU7DOqIxu6BXBW2vFIIKJ6eYXiu_-ko,10861
sqlalchemy/events.py,sha256=pRcPKKsPQHGPH_pvTtKRmzuEIy-QHCtkUiZl4MUbxKs,536
sqlalchemy/exc.py,sha256=4SMKOJtz7_SWt5vskCSeXSi4ZlFyL4jh53Q8sk4-ODQ,24011
sqlalchemy/ext/__init__.py,sha256=w4h7EpXjKPr0LD4yHa0pDCfrvleU3rrX7mgyb8RuDYQ,322
sqlalchemy/ext/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/associationproxy.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/automap.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/baked.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/compiler.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/hybrid.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/indexable.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/instrumentation.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/mutable.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/orderinglist.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/serializer.cpython-310.pyc,,
sqlalchemy/ext/associationproxy.py,sha256=l7CTANXWmEzA7Hb6_fYY45I0wnTW3iryGHmuppn3qyo,65992
sqlalchemy/ext/asyncio/__init__.py,sha256=oaV5vnuL6DOd_n1TApySI157_380Pd9bskjjriS7iik,1255
sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/base.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/engine.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/exc.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/result.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/session.cpython-310.pyc,,
sqlalchemy/ext/asyncio/base.py,sha256=LQzW7Qo9xvYutjDlwrh3Ln1AqIeyEGZR6vXCxh7ue5E,9015
sqlalchemy/ext/asyncio/engine.py,sha256=s-swQNL0smbgugoq2gbfi8G0bf7JhLbhFibu5EOoxPo,47370
sqlalchemy/ext/asyncio/exc.py,sha256=1hCdOKzvSryc_YE4jgj0l9JASOmZXutdzShEYPiLbGI,639
sqlalchemy/ext/asyncio/result.py,sha256=YHLhgGklf77hEbhNwZ3JhK4Nj52FB-4YTkC-3PbRGE4,30570
sqlalchemy/ext/asyncio/scoping.py,sha256=rr-wH9VhSW04p9dJBzaT8c68HkP1seqiCnKcMiMKVF8,49781
sqlalchemy/ext/asyncio/session.py,sha256=SGEHtQF9PMF7-qkfSQnC79wUvmWArNfdIsU9fy3-vMk,60274
sqlalchemy/ext/automap.py,sha256=7p13-VpN0MOM525r7pmEnftedya9l5G-Ei_cFXZfpTc,61431
sqlalchemy/ext/baked.py,sha256=R8ZAxiVN6eH50AJu0O3TtFXNE1tnRkMlSj3AvkcWFhY,17818
sqlalchemy/ext/compiler.py,sha256=h7eR0NcPJ4F_k8YGRP3R9YX75Y9pgiVxoCjRyvceF7g,20391
sqlalchemy/ext/declarative/__init__.py,sha256=VJu8S1efxil20W48fJlpDn6gHorOudn5p3-lF72WcJ8,1818
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/ext/declarative/__pycache__/extensions.cpython-310.pyc,,
sqlalchemy/ext/declarative/extensions.py,sha256=vwZjudPFA_mao1U04-RZCaU_tvPMBgQa5OTmSI7K7SU,19547
sqlalchemy/ext/horizontal_shard.py,sha256=eh14W8QWHYH22PL1l5qF_ad9Fyh1WAFjKi_vNfsme94,16766
sqlalchemy/ext/hybrid.py,sha256=tlB_RteXPJ2dwoyUyI23Dlr2VG7P9IjqQEzrpyw5tGs,52515
sqlalchemy/ext/indexable.py,sha256=RkG9BKwil-TqDjVBM14ML9c-geUrHxtRKpYkSJEwGHA,11028
sqlalchemy/ext/instrumentation.py,sha256=rjjSbTGilYeGLdyEWV932TfTaGxiVP44_RajinANk54,15723
sqlalchemy/ext/mutable.py,sha256=d3Pp8PcAVN4pHN9rhc1ReXBWe0Q70Q5S1klFoYGyDPA,37393
sqlalchemy/ext/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/ext/mypy/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/apply.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/infer.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/names.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/plugin.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/util.cpython-310.pyc,,
sqlalchemy/ext/mypy/apply.py,sha256=uUES4grydYtKykLKlxzJeBXeGe8kfWou9_rzEyEkfp0,10503
sqlalchemy/ext/mypy/decl_class.py,sha256=Ls2Efh4kEhle6Z4VMz0GRBgGQTYs2fHr5b4DfuDj44c,17377
sqlalchemy/ext/mypy/infer.py,sha256=si720RW6iGxMRZNP5tcaIxA1_ehFp215TzxVXaLjglU,19364
sqlalchemy/ext/mypy/names.py,sha256=CWPZVswhNNBCpb49Vf2w-Y8dxxaYPj9JbiOSLtczfEY,10967
sqlalchemy/ext/mypy/plugin.py,sha256=fLXDukvZqbJ0JJCOoyZAuOniYZ_F1YT-l9gKppu8SEs,9750
sqlalchemy/ext/mypy/util.py,sha256=JjCOeykHtyQv1ZNiRWjgrUn-WX1PhDMi-VobA5UXitk,9424
sqlalchemy/ext/orderinglist.py,sha256=8Vcg7UUkLg-QbYAbLVDSqu-5REkR6L-FLLhCYsHYxCQ,14384
sqlalchemy/ext/serializer.py,sha256=ox6dbMOBmFR0H2RQFt17mcYBOGKgn1cNVFfqY8-jpgQ,6178
sqlalchemy/future/__init__.py,sha256=79DZx3v7TQZpkS_qThlmuCOm1a9UK2ObNZhyMmjfNB0,516
sqlalchemy/future/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/future/__pycache__/engine.cpython-310.pyc,,
sqlalchemy/future/engine.py,sha256=6uOpOedIqiT1-3qJSJIlv9_raMJU8NTkhQwN_Ngg8kI,499
sqlalchemy/inspection.py,sha256=i3aR-IV101YU8D9TA8Pxb2wi08QZuJ34sMy6L5M__rY,5145
sqlalchemy/log.py,sha256=w4mCPtdsvdSSxVXoLEO5qaAiTXxZ9QZqZ4uWwn1Xpdw,8628
sqlalchemy/orm/__init__.py,sha256=cBn0aPWyDFY4ya-cHRshQBcuThk1smTUCTrlp6LHdlE,8463
sqlalchemy/orm/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/_orm_constructors.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/_typing.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/attributes.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/base.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/bulk_persistence.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/clsregistry.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/collections.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/context.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/decl_api.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/decl_base.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/dependency.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/descriptor_props.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/dynamic.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/evaluator.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/events.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/exc.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/identity.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/instrumentation.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/interfaces.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/loading.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/mapped_collection.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/mapper.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/path_registry.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/persistence.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/properties.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/query.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/relationships.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/scoping.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/session.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/state.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/state_changes.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/strategies.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/strategy_options.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/sync.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/unitofwork.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/util.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/writeonly.cpython-310.pyc,,
sqlalchemy/orm/_orm_constructors.py,sha256=Wu5fVc2-W9Lz5pBX40LFX9hMKGLa7mduiw2Qc7ODNu4,99767
sqlalchemy/orm/_typing.py,sha256=TH2_JcWdtDVG-wHhqOWEktkAI6RS2PbYjv-ttYbhZZU,5008
sqlalchemy/orm/attributes.py,sha256=HosCWIvF9yyTYiKwC78_KwSpfI-EZWzGBL7akad5Pb8,92617
sqlalchemy/orm/base.py,sha256=2J2U_28JOhyOeQKkRsdsdK25_jzOXvAU2NJfubwR0fE,27471
sqlalchemy/orm/bulk_persistence.py,sha256=5qmLeyrWL8vAQNjwBpZbCMk30t_apNwaiEre2pcj9jU,69426
sqlalchemy/orm/clsregistry.py,sha256=4J-kKshmLOEyx3VBqREm2k_XY0cer4zwUoHJT3n5Xmw,17949
sqlalchemy/orm/collections.py,sha256=0AZFr9us9MiHo_Xcyi7DUsN02jSBERUOd-jIK8qQ1DA,52159
sqlalchemy/orm/context.py,sha256=52Lv_rhKCQn-xBalUtddGG1yXtVWmKmZ1qU96-d1UZA,110488
sqlalchemy/orm/decl_api.py,sha256=nb_LqSK2ja4kncjX9LHxzaB5OJotvHhmIdktV02DoDA,63763
sqlalchemy/orm/decl_base.py,sha256=JW1llPLGjyKu8AamLx4lBxsnDuQ0gpgMnOFadzJeqos,81278
sqlalchemy/orm/dependency.py,sha256=g3R_1H_OGzagXFeen3Irm3c1lO3yeXGdGa0muUZgZAk,47583
sqlalchemy/orm/descriptor_props.py,sha256=4ioza3TnVE_2RC2wSJLDCa1WNJnim_i0yUiuPq8BQDs,37428
sqlalchemy/orm/dynamic.py,sha256=-4kTkjq6Z88L1STsjybi27cMXc1OVlmmvtPxE_DmJKI,8622
sqlalchemy/orm/evaluator.py,sha256=jPjVrP7XbVOG6aXTCBREq0rF3oNHLqB4XAT-gt_cpaA,11925
sqlalchemy/orm/events.py,sha256=fGnUHwDTV9FTiifB2mmIJispwPbIT4mZongRJD7uiw4,127258
sqlalchemy/orm/exc.py,sha256=A3wvZVs5sC5XCef4LoTUBG-UfhmliFpU9rYMdS2t_To,7356
sqlalchemy/orm/identity.py,sha256=gRiuQSrurHGEAJXH9QGYioXL49Im5EGcYQ-IKUEpHmQ,9249
sqlalchemy/orm/instrumentation.py,sha256=UiICt6MaDo3kf2x71F-NJZ6Q7zv2sN2-9zLwm4zOARU,24451
sqlalchemy/orm/interfaces.py,sha256=dtYqmPqgtn9B-DzfYCrV13kL80nh5Bh8yqgsQXeBdrc,48410
sqlalchemy/orm/loading.py,sha256=o3R9yKDigR21aoNeyX6-KpSyvXesC9pQ91IMbaA_J8A,56405
sqlalchemy/orm/mapped_collection.py,sha256=bNs7GOMZW-LSs2LQ_63lYqhdZ3IHp5Bz2bm_tzqr7i8,19275
sqlalchemy/orm/mapper.py,sha256=Zhw11mT00q-wxt0TiCw1f6i6ggEarIJt5TMna_U61u4,171024
sqlalchemy/orm/path_registry.py,sha256=slcQySQAewi-c5UdphsFCyUgvHWWapNMuSRXo4iRqIo,24606
sqlalchemy/orm/persistence.py,sha256=oKqEic1r4jCI069PP7Dw_VxTHyvjD9iEHSHLK45imik,60516
sqlalchemy/orm/properties.py,sha256=OOSn-a2i8Oqk6lyrlLCUDHgrBhm9ub_gma6AnO1a7ZU,26811
sqlalchemy/orm/query.py,sha256=cs85Bh3OqfRqE3qhcsNlftvgJejgODJhkkixdVBjLC4,117883
sqlalchemy/orm/relationships.py,sha256=QDIsqlk2M3lL2z_a86ZZhtGmy-FXL9aRbluhI-UCtA8,127802
sqlalchemy/orm/scoping.py,sha256=fdoHbwZgiqRpCtkKqivwBB5_ulKbPKc_nkErRjrDDMc,74844
sqlalchemy/orm/session.py,sha256=1pCDYIFYLDwtaVg7LFIF0X3qNbYhHrzD2Z0h45dTL9k,187484
sqlalchemy/orm/state.py,sha256=Kfgg1AguWYjr8rd0neZqnVYVg8_hhvPKCbmoMRJDmoI,37581
sqlalchemy/orm/state_changes.py,sha256=pqkjSDOR6H5BufMKdzFUIatDp3DY90SovOJiJ1k6Ayw,6815
sqlalchemy/orm/strategies.py,sha256=5VahSJVSdkGZ3SpTXirfmJ2KFW3PutxFZcFDhE9_xbY,113754
sqlalchemy/orm/strategy_options.py,sha256=BqRTPbdMtoL3eUJ9PJX1seqAeNEV0WJTcxjKQABbhJ4,81790
sqlalchemy/orm/sync.py,sha256=5Nt_OqP4IfhAtHwFRar4dw-YjLENRLvp4d3jDC4wpnw,5749
sqlalchemy/orm/unitofwork.py,sha256=Wk5YZocBbxe4m1wU2aFQ7gY1Cp5CROi13kDEM1iOSz4,27033
sqlalchemy/orm/util.py,sha256=PSb1abPNkv76GJkU9295nKUgE-jZXIFtiA7P5MqVNPg,79987
sqlalchemy/orm/writeonly.py,sha256=4Mi9Nto48572t2l65jGhUTTDeMlUxjD9xidnmafFP9U,19537
sqlalchemy/pool/__init__.py,sha256=CIv4b6ctueY7w3sML_LxyLKAdl59esYOhz3O7W5w7WE,1815
sqlalchemy/pool/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/pool/__pycache__/base.cpython-310.pyc,,
sqlalchemy/pool/__pycache__/events.cpython-310.pyc,,
sqlalchemy/pool/__pycache__/impl.cpython-310.pyc,,
sqlalchemy/pool/base.py,sha256=FrVZ6_0sNr9P1tudH55UugMB578FgADDsfH_zsMjPG0,52350
sqlalchemy/pool/events.py,sha256=eb5t6zLqZ0yErsaCt3-3m_-KjnK2VkGO5ILvnHY1Fmg,13201
sqlalchemy/pool/impl.py,sha256=bwd4GQ5FHzlblAfaXoD_23bwxFiLqQF5xEUY18qImRo,17740
sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/schema.py,sha256=mt74CGCBtfv_qI1_6zzNFMexYGyWDj2Jkh-XdH4kEWI,3194
sqlalchemy/sql/__init__.py,sha256=TMjmKl6YaAK6jXtmPLqPDn4yrbShVC3Wc9njjU1MrUw,5730
sqlalchemy/sql/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/_dml_constructors.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/_elements_constructors.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/_orm_types.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/_py_util.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/_typing.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/annotation.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/base.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/cache_key.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/coercions.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/compiler.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/crud.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/ddl.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/default_comparator.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/dml.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/elements.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/events.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/expression.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/functions.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/lambdas.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/naming.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/operators.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/roles.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/schema.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/selectable.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/sqltypes.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/traversals.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/type_api.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/util.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/visitors.cpython-310.pyc,,
sqlalchemy/sql/_dml_constructors.py,sha256=hoNyINY3FNi1ZQajR6lbcRN7oYsNghM1wuzzVWxIv3c,3867
sqlalchemy/sql/_elements_constructors.py,sha256=-qksx59Gqhmzxo1xByPtZZboNvL8uYcCN14pjHYHxL8,62914
sqlalchemy/sql/_orm_types.py,sha256=_vR3_HQYgZR_of6_ZpTQByie2gaVScxQjVAVWAP3Ztg,620
sqlalchemy/sql/_py_util.py,sha256=iiwgX3dQhOjdB5-10jtgHPIdibUqGk49bC1qdZMBpYI,2173
sqlalchemy/sql/_selectable_constructors.py,sha256=RDqgejqiUuU12Be1jBpMIx_YdJho8fhKfnMoJLPFTFE,18812
sqlalchemy/sql/_typing.py,sha256=t_g3EqzSK9PGCVGCCGwNso2gcmupcFKJalMD1-_l8ng,10457
sqlalchemy/sql/annotation.py,sha256=8iye1kbFDeKYcgAW7vaBg7HdcZLvUtjRXUzVS6ZRNLk,17904
sqlalchemy/sql/base.py,sha256=xMnomm_y8T4215FhhEFQVM2R21cgphqIjwq_FMsxbc8,73762
sqlalchemy/sql/cache_key.py,sha256=ObhC6btwzChajxhNskBnBCrJ3aVpPzMC1KKII0bbhsc,32747
sqlalchemy/sql/coercions.py,sha256=ju8xEi7b9G_GzxaQ6Nwu0cFIWFZ--ottIVfdiuhHY7Y,40553
sqlalchemy/sql/compiler.py,sha256=T5SGEoxOVjHh_LN9YTXhlc5WHch20TtSTHzS0VI-bx0,268268
sqlalchemy/sql/crud.py,sha256=FSJRsmd3jDA1SRhuCIfJdaRD7jQ1lmqU4G4vcFMDfNs,55647
sqlalchemy/sql/ddl.py,sha256=ubFUBB8ajD4_TBY5LEmpEFn6PNPy4ctesNU6f36DPec,45689
sqlalchemy/sql/default_comparator.py,sha256=SE0OaK1BlY0RinQ21ZXJOUGkO00oGv6GMMmAH-4iNTQ,16663
sqlalchemy/sql/dml.py,sha256=uxfHb0sG4IvtFx1G9MGrrxL9Bm_kqlX52sRV-IKtMN8,65550
sqlalchemy/sql/elements.py,sha256=f7VALJB4w5-MLdxLLsUCC9iSVVDLdbHGj3Y7qabxB18,169660
sqlalchemy/sql/events.py,sha256=xe3vJ6pQJau3dJWBAY0zU7Lz52UKuMrpLycriLm3AWA,18301
sqlalchemy/sql/expression.py,sha256=baMnCH04jeE8E3tA2TovXlsREocA2j3fdHKnzOB8H4U,7586
sqlalchemy/sql/functions.py,sha256=A5SRLM6EqY4RQ4e0LI5yCjGwzRIjn7Sz8mLFLSfmCHc,54492
sqlalchemy/sql/lambdas.py,sha256=kUbj1HTqzGdTeKFVZDR0t82JnotYVBfjHE8jV_-2l3w,49309
sqlalchemy/sql/naming.py,sha256=l8udFP2wvXLgehIB0uF2KXwpkXSVSREDk6fLCH9F-XY,6865
sqlalchemy/sql/operators.py,sha256=uNgF6gCArK-_UJTK8UjgjnYUcpgqIm5fWzz-Zz3Cs_U,75895
sqlalchemy/sql/roles.py,sha256=fRi_bOduJED_Dk2PjudvAzEdpf0JXgfv1b5onjKHmsU,7628
sqlalchemy/sql/schema.py,sha256=7wAXvFUehXzrF_FH-CJG2NXh_AhHaKV9dVnUPeJ_ymc,227000
sqlalchemy/sql/selectable.py,sha256=XSL6o79lor6AWvg_Rfv4BskkcTlNjui0Rmh4s_KfYxw,232342
sqlalchemy/sql/sqltypes.py,sha256=SaznYM5eXYuNkUnlFM1HT3-fNe_d9zTJQ8lXEc6FTZY,126049
sqlalchemy/sql/traversals.py,sha256=UT2g9KbITMGf4EELAzR8OGlqRVNANlMgPgalU_MkDyU,33601
sqlalchemy/sql/type_api.py,sha256=EJ1RFnAAExqs7BviHLo3VmSDda7gBxIY8nRx3zc7bU0,85191
sqlalchemy/sql/util.py,sha256=5CGmTf3LueJ34nDUCrpfB_WqmMSLBDYC6UgK-H7LJik,48269
sqlalchemy/sql/visitors.py,sha256=IXzmK9IZr40ZSwP8ucRb6A1sgDC3_wpm0nM-qayg3bI,36354
sqlalchemy/testing/__init__.py,sha256=9M2SMxBBLJ8xLUWXNCWDzkcvOqFznWcJzrSd712vATU,3126
sqlalchemy/testing/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/assertions.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/assertsql.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/asyncio.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/config.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/engines.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/entities.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/exclusions.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/pickleable.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/profiling.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/requirements.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/schema.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/util.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/warnings.cpython-310.pyc,,
sqlalchemy/testing/assertions.py,sha256=lNNZ-gfF4TDRXmB7hZDdch7JYZRb_qWGeqWDFKtopx0,31439
sqlalchemy/testing/assertsql.py,sha256=fIlHNWaGUR439L6yF98kGz4E_VBIJw5BvpnnQoCutCA,16775
sqlalchemy/testing/asyncio.py,sha256=cAw68tzu3h5wjdIKfOqhFATcbMb38XeK0ThjIalUHuQ,3728
sqlalchemy/testing/config.py,sha256=KInfuAJlcdRLHqZSzUPSyqRf56yDy1jDXZrE83eI_30,11010
sqlalchemy/testing/engines.py,sha256=w5-0FbanItRsOt6x4n7wM_OnToCzJnrvZZ2hk5Yzng8,13355
sqlalchemy/testing/entities.py,sha256=rysywsnjXHlIIC-uv0L7-fLmTAuNpHJvcSd1HeAdY5M,3354
sqlalchemy/testing/exclusions.py,sha256=uoYLEwyNOK1eR8rpfOZ2Q3dxgY0akM-RtsIFML-FPrY,12444
sqlalchemy/testing/fixtures/__init__.py,sha256=9snVns5A7g28LqC6gqQuO4xRBoJzdnf068GQ6Cae75I,1198
sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/testing/fixtures/__pycache__/base.cpython-310.pyc,,
sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-310.pyc,,
sqlalchemy/testing/fixtures/__pycache__/orm.cpython-310.pyc,,
sqlalchemy/testing/fixtures/__pycache__/sql.cpython-310.pyc,,
sqlalchemy/testing/fixtures/base.py,sha256=OayRr25soCqj1_yc665D5XbWWzFCm7Xl9Txtps953p4,12256
sqlalchemy/testing/fixtures/mypy.py,sha256=7fWVZzYzNjqmLIoFa-MmXSGDPS3eZYFXlH-WxaxBDDY,11845
sqlalchemy/testing/fixtures/orm.py,sha256=x27qjpK54JETATcYuiphtW-HXRy8ej8h3aCDkeQXPfY,6095
sqlalchemy/testing/fixtures/sql.py,sha256=Q7Qq0n4qTT681nWt5DqjThopgjv5BB2KmSmrmAxUqHM,15704
sqlalchemy/testing/pickleable.py,sha256=B9dXGF7E2PywB67SngHPjSMIBDTFhyAV4rkDUcyMulk,2833
sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-310.pyc,,
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-310.pyc,,
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-310.pyc,,
sqlalchemy/testing/plugin/bootstrap.py,sha256=GrBB27KbswjE3Tt-zJlj6uSqGh9N-_CXkonnJSSBz84,1437
sqlalchemy/testing/plugin/plugin_base.py,sha256=QiM_s82jPf4mA8HB7SDCMOP7iulCDTYrL7JNvYFv_CA,21335
sqlalchemy/testing/plugin/pytestplugin.py,sha256=yh4PP406O0TwPMDzpJHpcNdU2WHXCLYI10F3oOLePjE,27295
sqlalchemy/testing/profiling.py,sha256=HPjYvRLT1nD90FCZ7AA8j9ygkMtf1SGA47Xze2QPueo,10148
sqlalchemy/testing/provision.py,sha256=w4F_ceGHPpWHUeh6cVcE5ktCC-ISrGc2yOSnXauOd5U,14200
sqlalchemy/testing/requirements.py,sha256=lsIcOEZfn-pgrMqnsJ55NrVnpq-CQETwXg-0Mk3HfVs,50709
sqlalchemy/testing/schema.py,sha256=OSfMoIJ7ORbevGkeJdrKcTrQ0s7wXebuCU08mC1Y9jA,6513
sqlalchemy/testing/suite/__init__.py,sha256=_firVc2uS3TMZ3vH2baQzNb17ubM78RHtb9kniSybmk,476
sqlalchemy/testing/suite/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_results.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_select.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_types.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-310.pyc,,
sqlalchemy/testing/suite/test_cte.py,sha256=O5idVeBnHm9zdiG3tuCBUn4hYU_TA63-6LNnRygr8g0,6205
sqlalchemy/testing/suite/test_ddl.py,sha256=xWimTjggpTe3S1Xfmt_IPofTXkUUcKuVSVCIfIyGMbA,11785
sqlalchemy/testing/suite/test_deprecations.py,sha256=XI8ZU1NxC-6uvPDImaaq9O7Ov6MF5gmy-yk3TfesLAo,5082
sqlalchemy/testing/suite/test_dialect.py,sha256=_5gZVBqq6jYhzKj7pgGo6FHrSPeEgZYmiPn2Q28gYqA,21015
sqlalchemy/testing/suite/test_insert.py,sha256=xbMkoNgDY7wSRBXOLny1JltZwUN8HiH_OIwpOh-HD6M,17368
sqlalchemy/testing/suite/test_reflection.py,sha256=YUgTW76IFVNjiYJoZfFZa0W1WVYBZNo5rjynZLxlljA,104478
sqlalchemy/testing/suite/test_results.py,sha256=BSNHQ1SxhU964RS5AIkKMqE_JrVjCabtODLHr5EYfHo,15665
sqlalchemy/testing/suite/test_rowcount.py,sha256=zA0Q3Guf-TQioyLmNZ6HWCUOEuEhf7q-uI2J72j2kjk,6147
sqlalchemy/testing/suite/test_select.py,sha256=QHsBX16EZpxlEZZLM0pMNcwayPU0dig39McKwiiith0,58325
sqlalchemy/testing/suite/test_sequence.py,sha256=c80CBWrU930GPnPfr9TCRbTTuITR7BpIactncLIj2XU,9672
sqlalchemy/testing/suite/test_types.py,sha256=GTully5oymZ9I_GCrw1q0_kVRcnPSOnfT89shObI8x0,61834
sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=7obItCpFt4qlWaDqe25HWgQT6FoUhgz1W7_Xycfz9Xk,5887
sqlalchemy/testing/suite/test_update_delete.py,sha256=VxhsI37iivEYejQ38duuT4dida9iXH_4EK3QMvaXMZM,1648
sqlalchemy/testing/util.py,sha256=Wsu4GZgCW6wX9mmxfiffhDz1cZm3778OB3LtiWNgb3Y,14080
sqlalchemy/testing/warnings.py,sha256=pmfT33PF1q1PI7DdHOsup3LxHq1AC4-aYl1oL8HmrYo,1546
sqlalchemy/types.py,sha256=DgBpPaT-vtsn6_glx5wocrIhR2A1vy56SQNRY3NiPUw,3168
sqlalchemy/util/__init__.py,sha256=Bh0SkfkeCsz6-rbDmC41lAWOuCvKCiXVZthN2cWJEXk,8245
sqlalchemy/util/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/util/__pycache__/_collections.cpython-310.pyc,,
sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-310.pyc,,
sqlalchemy/util/__pycache__/_has_cy.cpython-310.pyc,,
sqlalchemy/util/__pycache__/_py_collections.cpython-310.pyc,,
sqlalchemy/util/__pycache__/compat.cpython-310.pyc,,
sqlalchemy/util/__pycache__/concurrency.cpython-310.pyc,,
sqlalchemy/util/__pycache__/deprecations.cpython-310.pyc,,
sqlalchemy/util/__pycache__/langhelpers.cpython-310.pyc,,
sqlalchemy/util/__pycache__/preloaded.cpython-310.pyc,,
sqlalchemy/util/__pycache__/queue.cpython-310.pyc,,
sqlalchemy/util/__pycache__/tool_support.cpython-310.pyc,,
sqlalchemy/util/__pycache__/topological.cpython-310.pyc,,
sqlalchemy/util/__pycache__/typing.cpython-310.pyc,,
sqlalchemy/util/_collections.py,sha256=SzJIKREJgfGTjOZRgvcwPGQR2_x_7k8LzFbuzqzJ2Mg,20401
sqlalchemy/util/_concurrency_py3k.py,sha256=SNvy2lOf2cb15CwcfdIHfFJFKXXV-VoBfb-pK-iScDA,8284
sqlalchemy/util/_has_cy.py,sha256=XMkeqCDGmhkd0uuzpCdyELz7gOjHxyFQ1AIlc5NneoY,1229
sqlalchemy/util/_py_collections.py,sha256=8ZL6A4hrvSv2w4K4PaZImHhm0sLNAzaEvqyrWyOzFic,16678
sqlalchemy/util/compat.py,sha256=cdYRtH6ZMSVY8WgzRNDZ8yagOSYG1xjuSyQEBs6Q6Pg,9109
sqlalchemy/util/concurrency.py,sha256=ZxcQYOKy-GBsQkPmCrBO5MzMpqW3JZme2Hiyqpbt9uc,2284
sqlalchemy/util/deprecations.py,sha256=R-1ALq_6-JqktzVMxOBS5jh_DNi5gcM5uP0IBd22jcQ,12113
sqlalchemy/util/langhelpers.py,sha256=y_7NUQYLOFki1llVD73HHYqL-80OPXYl5xghZpyIQgE,64931
sqlalchemy/util/preloaded.py,sha256=KKNLJEqChDW1TNUsM_TzKu7JYEA3kkuh2N-quM_2_Y4,5905
sqlalchemy/util/queue.py,sha256=ITejs6KS4Hz_ojrss2oFeUO9MoIeR3qWmZQ8J7yyrNU,10205
sqlalchemy/util/tool_support.py,sha256=epm8MzDZpVmhE6LIjrjJrP8BUf12Wab2m28A9lGq95s,5969
sqlalchemy/util/topological.py,sha256=hjJWL3C_B7Rpv9s7jj7wcTckcZUSkxc6xRDhiN1xyec,3458
sqlalchemy/util/typing.py,sha256=I426vml9ezDOVSMFE3FL2XaqF1eT73zke0bXJEdPXUs,15640
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.40.0)
Root-Is-Purelib: false
Tag: cp310-cp310-macosx_11_0_arm64
@ -0,0 +1 @@
sqlalchemy
@ -0,0 +1,128 @@
import sys
import os
import re
import importlib
import warnings


is_pypy = '__pypy__' in sys.builtin_module_names


warnings.filterwarnings('ignore',
                        r'.+ distutils\b.+ deprecated',
                        DeprecationWarning)


def warn_distutils_present():
    if 'distutils' not in sys.modules:
        return
    if is_pypy and sys.version_info < (3, 7):
        # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
        # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
        return
    warnings.warn(
        "Distutils was imported before Setuptools, but importing Setuptools "
        "also replaces the `distutils` module in `sys.modules`. This may lead "
        "to undesirable behaviors or errors. To avoid these issues, avoid "
        "using distutils directly, ensure that setuptools is installed in the "
        "traditional way (e.g. not an editable install), and/or make sure "
        "that setuptools is always imported before distutils.")


def clear_distutils():
    if 'distutils' not in sys.modules:
        return
    warnings.warn("Setuptools is replacing distutils.")
    mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
    for name in mods:
        del sys.modules[name]


def enabled():
    """
    Allow selection of distutils by environment variable.
    """
    which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
    return which == 'local'


def ensure_local_distutils():
    clear_distutils()
    distutils = importlib.import_module('setuptools._distutils')
    distutils.__name__ = 'distutils'
    sys.modules['distutils'] = distutils

    # sanity check that submodules load as expected
    core = importlib.import_module('distutils.core')
    assert '_distutils' in core.__file__, core.__file__


def do_override():
    """
    Ensure that the local copy of distutils is preferred over stdlib.

    See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
    for more motivation.
    """
    if enabled():
        warn_distutils_present()
        ensure_local_distutils()


class DistutilsMetaFinder:
    def find_spec(self, fullname, path, target=None):
        if path is not None:
            return

        method_name = 'spec_for_{fullname}'.format(**locals())
        method = getattr(self, method_name, lambda: None)
        return method()

    def spec_for_distutils(self):
        import importlib.abc
        import importlib.util

        class DistutilsLoader(importlib.abc.Loader):

            def create_module(self, spec):
                return importlib.import_module('setuptools._distutils')

            def exec_module(self, module):
                pass

        return importlib.util.spec_from_loader('distutils', DistutilsLoader())

    def spec_for_pip(self):
        """
        Ensure stdlib distutils when running under pip.
        See pypa/pip#8761 for rationale.
        """
        if self.pip_imported_during_build():
            return
        clear_distutils()
        self.spec_for_distutils = lambda: None

    @staticmethod
    def pip_imported_during_build():
        """
        Detect if pip is being imported in a build script. Ref #2355.
        """
        import traceback
        return any(
            frame.f_globals['__file__'].endswith('setup.py')
            for frame, line in traceback.walk_stack(None)
        )


DISTUTILS_FINDER = DistutilsMetaFinder()


def add_shim():
    sys.meta_path.insert(0, DISTUTILS_FINDER)


def remove_shim():
    try:
        sys.meta_path.remove(DISTUTILS_FINDER)
    except ValueError:
        pass
@ -0,0 +1 @@
__import__('_distutils_hack').do_override()
@ -0,0 +1 @@
pip
@ -0,0 +1,221 @@
Metadata-Version: 2.1
Name: annotated-types
Version: 0.5.0
Summary: Reusable constraint types to use with typing.Annotated
Author-email: Samuel Colvin <s@muelcolvin.com>, Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Zac Hatfield-Dodds <zac@zhd.dev>
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Console
Classifier: Environment :: MacOS X
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Information Technology
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Unix
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Typing :: Typed
Requires-Python: >=3.7
Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
Description-Content-Type: text/markdown
# annotated-types

[](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
[](https://pypi.python.org/pypi/annotated-types)
[](https://github.com/annotated-types/annotated-types)
[](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)

[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
adding context-specific metadata to existing types, and specifies that
`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
logic for `x`.

This package provides metadata objects which can be used to represent common
constraints such as upper and lower bounds on scalar values and collection sizes,
a `Predicate` marker for runtime checks, and
descriptions of how we intend these metadata to be interpreted. In some cases,
we also note alternative representations which do not require this package.

## Install

```bash
pip install annotated-types
```

## Examples

```python
from typing import Annotated
from annotated_types import Gt, Len, Predicate

# `is_prime` is assumed to be defined elsewhere as a Callable[[int], bool].


class MyClass:
    age: Annotated[int, Gt(18)]  # Valid: 19, 20, ...
                                 # Invalid: 17, 18, "19", 19.0, ...
    factors: list[Annotated[int, Predicate(is_prime)]]  # Valid: 2, 3, 5, 7, 11, ...
                                                        # Invalid: 4, 8, -2, 5.0, "prime", ...

    my_list: Annotated[list[int], Len(0, 10)]  # Valid: [], [10, 20, 30, 40, 50]
                                               # Invalid: (1, 2), ["abc"], [0] * 20
```

## Documentation

_While `annotated-types` avoids runtime checks for performance, users should not
construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
Downstream implementors may choose to raise an error, emit a warning, silently ignore
a metadata item, etc., if the metadata objects described below are used with an
incompatible type - or for any other reason!_

### Gt, Ge, Lt, Le

Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
dates, times, strings, sets, etc. Note that the boundary value need not be of the
same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
is fine, for example, and implies that the value is an integer x such that `x > 1.5`.

We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)`
as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on
the `annotated-types` package.

To be explicit, these types have the following meanings (a consumer sketch follows the list):

* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
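
As a sketch of how a consumer might act on these objects (the `check_bounds` helper
below is hypothetical, not part of this package, and assumes Python 3.9+ for
`typing.Annotated`):

```python
from typing import Annotated, get_args

from annotated_types import Ge, Gt, Le, Lt


def check_bounds(value, annotation) -> bool:
    # Hypothetical consumer: evaluate any Gt/Ge/Lt/Le metadata against a value.
    _, *metadata = get_args(annotation)
    for m in metadata:
        if isinstance(m, Gt) and not value > m.gt:
            return False
        if isinstance(m, Ge) and not value >= m.ge:
            return False
        if isinstance(m, Lt) and not value < m.lt:
            return False
        if isinstance(m, Le) and not value <= m.le:
            return False
    return True


assert check_bounds(2, Annotated[int, Gt(1.5)])      # 2 > 1.5, so valid
assert not check_bounds(1, Annotated[int, Gt(1.5)])  # 1 is not > 1.5
```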

### Interval

`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
metadata object. `None` attributes should be ignored, and non-`None` attributes
treated as per the single bounds above.
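
Since `Interval` is a `GroupedMetadata` (see below), it unpacks into the equivalent
single-bound objects, which you can verify directly:

```python
import annotated_types as at

# Interval unpacks into the equivalent single-bound constraints;
# None attributes are simply skipped.
assert list(at.Interval(gt=0, le=10)) == [at.Gt(0), at.Le(10)]
```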

### MultipleOf

`MultipleOf(multiple_of=x)` might be interpreted in two ways:

1. Python semantics, implying `value % multiple_of == 0`, or
2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
   where `int(value / multiple_of) == value / multiple_of`.

We encourage users to be aware of these two common interpretations and their
distinct behaviours, especially since very large or non-integer numbers make
it easy to cause silent data corruption due to floating-point imprecision.

We encourage libraries to carefully document which interpretation they implement.
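
For concreteness, the two interpretations correspond to the following checks (a sketch
only; neither function is part of this package):

```python
def is_multiple_python(value, multiple_of) -> bool:
    # Interpretation 1 (Python semantics): the remainder is exactly zero.
    return value % multiple_of == 0


def is_multiple_jsonschema(value, multiple_of) -> bool:
    # Interpretation 2 (JSONschema semantics): the quotient is integral.
    quotient = value / multiple_of
    return int(quotient) == quotient


# The two agree on exactly-representable inputs...
assert is_multiple_python(0.75, 0.25) and is_multiple_jsonschema(0.75, 0.25)
# ...but floating-point imprecision can make them disagree on other inputs,
# which is why libraries should document which interpretation they use.
```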

### MinLen, MaxLen, Len

`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.

As well as `Len()` which can optionally include upper and lower bounds, we also
provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
and `Len(max_length=y)` respectively.

`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.

Examples of usage:

* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8

#### Changed in v0.4.0

* `min_inclusive` has been renamed to `min_length`, no change in meaning
* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
  meaning of the upper bound in slices vs. `Len`

See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.

### Timezone

`Timezone` can be used with a `datetime` or a `time` to express which timezones
are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
`Timezone(...)` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
expresses that any timezone-aware datetime is allowed. You may also pass a specific
timezone string or `timezone` object such as `Timezone(timezone.utc)` or
`Timezone("Africa/Abidjan")` to express that you only allow a specific timezone,
though we note that this is often a symptom of fragile design.
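
The three cases look like this in practice (`NaiveDt`, `AwareDt`, and `UtcDt` are
illustrative aliases, not exports of this package):

```python
from datetime import datetime, timezone
from typing import Annotated

from annotated_types import Timezone

NaiveDt = Annotated[datetime, Timezone(None)]        # must be naive
AwareDt = Annotated[datetime, Timezone(...)]         # any tz-aware datetime
UtcDt = Annotated[datetime, Timezone(timezone.utc)]  # must be in UTC specifically
```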

### Predicate

`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
Users should prefer the statically inspectable metadata above, but if you need
the full power and flexibility of arbitrary runtime predicates... here it is.

We provide a few predefined predicates for common string constraints:
`IsLower = Predicate(str.islower)`, `IsUpper = Predicate(str.isupper)`, and
`IsDigit = Predicate(str.isdigit)`.
Some libraries might have special logic to handle known or understandable predicates,
for example by checking for `str.isdigit` and using its presence to both call custom
logic to enforce digit-only strings, and customise some generated external schema.
Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.

We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting
`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
exception. We encourage libraries to document the behaviour they choose.
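
As a sketch of how a consumer might evaluate `Predicate` metadata at runtime (the
`run_predicates` helper is hypothetical, not part of this package):

```python
from typing import Annotated, get_args

from annotated_types import Predicate


def run_predicates(value, annotation) -> bool:
    # Hypothetical consumer: call every Predicate attached to the annotation.
    _, *metadata = get_args(annotation)
    return all(m.func(value) for m in metadata if isinstance(m, Predicate))


assert run_predicates("abc", Annotated[str, Predicate(str.islower)])
assert not run_predicates("ABC", Annotated[str, Predicate(str.islower)])
```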

### Integrating downstream types with `GroupedMetadata`

Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
This can help reduce verbosity and cognitive overhead for users.
For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:

```python
from __future__ import annotations  # for `int | None` syntax on Python < 3.10

from dataclasses import dataclass
from typing import Iterator

from annotated_types import Ge, GroupedMetadata


@dataclass
class Field(GroupedMetadata):
    ge: int | None = None
    description: str | None = None

    def __iter__(self) -> Iterator[object]:
        # Iterating over a GroupedMetadata object should yield annotated-types
        # constraint metadata objects which describe it as fully as possible,
        # and may include other unknown objects too.
        if self.ge is not None:
            yield Ge(self.ge)
        if self.description is not None:
            # `Description` stands in for a downstream metadata type;
            # it is not provided by annotated-types itself.
            yield Description(self.description)
```

Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.

Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.

Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
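
A quick demonstration of that unpacking behaviour:

```python
import annotated_types as at

assert list(at.Len(3, 5)) == [at.MinLen(3), at.MaxLen(5)]
assert list(at.Len(0, 5)) == [at.MaxLen(5)]  # MinLen(0) carries no information and is omitted
```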

### Consuming metadata

We intend to not be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).

It is up to the implementer to determine how this metadata is used.
You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
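
As a hedged sketch of such a parser (the `collect_constraints` helper is hypothetical
and assumes Python 3.9+):

```python
from typing import Annotated, get_args, get_origin

import annotated_types as at


def collect_constraints(annotation) -> list:
    """Flatten an Annotated[...] type into a flat list of constraint objects."""
    if get_origin(annotation) is not Annotated:
        return []
    constraints = []
    for item in get_args(annotation)[1:]:
        if isinstance(item, at.GroupedMetadata):
            constraints.extend(item)  # unpack Interval, Len, and Field-like groups
        elif isinstance(item, at.BaseMetadata):
            constraints.append(item)
    return constraints


assert collect_constraints(Annotated[int, at.Interval(gt=0, lt=10)]) == [at.Gt(0), at.Lt(10)]
```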

## Design & History

This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
and Hypothesis, with the goal of making it as easy as possible for end-users to
provide more informative annotations for use by runtime libraries.

It is deliberately minimal, and following PEP-593 allows considerable downstream
discretion in what (if anything!) they choose to support. Nonetheless, we expect
that staying simple and covering _only_ the most common use-cases will give users
and maintainers the best experience we can. If you'd like more constraints for your
types - follow our lead, by defining them and documenting them downstream!
@ -0,0 +1,10 @@
annotated_types-0.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
annotated_types-0.5.0.dist-info/METADATA,sha256=ie2NTBuiFKST5M2yUVgOgymZkQ5iad82BPFcZ07ZKBQ,11685
annotated_types-0.5.0.dist-info/RECORD,,
annotated_types-0.5.0.dist-info/WHEEL,sha256=y1bSCq4r5i4nMmpXeUJMqs3ipKvkZObrIXSvJHm1qCI,87
annotated_types-0.5.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083
annotated_types/__init__.py,sha256=qQOWO1uHTw0QjOL1Ggcs92oY4OPX0G7srTdq_M5lC-8,9375
annotated_types/__pycache__/__init__.cpython-310.pyc,,
annotated_types/__pycache__/test_cases.cpython-310.pyc,,
annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
annotated_types/test_cases.py,sha256=z-ftQXel0GT9FjAN2PVXi8ZUVV7F1T9PvwZ5l-UAqpU,5680
@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.17.0
Root-Is-Purelib: true
Tag: py3-none-any
@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2022 the contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@ -0,0 +1,319 @@
import sys
from dataclasses import dataclass
from datetime import timezone
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, TypeVar, Union

if sys.version_info < (3, 8):
    from typing_extensions import Protocol, runtime_checkable
else:
    from typing import Protocol, runtime_checkable

if sys.version_info < (3, 9):
    from typing_extensions import Annotated, Literal
else:
    from typing import Annotated, Literal

if sys.version_info < (3, 10):
    EllipsisType = type(Ellipsis)
    KW_ONLY = {}
    SLOTS = {}
else:
    from types import EllipsisType

    KW_ONLY = {"kw_only": True}
    SLOTS = {"slots": True}


__all__ = (
    'BaseMetadata',
    'GroupedMetadata',
    'Gt',
    'Ge',
    'Lt',
    'Le',
    'Interval',
    'MultipleOf',
    'MinLen',
    'MaxLen',
    'Len',
    'Timezone',
    'Predicate',
    'LowerCase',
    'UpperCase',
    'IsDigits',
    '__version__',
)

__version__ = '0.5.0'


T = TypeVar('T')


# arguments that start with __ are considered
# positional only
# see https://peps.python.org/pep-0484/#positional-only-arguments


class SupportsGt(Protocol):
    def __gt__(self: T, __other: T) -> bool:
        ...


class SupportsGe(Protocol):
    def __ge__(self: T, __other: T) -> bool:
        ...


class SupportsLt(Protocol):
    def __lt__(self: T, __other: T) -> bool:
        ...


class SupportsLe(Protocol):
    def __le__(self: T, __other: T) -> bool:
        ...


class SupportsMod(Protocol):
    def __mod__(self: T, __other: T) -> T:
        ...


class SupportsDiv(Protocol):
    def __div__(self: T, __other: T) -> T:
        ...


class BaseMetadata:
    """Base class for all metadata.

    This exists mainly so that implementers
    can do `isinstance(..., BaseMetadata)` while traversing field annotations.
    """

    __slots__ = ()


@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
    """Gt(gt=x) implies that the value must be greater than x.

    It can be used with any type that supports the ``>`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    gt: SupportsGt


@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
    """Ge(ge=x) implies that the value must be greater than or equal to x.

    It can be used with any type that supports the ``>=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    ge: SupportsGe


@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
    """Lt(lt=x) implies that the value must be less than x.

    It can be used with any type that supports the ``<`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    lt: SupportsLt


@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
    """Le(le=x) implies that the value must be less than or equal to x.

    It can be used with any type that supports the ``<=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    le: SupportsLe


@runtime_checkable
class GroupedMetadata(Protocol):
    """A grouping of multiple BaseMetadata objects.

    `GroupedMetadata` on its own is not metadata and has no meaning.
    All of the constraints and metadata should be fully expressable
    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.

    Concrete implementations should override `GroupedMetadata.__iter__()`
    to add their own metadata.
    For example:

    >>> @dataclass
    >>> class Field(GroupedMetadata):
    >>>     gt: float | None = None
    >>>     description: str | None = None
    ...
    >>>     def __iter__(self) -> Iterable[BaseMetadata]:
    >>>         if self.gt is not None:
    >>>             yield Gt(self.gt)
    >>>         if self.description is not None:
    >>>             yield Description(self.description)

    Also see the implementation of `Interval` below for an example.

    Parsers should recognize this and unpack it so that it can be used
    both with and without unpacking:

    - `Annotated[int, Field(...)]` (parser must unpack Field)
    - `Annotated[int, *Field(...)]` (PEP-646)
    """  # noqa: trailing-whitespace

    @property
    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
        return True

    def __iter__(self) -> Iterator[BaseMetadata]:
        ...

    if not TYPE_CHECKING:
        __slots__ = ()  # allow subclasses to use slots

        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
            # Basic ABC like functionality without the complexity of an ABC
            super().__init_subclass__(*args, **kwargs)
            if cls.__iter__ is GroupedMetadata.__iter__:
                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")

        def __iter__(self) -> Iterator[BaseMetadata]:  # noqa: F811
            raise NotImplementedError  # more helpful than "None has no attribute..." type errors


@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
    """Interval can express inclusive or exclusive bounds with a single object.

    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
    are interpreted the same way as the single-bound constraints.
    """

    gt: Union[SupportsGt, None] = None
    ge: Union[SupportsGe, None] = None
    lt: Union[SupportsLt, None] = None
    le: Union[SupportsLe, None] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack an Interval into zero or more single-bounds."""
        if self.gt is not None:
            yield Gt(self.gt)
        if self.ge is not None:
            yield Ge(self.ge)
        if self.lt is not None:
            yield Lt(self.lt)
        if self.le is not None:
            yield Le(self.le)


@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
    """MultipleOf(multiple_of=x) might be interpreted in two ways:

    1. Python semantics, implying ``value % multiple_of == 0``, or
    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``

    We encourage users to be aware of these two common interpretations,
    and libraries to carefully document which they implement.
    """

    multiple_of: Union[SupportsDiv, SupportsMod]


@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
    """
    MinLen() implies minimum inclusive length,
    e.g. ``len(value) >= min_length``.
    """

    min_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
    """
    MaxLen() implies maximum inclusive length,
    e.g. ``len(value) <= max_length``.
    """

    max_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
    """
    Len() implies that ``min_length <= len(value) <= max_length``.

    Upper bound may be omitted or ``None`` to indicate no upper length bound.
    """

    min_length: Annotated[int, Ge(0)] = 0
    max_length: Optional[Annotated[int, Ge(0)]] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack a Len into zero or more single-bounds."""
        if self.min_length > 0:
            yield MinLen(self.min_length)
        if self.max_length is not None:
            yield MaxLen(self.max_length)


@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
    """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).

    ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
    ``Timezone(...)`` (the ellipsis literal) expresses that the datetime must be
    tz-aware but any timezone is allowed.

    You may also pass a specific timezone string or timezone object such as
    ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
    you only allow a specific timezone, though we note that this is often
    a symptom of poor design.
    """

    tz: Union[str, timezone, EllipsisType, None]


@dataclass(frozen=True, **SLOTS)
class Predicate(BaseMetadata):
    """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.

    Users should prefer statically inspectable metadata, but if you need the full
    power and flexibility of arbitrary runtime predicates... here it is.

    We provide a few predefined predicates for common string constraints:
    ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
    ``IsDigit = Predicate(str.isdigit)``. Users are encouraged to use methods which
    can be given special handling, and avoid indirection like ``lambda s: s.lower()``.

    Some libraries might have special logic to handle certain predicates, e.g. by
    checking for `str.isdigit` and using its presence to both call custom logic to
    enforce digit-only strings, and customise some generated external schema.

    We do not specify what behaviour should be expected for predicates that raise
    an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
    skip invalid constraints, or statically raise an error; or it might try calling it
    and then propagate or discard the resulting exception.
    """

    func: Callable[[Any], bool]


StrType = TypeVar("StrType", bound=str)

LowerCase = Annotated[StrType, Predicate(str.islower)]
UpperCase = Annotated[StrType, Predicate(str.isupper)]
IsDigits = Annotated[StrType, Predicate(str.isdigit)]
IsAscii = Annotated[StrType, Predicate(str.isascii)]
@ -0,0 +1,133 @@
import sys
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple

if sys.version_info < (3, 9):
    from typing_extensions import Annotated
else:
    from typing import Annotated

import annotated_types as at


class Case(NamedTuple):
    """
    A test case for `annotated_types`.
    """

    annotation: Any
    valid_cases: Iterable[Any]
    invalid_cases: Iterable[Any]


def cases() -> Iterable[Case]:
    # Gt, Ge, Lt, Le
    yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
    yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
    yield Case(
        Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(2000, 1, 1), datetime(1999, 12, 31)],
    )
    yield Case(
        Annotated[datetime, at.Gt(date(2000, 1, 1))],
        [date(2000, 1, 2), date(2000, 1, 3)],
        [date(2000, 1, 1), date(1999, 12, 31)],
    )
    yield Case(
        Annotated[datetime, at.Gt(Decimal('1.123'))],
        [Decimal('1.1231'), Decimal('123')],
        [Decimal('1.123'), Decimal('0')],
    )

    yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
    yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
    yield Case(
        Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(1998, 1, 1), datetime(1999, 12, 31)],
    )

    yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
    yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
    yield Case(
        Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
        [datetime(1999, 12, 31), datetime(1999, 12, 31)],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
    )

    yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
    yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
    yield Case(
        Annotated[datetime, at.Le(datetime(2000, 1, 1))],
        [datetime(2000, 1, 1), datetime(1999, 12, 31)],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
    )

    # Interval
    yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
    yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
    yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
    yield Case(
        Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(2000, 1, 1), datetime(2000, 1, 4)],
    )

    yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
    yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))

    # lengths

    yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
    yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
    yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
    yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))

    yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
    yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
    yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
    yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))

    yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
    yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))

    yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
    yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
    yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))

    # Timezone

    yield Case(
        Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
    )
    yield Case(
        Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
    )
    yield Case(
        Annotated[datetime, at.Timezone(timezone.utc)],
        [datetime(2000, 1, 1, tzinfo=timezone.utc)],
        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
    )
    yield Case(
        Annotated[datetime, at.Timezone('Europe/London')],
        [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
    )

    # predicate types

    yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
    yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
    yield Case(at.IsDigits[str], ['123'], ['', 'ab', 'a1b2'])
    yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])

    yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])

    # custom GroupedMetadata
    class MyCustomGroupedMetadata(at.GroupedMetadata):
        def __iter__(self) -> Iterator[at.Predicate]:
            yield at.Predicate(lambda x: float(x).is_integer())

    yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])
@ -0,0 +1 @@
pip
@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2018 Alex Grönholm

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -0,0 +1,105 @@
Metadata-Version: 2.1
Name: anyio
Version: 3.7.1
Summary: High level compatibility layer for multiple asynchronous event loop implementations
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
License: MIT
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
Project-URL: Source code, https://github.com/agronholm/anyio
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Framework :: AnyIO
Classifier: Typing :: Typed
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: idna (>=2.8)
Requires-Dist: sniffio (>=1.1)
Requires-Dist: exceptiongroup ; python_version < "3.11"
Requires-Dist: typing-extensions ; python_version < "3.8"
Provides-Extra: doc
Requires-Dist: packaging ; extra == 'doc'
Requires-Dist: Sphinx ; extra == 'doc'
Requires-Dist: sphinx-rtd-theme (>=1.2.2) ; extra == 'doc'
Requires-Dist: sphinxcontrib-jquery ; extra == 'doc'
Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc'
Provides-Extra: test
Requires-Dist: anyio[trio] ; extra == 'test'
Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test'
Requires-Dist: hypothesis (>=4.0) ; extra == 'test'
Requires-Dist: psutil (>=5.9) ; extra == 'test'
Requires-Dist: pytest (>=7.0) ; extra == 'test'
Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test'
Requires-Dist: trustme ; extra == 'test'
Requires-Dist: uvloop (>=0.17) ; (python_version < "3.12" and platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test'
Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test'
Provides-Extra: trio
Requires-Dist: trio (<0.22) ; extra == 'trio'

.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
  :target: https://github.com/agronholm/anyio/actions/workflows/test.yml
  :alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
  :target: https://coveralls.io/github/agronholm/anyio?branch=master
  :alt: Code Coverage
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
  :target: https://anyio.readthedocs.io/en/latest/?badge=latest
  :alt: Documentation
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
  :target: https://gitter.im/python-trio/AnyIO
  :alt: Gitter chat

AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
with the native SC of trio itself.

Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full
refactoring necessary. It will blend in with the native libraries of your chosen backend.

Documentation
-------------

View full documentation at: https://anyio.readthedocs.io/

Features
--------

AnyIO offers the following functionality (a minimal task-group sketch follows this list):

* Task groups (nurseries_ in trio terminology)
* High-level networking (TCP, UDP and UNIX sockets)

  * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
    3.8)
  * async/await style UDP sockets (unlike asyncio where you still have to use Transports and
    Protocols)

* A versatile API for byte streams and object streams
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
  streams)
* Worker threads
* Subprocesses
* Asynchronous file I/O (using worker threads)
* Signal handling

AnyIO also comes with its own pytest_ plugin, which supports asynchronous fixtures as well.
It even works with the popular Hypothesis_ library.
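
A minimal sketch of the task-group API from the feature list above (using
``anyio.run()``, which defaults to the asyncio backend):

.. code-block:: python

    import anyio

    async def main():
        async with anyio.create_task_group() as tg:
            # start_soon() schedules the tasks; the ``async with`` block
            # waits until all of them have finished.
            tg.start_soon(anyio.sleep, 1)
            tg.start_soon(anyio.sleep, 1)
        print('both tasks have finished')

    anyio.run(main)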

.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _trio: https://github.com/python-trio/trio
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
.. _pytest: https://docs.pytest.org/en/latest/
.. _Hypothesis: https://hypothesis.works/
@ -0,0 +1,82 @@
anyio-3.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
anyio-3.7.1.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
anyio-3.7.1.dist-info/METADATA,sha256=mOhfXPB7qKVQh3dUtp2NgLysa10jHWeDBNnRg-93A_c,4708
anyio-3.7.1.dist-info/RECORD,,
anyio-3.7.1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
anyio-3.7.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
anyio-3.7.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
anyio/__init__.py,sha256=Pq9lO03Zm5ynIPlhkquaOuIc1dTTeLGNUQ5HT5qwYMI,4073
anyio/__pycache__/__init__.cpython-310.pyc,,
anyio/__pycache__/from_thread.cpython-310.pyc,,
anyio/__pycache__/lowlevel.cpython-310.pyc,,
anyio/__pycache__/pytest_plugin.cpython-310.pyc,,
anyio/__pycache__/to_process.cpython-310.pyc,,
anyio/__pycache__/to_thread.cpython-310.pyc,,
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_backends/__pycache__/__init__.cpython-310.pyc,,
anyio/_backends/__pycache__/_asyncio.cpython-310.pyc,,
anyio/_backends/__pycache__/_trio.cpython-310.pyc,,
anyio/_backends/_asyncio.py,sha256=fgwZmYnGOxT_pX0OZTPPgRdFqKLjnKvQUk7tsfuNmfM,67056
anyio/_backends/_trio.py,sha256=EJAj0tNi0JRM2y3QWP7oS4ct7wnjMSYDG8IZUWMta-E,30035
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_core/__pycache__/__init__.cpython-310.pyc,,
anyio/_core/__pycache__/_compat.cpython-310.pyc,,
anyio/_core/__pycache__/_eventloop.cpython-310.pyc,,
anyio/_core/__pycache__/_exceptions.cpython-310.pyc,,
anyio/_core/__pycache__/_fileio.cpython-310.pyc,,
anyio/_core/__pycache__/_resources.cpython-310.pyc,,
anyio/_core/__pycache__/_signals.cpython-310.pyc,,
anyio/_core/__pycache__/_sockets.cpython-310.pyc,,
anyio/_core/__pycache__/_streams.cpython-310.pyc,,
anyio/_core/__pycache__/_subprocesses.cpython-310.pyc,,
anyio/_core/__pycache__/_synchronization.cpython-310.pyc,,
anyio/_core/__pycache__/_tasks.cpython-310.pyc,,
anyio/_core/__pycache__/_testing.cpython-310.pyc,,
anyio/_core/__pycache__/_typedattr.cpython-310.pyc,,
anyio/_core/_compat.py,sha256=XZfBUInEt7jaiTBI2Qbul7EpJdngbwTtG4Qj26un1YE,5726
anyio/_core/_eventloop.py,sha256=xJ8KflV1bJ9GAuQRr4o1ojv8wWya4nt_XARta8uLPwc,4083
anyio/_core/_exceptions.py,sha256=uOrN5l98o6UrOU6O3kPf0VCDl_zPP-kgZs4IyaLVgwU,2916
anyio/_core/_fileio.py,sha256=DWuIul5izCocmJpgqDDNKc_GhMUwayHKdM5R-sbT_A8,18026
anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
anyio/_core/_signals.py,sha256=KKkZAYL08auydjZnK9S4FQsxx555jT4gXAMcTXdNaok,863
anyio/_core/_sockets.py,sha256=szcPd7kKBmlHnx8g_KJWZo2k6syouRNF2614ZrtqiV0,20667
anyio/_core/_streams.py,sha256=5gryxQiUisED8uFUAHje5O44RL9wyndNMANzzQWUn1U,1518
anyio/_core/_subprocesses.py,sha256=OSAcLAsjfCplXlRyTjWonfS1xU8d5MaZblXYqqY-BM4,4977
anyio/_core/_synchronization.py,sha256=Uquo_52vZ7iZzDDoaN_j-N7jeyAlefzOZ8Pxt9mU6gY,16747
anyio/_core/_tasks.py,sha256=1wZZWlpDkr6w3kMD629vzJDkPselDvx4XVElgTCVwyM,5316
anyio/_core/_testing.py,sha256=7Yll-DOI0uIlIF5VHLUpGGyDPWtDEjFZ85-6ZniwIJU,2217
anyio/_core/_typedattr.py,sha256=8o0gwQYSl04zlO9uHqcHu1T6hOw7peY9NW1mOX5DKnY,2551
anyio/abc/__init__.py,sha256=UkC-KDbyIoKeDUDhJciwANSoyzz_qaFh4Fb7_AvwjZc,2159
anyio/abc/__pycache__/__init__.cpython-310.pyc,,
anyio/abc/__pycache__/_resources.cpython-310.pyc,,
anyio/abc/__pycache__/_sockets.cpython-310.pyc,,
anyio/abc/__pycache__/_streams.cpython-310.pyc,,
anyio/abc/__pycache__/_subprocesses.cpython-310.pyc,,
anyio/abc/__pycache__/_tasks.cpython-310.pyc,,
anyio/abc/__pycache__/_testing.cpython-310.pyc,,
anyio/abc/_resources.py,sha256=h1rkzr3E0MFqdXLh9aLLXe-A5W7k_Jc-5XzNr6SJ4w4,763
anyio/abc/_sockets.py,sha256=WWYJ6HndKCEuvobAPDkmX0tjwN2FOxf3eTGb1DB7wHE,5243
anyio/abc/_streams.py,sha256=yGhOmlVI3W9whmzPuewwYQ2BrKhrUFuWZ4zpVLWOK84,6584
anyio/abc/_subprocesses.py,sha256=r-totaRbFX6kKV-4WTeuswz8n01aap8cvkYVQCRKN0M,2067
anyio/abc/_tasks.py,sha256=a_5DLyiCbp0K57LJPOyF-PZyXmUcv_p9VRXPFj_K03M,3413
anyio/abc/_testing.py,sha256=Eub7gXJ0tVPo_WN5iJAw10FrvC7C1uaL3b2neGr_pfs,1924
anyio/from_thread.py,sha256=aUVKXctPgZ5wK3p5VTyrtjDj9tSQSrH6xCjBuo-hv3A,16563
anyio/lowlevel.py,sha256=cOTncxRW5KeswqYQQdp0pfAw6OFWXius1SPhCYwHZL4,4647
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/pytest_plugin.py,sha256=_Txgl0-I3kO1rk_KATXmIUV57C34hajcJCGcgV26CU0,5022
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/streams/__pycache__/__init__.cpython-310.pyc,,
anyio/streams/__pycache__/buffered.cpython-310.pyc,,
anyio/streams/__pycache__/file.cpython-310.pyc,,
anyio/streams/__pycache__/memory.cpython-310.pyc,,
anyio/streams/__pycache__/stapled.cpython-310.pyc,,
anyio/streams/__pycache__/text.cpython-310.pyc,,
anyio/streams/__pycache__/tls.cpython-310.pyc,,
anyio/streams/buffered.py,sha256=2ifplNLwT73d1UKBxrkFdlC9wTAze9LhPL7pt_7cYgY,4473
anyio/streams/file.py,sha256=-NP6jMcUd2f1VJwgcxgiRHdEsNnhE0lANl0ov_i7FrE,4356
anyio/streams/memory.py,sha256=QZhc5qdomBpGCgrUVWAaqEBxI0oklVxK_62atW6tnNk,9274
anyio/streams/stapled.py,sha256=9u2GxpiOPsGtgO1qsj2tVoW4b8bgiwp5rSDs1BFKkLM,4275
anyio/streams/text.py,sha256=1K4ZCLKl2b7yywrW6wKEeMu3xyQHE_T0aU5_oC9GPTE,5043
anyio/streams/tls.py,sha256=TbdCz1KtfEnp3mxHvkROXRefhE6S1LHiwgWiJX8zYaU,12099
anyio/to_process.py,sha256=_RSsG8UME2nGxeFEdg3OEfv9XshSQwrMU7DAbwWGx9U,9242
anyio/to_thread.py,sha256=HVpTvBei2sSXgJJeNKdwhJwQaW76LDbb1htQ-Mc6zDs,2146
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.40.0)
Root-Is-Purelib: true
Tag: py3-none-any
@ -0,0 +1,2 @@
[pytest11]
anyio = anyio.pytest_plugin
@ -0,0 +1 @@
anyio
@ -0,0 +1,169 @@
from __future__ import annotations

__all__ = (
    "maybe_async",
    "maybe_async_cm",
    "run",
    "sleep",
    "sleep_forever",
    "sleep_until",
    "current_time",
    "get_all_backends",
    "get_cancelled_exc_class",
    "BrokenResourceError",
    "BrokenWorkerProcess",
    "BusyResourceError",
    "ClosedResourceError",
    "DelimiterNotFound",
    "EndOfStream",
    "ExceptionGroup",
    "IncompleteRead",
    "TypedAttributeLookupError",
    "WouldBlock",
    "AsyncFile",
    "Path",
    "open_file",
    "wrap_file",
    "aclose_forcefully",
    "open_signal_receiver",
    "connect_tcp",
    "connect_unix",
    "create_tcp_listener",
    "create_unix_listener",
    "create_udp_socket",
    "create_connected_udp_socket",
    "getaddrinfo",
    "getnameinfo",
    "wait_socket_readable",
    "wait_socket_writable",
    "create_memory_object_stream",
    "run_process",
    "open_process",
    "create_lock",
    "CapacityLimiter",
    "CapacityLimiterStatistics",
    "Condition",
    "ConditionStatistics",
    "Event",
    "EventStatistics",
    "Lock",
    "LockStatistics",
    "Semaphore",
    "SemaphoreStatistics",
    "create_condition",
    "create_event",
    "create_semaphore",
    "create_capacity_limiter",
    "open_cancel_scope",
    "fail_after",
    "move_on_after",
    "current_effective_deadline",
    "TASK_STATUS_IGNORED",
    "CancelScope",
    "create_task_group",
    "TaskInfo",
    "get_current_task",
    "get_running_tasks",
    "wait_all_tasks_blocked",
    "run_sync_in_worker_thread",
    "run_async_from_thread",
    "run_sync_from_thread",
    "current_default_worker_thread_limiter",
    "create_blocking_portal",
    "start_blocking_portal",
    "typed_attribute",
    "TypedAttributeSet",
    "TypedAttributeProvider",
)

from typing import Any

from ._core._compat import maybe_async, maybe_async_cm
from ._core._eventloop import (
    current_time,
    get_all_backends,
    get_cancelled_exc_class,
    run,
    sleep,
    sleep_forever,
    sleep_until,
)
from ._core._exceptions import (
    BrokenResourceError,
    BrokenWorkerProcess,
    BusyResourceError,
    ClosedResourceError,
    DelimiterNotFound,
    EndOfStream,
    ExceptionGroup,
    IncompleteRead,
    TypedAttributeLookupError,
    WouldBlock,
)
from ._core._fileio import AsyncFile, Path, open_file, wrap_file
from ._core._resources import aclose_forcefully
from ._core._signals import open_signal_receiver
from ._core._sockets import (
    connect_tcp,
    connect_unix,
    create_connected_udp_socket,
    create_tcp_listener,
    create_udp_socket,
    create_unix_listener,
    getaddrinfo,
    getnameinfo,
    wait_socket_readable,
    wait_socket_writable,
)
from ._core._streams import create_memory_object_stream
from ._core._subprocesses import open_process, run_process
from ._core._synchronization import (
    CapacityLimiter,
    CapacityLimiterStatistics,
    Condition,
    ConditionStatistics,
    Event,
    EventStatistics,
    Lock,
    LockStatistics,
    Semaphore,
    SemaphoreStatistics,
    create_capacity_limiter,
    create_condition,
    create_event,
    create_lock,
    create_semaphore,
)
from ._core._tasks import (
    TASK_STATUS_IGNORED,
    CancelScope,
    create_task_group,
    current_effective_deadline,
    fail_after,
    move_on_after,
    open_cancel_scope,
)
from ._core._testing import (
    TaskInfo,
    get_current_task,
    get_running_tasks,
    wait_all_tasks_blocked,
)
from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute

# Re-exported here, for backwards compatibility
# isort: off
from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread
from .from_thread import (
    create_blocking_portal,
    run_async_from_thread,
    run_sync_from_thread,
    start_blocking_portal,
)

# Re-export imports so they look like they live directly in this package
key: str
value: Any
for key, value in list(locals().items()):
    if getattr(value, "__module__", "").startswith("anyio."):
        value.__module__ = __name__
File diff suppressed because it is too large
@@ -0,0 +1,996 @@
from __future__ import annotations

import array
import math
import socket
from concurrent.futures import Future
from contextvars import copy_context
from dataclasses import dataclass
from functools import partial
from io import IOBase
from os import PathLike
from signal import Signals
from types import TracebackType
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    AsyncGenerator,
    AsyncIterator,
    Awaitable,
    Callable,
    Collection,
    Coroutine,
    Generic,
    Iterable,
    Mapping,
    NoReturn,
    Sequence,
    TypeVar,
    cast,
)

import sniffio
import trio.from_thread
from outcome import Error, Outcome, Value
from trio.socket import SocketType as TrioSocketType
from trio.to_thread import run_sync

from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc
from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable
from .._core._eventloop import claim_worker_thread
from .._core._exceptions import (
    BrokenResourceError,
    BusyResourceError,
    ClosedResourceError,
    EndOfStream,
)
from .._core._exceptions import ExceptionGroup as BaseExceptionGroup
from .._core._sockets import convert_ipv6_sockaddr
from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter
from .._core._synchronization import Event as BaseEvent
from .._core._synchronization import ResourceGuard
from .._core._tasks import CancelScope as BaseCancelScope
from ..abc import IPSockAddrType, UDPPacketType

if TYPE_CHECKING:
    from trio_typing import TaskStatus

try:
    from trio import lowlevel as trio_lowlevel
except ImportError:
    from trio import hazmat as trio_lowlevel  # type: ignore[no-redef]
    from trio.hazmat import wait_readable, wait_writable
else:
    from trio.lowlevel import wait_readable, wait_writable

try:
    trio_open_process = trio_lowlevel.open_process
except AttributeError:
    # isort: off
    from trio import (  # type: ignore[attr-defined, no-redef]
        open_process as trio_open_process,
    )

T_Retval = TypeVar("T_Retval")
T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType)


#
# Event loop
#

run = trio.run
current_token = trio.lowlevel.current_trio_token
RunVar = trio.lowlevel.RunVar


#
# Miscellaneous
#

sleep = trio.sleep


#
# Timeouts and cancellation
#


class CancelScope(BaseCancelScope):
    def __new__(
        cls, original: trio.CancelScope | None = None, **kwargs: object
    ) -> CancelScope:
        return object.__new__(cls)

    def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None:
        self.__original = original or trio.CancelScope(**kwargs)

    def __enter__(self) -> CancelScope:
        self.__original.__enter__()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        # https://github.com/python-trio/trio-typing/pull/79
        return self.__original.__exit__(  # type: ignore[func-returns-value]
            exc_type, exc_val, exc_tb
        )

    def cancel(self) -> DeprecatedAwaitable:
        self.__original.cancel()
        return DeprecatedAwaitable(self.cancel)

    @property
    def deadline(self) -> float:
        return self.__original.deadline

    @deadline.setter
    def deadline(self, value: float) -> None:
        self.__original.deadline = value

    @property
    def cancel_called(self) -> bool:
        return self.__original.cancel_called

    @property
    def shield(self) -> bool:
        return self.__original.shield

    @shield.setter
    def shield(self, value: bool) -> None:
        self.__original.shield = value


CancelledError = trio.Cancelled
checkpoint = trio.lowlevel.checkpoint
checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled
cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint
current_effective_deadline = trio.current_effective_deadline
current_time = trio.current_time


#
# Task groups
#


class ExceptionGroup(BaseExceptionGroup, trio.MultiError):
    pass


class TaskGroup(abc.TaskGroup):
    def __init__(self) -> None:
        self._active = False
        self._nursery_manager = trio.open_nursery()
        self.cancel_scope = None  # type: ignore[assignment]

    async def __aenter__(self) -> TaskGroup:
        self._active = True
        self._nursery = await self._nursery_manager.__aenter__()
        self.cancel_scope = CancelScope(self._nursery.cancel_scope)
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        try:
            return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb)
        except trio.MultiError as exc:
            raise ExceptionGroup(exc.exceptions) from None
        finally:
            self._active = False

    def start_soon(
        self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
    ) -> None:
        if not self._active:
            raise RuntimeError(
                "This task group is not active; no new tasks can be started."
            )

        self._nursery.start_soon(func, *args, name=name)

    async def start(
        self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
    ) -> object:
        if not self._active:
            raise RuntimeError(
                "This task group is not active; no new tasks can be started."
            )

        return await self._nursery.start(func, *args, name=name)
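# A minimal usage sketch (assuming the public anyio API): the TaskGroup above
# delegates to a trio nursery, so the standard anyio pattern maps directly
# onto trio's structured concurrency.
#
#     import anyio
#
#     async def worker(delay: float) -> None:
#         await anyio.sleep(delay)
#
#     async def main() -> None:
#         async with anyio.create_task_group() as tg:
#             tg.start_soon(worker, 0.1)
#             tg.start_soon(worker, 0.2)
#
#     anyio.run(main, backend="trio")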
#
# Threads
#


async def run_sync_in_worker_thread(
    func: Callable[..., T_Retval],
    *args: object,
    cancellable: bool = False,
    limiter: trio.CapacityLimiter | None = None,
) -> T_Retval:
    def wrapper() -> T_Retval:
        with claim_worker_thread("trio"):
            return func(*args)

    # TODO: remove explicit context copying when trio 0.20 is the minimum requirement
    context = copy_context()
    context.run(sniffio.current_async_library_cvar.set, None)
    return await run_sync(
        context.run, wrapper, cancellable=cancellable, limiter=limiter
    )


# TODO: remove this workaround when trio 0.20 is the minimum requirement
def run_async_from_thread(
    fn: Callable[..., Awaitable[T_Retval]], *args: Any
) -> T_Retval:
    async def wrapper() -> T_Retval:
        retval: T_Retval

        async def inner() -> None:
            nonlocal retval
            __tracebackhide__ = True
            retval = await fn(*args)

        async with trio.open_nursery() as n:
            context.run(n.start_soon, inner)

        __tracebackhide__ = True
        return retval  # noqa: F821

    context = copy_context()
    context.run(sniffio.current_async_library_cvar.set, "trio")
    return trio.from_thread.run(wrapper)


def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval:
    # TODO: remove explicit context copying when trio 0.20 is the minimum requirement
    retval = trio.from_thread.run_sync(copy_context().run, fn, *args)
    return cast(T_Retval, retval)


class BlockingPortal(abc.BlockingPortal):
    def __new__(cls) -> BlockingPortal:
        return object.__new__(cls)

    def __init__(self) -> None:
        super().__init__()
        self._token = trio.lowlevel.current_trio_token()

    def _spawn_task_from_thread(
        self,
        func: Callable,
        args: tuple,
        kwargs: dict[str, Any],
        name: object,
        future: Future,
    ) -> None:
        context = copy_context()
        context.run(sniffio.current_async_library_cvar.set, "trio")
        trio.from_thread.run_sync(
            context.run,
            partial(self._task_group.start_soon, name=name),
            self._call_func,
            func,
            args,
            kwargs,
            future,
            trio_token=self._token,
        )


#
# Subprocesses
#


@dataclass(eq=False)
class ReceiveStreamWrapper(abc.ByteReceiveStream):
    _stream: trio.abc.ReceiveStream

    async def receive(self, max_bytes: int | None = None) -> bytes:
        try:
            data = await self._stream.receive_some(max_bytes)
        except trio.ClosedResourceError as exc:
            raise ClosedResourceError from exc.__cause__
        except trio.BrokenResourceError as exc:
            raise BrokenResourceError from exc.__cause__

        if data:
            return data
        else:
            raise EndOfStream

    async def aclose(self) -> None:
        await self._stream.aclose()


@dataclass(eq=False)
class SendStreamWrapper(abc.ByteSendStream):
    _stream: trio.abc.SendStream

    async def send(self, item: bytes) -> None:
        try:
            await self._stream.send_all(item)
        except trio.ClosedResourceError as exc:
            raise ClosedResourceError from exc.__cause__
        except trio.BrokenResourceError as exc:
            raise BrokenResourceError from exc.__cause__

    async def aclose(self) -> None:
        await self._stream.aclose()


@dataclass(eq=False)
class Process(abc.Process):
    _process: trio.Process
    _stdin: abc.ByteSendStream | None
    _stdout: abc.ByteReceiveStream | None
    _stderr: abc.ByteReceiveStream | None

    async def aclose(self) -> None:
        if self._stdin:
            await self._stdin.aclose()
        if self._stdout:
            await self._stdout.aclose()
        if self._stderr:
            await self._stderr.aclose()

        await self.wait()

    async def wait(self) -> int:
        return await self._process.wait()

    def terminate(self) -> None:
        self._process.terminate()

    def kill(self) -> None:
        self._process.kill()

    def send_signal(self, signal: Signals) -> None:
        self._process.send_signal(signal)

    @property
    def pid(self) -> int:
        return self._process.pid

    @property
    def returncode(self) -> int | None:
        return self._process.returncode

    @property
    def stdin(self) -> abc.ByteSendStream | None:
        return self._stdin

    @property
    def stdout(self) -> abc.ByteReceiveStream | None:
        return self._stdout

    @property
    def stderr(self) -> abc.ByteReceiveStream | None:
        return self._stderr


async def open_process(
    command: str | bytes | Sequence[str | bytes],
    *,
    shell: bool,
    stdin: int | IO[Any] | None,
    stdout: int | IO[Any] | None,
    stderr: int | IO[Any] | None,
    cwd: str | bytes | PathLike | None = None,
    env: Mapping[str, str] | None = None,
    start_new_session: bool = False,
) -> Process:
    process = await trio_open_process(  # type: ignore[misc]
        command,  # type: ignore[arg-type]
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        shell=shell,
        cwd=cwd,
        env=env,
        start_new_session=start_new_session,
    )
    stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None
    stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None
    stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None
    return Process(process, stdin_stream, stdout_stream, stderr_stream)


class _ProcessPoolShutdownInstrument(trio.abc.Instrument):
    def after_run(self) -> None:
        super().after_run()


current_default_worker_process_limiter: RunVar = RunVar(
    "current_default_worker_process_limiter"
)


async def _shutdown_process_pool(workers: set[Process]) -> None:
    process: Process
    try:
        await sleep(math.inf)
    except trio.Cancelled:
        for process in workers:
            if process.returncode is None:
                process.kill()

        with CancelScope(shield=True):
            for process in workers:
                await process.aclose()


def setup_process_pool_exit_at_shutdown(workers: set[Process]) -> None:
    trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers)


#
# Sockets and networking
#


class _TrioSocketMixin(Generic[T_SockAddr]):
    def __init__(self, trio_socket: TrioSocketType) -> None:
        self._trio_socket = trio_socket
        self._closed = False

    def _check_closed(self) -> None:
        if self._closed:
            raise ClosedResourceError
        if self._trio_socket.fileno() < 0:
            raise BrokenResourceError

    @property
    def _raw_socket(self) -> socket.socket:
        return self._trio_socket._sock  # type: ignore[attr-defined]

    async def aclose(self) -> None:
        if self._trio_socket.fileno() >= 0:
            self._closed = True
            self._trio_socket.close()

    def _convert_socket_error(self, exc: BaseException) -> NoReturn:
        if isinstance(exc, trio.ClosedResourceError):
            raise ClosedResourceError from exc
        elif self._trio_socket.fileno() < 0 and self._closed:
            raise ClosedResourceError from None
        elif isinstance(exc, OSError):
            raise BrokenResourceError from exc
        else:
            raise exc


class SocketStream(_TrioSocketMixin, abc.SocketStream):
    def __init__(self, trio_socket: TrioSocketType) -> None:
        super().__init__(trio_socket)
        self._receive_guard = ResourceGuard("reading from")
        self._send_guard = ResourceGuard("writing to")

    async def receive(self, max_bytes: int = 65536) -> bytes:
        with self._receive_guard:
            try:
                data = await self._trio_socket.recv(max_bytes)
            except BaseException as exc:
                self._convert_socket_error(exc)

            if data:
                return data
            else:
                raise EndOfStream

    async def send(self, item: bytes) -> None:
        with self._send_guard:
            view = memoryview(item)
            while view:
                try:
                    bytes_sent = await self._trio_socket.send(view)
                except BaseException as exc:
                    self._convert_socket_error(exc)

                view = view[bytes_sent:]

    async def send_eof(self) -> None:
        self._trio_socket.shutdown(socket.SHUT_WR)


class UNIXSocketStream(SocketStream, abc.UNIXSocketStream):
    async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
        if not isinstance(msglen, int) or msglen < 0:
            raise ValueError("msglen must be a non-negative integer")
        if not isinstance(maxfds, int) or maxfds < 1:
            raise ValueError("maxfds must be a positive integer")

        fds = array.array("i")
        await checkpoint()
        with self._receive_guard:
            while True:
                try:
                    message, ancdata, flags, addr = await self._trio_socket.recvmsg(
                        msglen, socket.CMSG_LEN(maxfds * fds.itemsize)
                    )
                except BaseException as exc:
                    self._convert_socket_error(exc)
                else:
                    if not message and not ancdata:
                        raise EndOfStream

                    break

        for cmsg_level, cmsg_type, cmsg_data in ancdata:
            if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS:
                raise RuntimeError(
                    f"Received unexpected ancillary data; message = {message!r}, "
                    f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}"
                )

            fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])

        return message, list(fds)

    async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
        if not message:
            raise ValueError("message must not be empty")
        if not fds:
            raise ValueError("fds must not be empty")

        filenos: list[int] = []
        for fd in fds:
            if isinstance(fd, int):
                filenos.append(fd)
            elif isinstance(fd, IOBase):
                filenos.append(fd.fileno())

        fdarray = array.array("i", filenos)
        await checkpoint()
        with self._send_guard:
            while True:
                try:
                    await self._trio_socket.sendmsg(
                        [message],
                        [
                            (
                                socket.SOL_SOCKET,
                                socket.SCM_RIGHTS,  # type: ignore[list-item]
                                fdarray,
                            )
                        ],
                    )
                    break
                except BaseException as exc:
                    self._convert_socket_error(exc)


class TCPSocketListener(_TrioSocketMixin, abc.SocketListener):
    def __init__(self, raw_socket: socket.socket):
        super().__init__(trio.socket.from_stdlib_socket(raw_socket))
        self._accept_guard = ResourceGuard("accepting connections from")

    async def accept(self) -> SocketStream:
        with self._accept_guard:
            try:
                trio_socket, _addr = await self._trio_socket.accept()
            except BaseException as exc:
                self._convert_socket_error(exc)

            trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
            return SocketStream(trio_socket)


class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener):
    def __init__(self, raw_socket: socket.socket):
        super().__init__(trio.socket.from_stdlib_socket(raw_socket))
        self._accept_guard = ResourceGuard("accepting connections from")

    async def accept(self) -> UNIXSocketStream:
        with self._accept_guard:
            try:
                trio_socket, _addr = await self._trio_socket.accept()
            except BaseException as exc:
                self._convert_socket_error(exc)

            return UNIXSocketStream(trio_socket)


class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket):
    def __init__(self, trio_socket: TrioSocketType) -> None:
        super().__init__(trio_socket)
        self._receive_guard = ResourceGuard("reading from")
        self._send_guard = ResourceGuard("writing to")

    async def receive(self) -> tuple[bytes, IPSockAddrType]:
        with self._receive_guard:
            try:
                data, addr = await self._trio_socket.recvfrom(65536)
                return data, convert_ipv6_sockaddr(addr)
            except BaseException as exc:
                self._convert_socket_error(exc)

    async def send(self, item: UDPPacketType) -> None:
        with self._send_guard:
            try:
                await self._trio_socket.sendto(*item)
            except BaseException as exc:
                self._convert_socket_error(exc)


class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket):
    def __init__(self, trio_socket: TrioSocketType) -> None:
        super().__init__(trio_socket)
        self._receive_guard = ResourceGuard("reading from")
        self._send_guard = ResourceGuard("writing to")

    async def receive(self) -> bytes:
        with self._receive_guard:
            try:
                return await self._trio_socket.recv(65536)
            except BaseException as exc:
                self._convert_socket_error(exc)

    async def send(self, item: bytes) -> None:
        with self._send_guard:
            try:
                await self._trio_socket.send(item)
            except BaseException as exc:
                self._convert_socket_error(exc)


async def connect_tcp(
    host: str, port: int, local_address: IPSockAddrType | None = None
) -> SocketStream:
    family = socket.AF_INET6 if ":" in host else socket.AF_INET
    trio_socket = trio.socket.socket(family)
    trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    if local_address:
        await trio_socket.bind(local_address)

    try:
        await trio_socket.connect((host, port))
    except BaseException:
        trio_socket.close()
        raise

    return SocketStream(trio_socket)


async def connect_unix(path: str) -> UNIXSocketStream:
    trio_socket = trio.socket.socket(socket.AF_UNIX)
    try:
        await trio_socket.connect(path)
    except BaseException:
        trio_socket.close()
        raise

    return UNIXSocketStream(trio_socket)


async def create_udp_socket(
    family: socket.AddressFamily,
    local_address: IPSockAddrType | None,
    remote_address: IPSockAddrType | None,
    reuse_port: bool,
) -> UDPSocket | ConnectedUDPSocket:
    trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM)

    if reuse_port:
        trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)

    if local_address:
        await trio_socket.bind(local_address)

    if remote_address:
        await trio_socket.connect(remote_address)
        return ConnectedUDPSocket(trio_socket)
    else:
        return UDPSocket(trio_socket)


getaddrinfo = trio.socket.getaddrinfo
getnameinfo = trio.socket.getnameinfo


async def wait_socket_readable(sock: socket.socket) -> None:
    try:
        await wait_readable(sock)
    except trio.ClosedResourceError as exc:
        raise ClosedResourceError().with_traceback(exc.__traceback__) from None
    except trio.BusyResourceError:
        raise BusyResourceError("reading from") from None


async def wait_socket_writable(sock: socket.socket) -> None:
    try:
        await wait_writable(sock)
    except trio.ClosedResourceError as exc:
        raise ClosedResourceError().with_traceback(exc.__traceback__) from None
    except trio.BusyResourceError:
        raise BusyResourceError("writing to") from None


#
# Synchronization
#


class Event(BaseEvent):
    def __new__(cls) -> Event:
        return object.__new__(cls)

    def __init__(self) -> None:
        self.__original = trio.Event()

    def is_set(self) -> bool:
        return self.__original.is_set()

    async def wait(self) -> None:
        return await self.__original.wait()

    def statistics(self) -> EventStatistics:
        orig_statistics = self.__original.statistics()
        return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting)

    def set(self) -> DeprecatedAwaitable:
        self.__original.set()
        return DeprecatedAwaitable(self.set)
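# A minimal usage sketch (assuming the public anyio API) of the Event wrapper
# above; set() returns a DeprecatedAwaitable only so that "await event.set()"
# from AnyIO 2.x code keeps working.
#
#     import anyio
#
#     async def main() -> None:
#         event = anyio.Event()
#
#         async def setter() -> None:
#             event.set()  # no "await" needed in AnyIO 3.x
#
#         async with anyio.create_task_group() as tg:
#             tg.start_soon(setter)
#             await event.wait()
#
#     anyio.run(main, backend="trio")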
class CapacityLimiter(BaseCapacityLimiter):
    def __new__(cls, *args: object, **kwargs: object) -> CapacityLimiter:
        return object.__new__(cls)

    def __init__(
        self, *args: Any, original: trio.CapacityLimiter | None = None
    ) -> None:
        self.__original = original or trio.CapacityLimiter(*args)

    async def __aenter__(self) -> None:
        return await self.__original.__aenter__()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        await self.__original.__aexit__(exc_type, exc_val, exc_tb)

    @property
    def total_tokens(self) -> float:
        return self.__original.total_tokens

    @total_tokens.setter
    def total_tokens(self, value: float) -> None:
        self.__original.total_tokens = value

    @property
    def borrowed_tokens(self) -> int:
        return self.__original.borrowed_tokens

    @property
    def available_tokens(self) -> float:
        return self.__original.available_tokens

    def acquire_nowait(self) -> DeprecatedAwaitable:
        self.__original.acquire_nowait()
        return DeprecatedAwaitable(self.acquire_nowait)

    def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
        self.__original.acquire_on_behalf_of_nowait(borrower)
        return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait)

    async def acquire(self) -> None:
        await self.__original.acquire()

    async def acquire_on_behalf_of(self, borrower: object) -> None:
        await self.__original.acquire_on_behalf_of(borrower)

    def release(self) -> None:
        return self.__original.release()

    def release_on_behalf_of(self, borrower: object) -> None:
        return self.__original.release_on_behalf_of(borrower)

    def statistics(self) -> CapacityLimiterStatistics:
        orig = self.__original.statistics()
        return CapacityLimiterStatistics(
            borrowed_tokens=orig.borrowed_tokens,
            total_tokens=orig.total_tokens,
            borrowers=orig.borrowers,
            tasks_waiting=orig.tasks_waiting,
        )


_capacity_limiter_wrapper: RunVar = RunVar("_capacity_limiter_wrapper")


def current_default_thread_limiter() -> CapacityLimiter:
    try:
        return _capacity_limiter_wrapper.get()
    except LookupError:
        limiter = CapacityLimiter(
            original=trio.to_thread.current_default_thread_limiter()
        )
        _capacity_limiter_wrapper.set(limiter)
        return limiter


#
# Signal handling
#


class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]):
    _iterator: AsyncIterator[int]

    def __init__(self, signals: tuple[Signals, ...]):
        self._signals = signals

    def __enter__(self) -> _SignalReceiver:
        self._cm = trio.open_signal_receiver(*self._signals)
        self._iterator = self._cm.__enter__()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        return self._cm.__exit__(exc_type, exc_val, exc_tb)

    def __aiter__(self) -> _SignalReceiver:
        return self

    async def __anext__(self) -> Signals:
        signum = await self._iterator.__anext__()
        return Signals(signum)


def open_signal_receiver(*signals: Signals) -> _SignalReceiver:
    return _SignalReceiver(signals)
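# A minimal usage sketch (POSIX only, assuming the public anyio API) of the
# signal receiver defined above:
#
#     import signal
#     import anyio
#
#     async def main() -> None:
#         with anyio.open_signal_receiver(signal.SIGTERM) as signals:
#             async for signum in signals:
#                 print("got", signum)
#                 break
#
#     anyio.run(main, backend="trio")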
#
# Testing and debugging
#


def get_current_task() -> TaskInfo:
    task = trio_lowlevel.current_task()

    parent_id = None
    if task.parent_nursery and task.parent_nursery.parent_task:
        parent_id = id(task.parent_nursery.parent_task)

    return TaskInfo(id(task), parent_id, task.name, task.coro)


def get_running_tasks() -> list[TaskInfo]:
    root_task = trio_lowlevel.current_root_task()
    task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)]
    nurseries = root_task.child_nurseries
    while nurseries:
        new_nurseries: list[trio.Nursery] = []
        for nursery in nurseries:
            for task in nursery.child_tasks:
                task_infos.append(
                    TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro)
                )
                new_nurseries.extend(task.child_nurseries)

        nurseries = new_nurseries

    return task_infos


def wait_all_tasks_blocked() -> Awaitable[None]:
    import trio.testing

    return trio.testing.wait_all_tasks_blocked()


class TestRunner(abc.TestRunner):
    def __init__(self, **options: Any) -> None:
        from collections import deque
        from queue import Queue

        self._call_queue: Queue[Callable[..., object]] = Queue()
        self._result_queue: deque[Outcome] = deque()
        self._stop_event: trio.Event | None = None
        self._nursery: trio.Nursery | None = None
        self._options = options

    async def _trio_main(self) -> None:
        self._stop_event = trio.Event()
        async with trio.open_nursery() as self._nursery:
            await self._stop_event.wait()

    async def _call_func(
        self, func: Callable[..., Awaitable[object]], args: tuple, kwargs: dict
    ) -> None:
        try:
            retval = await func(*args, **kwargs)
        except BaseException as exc:
            self._result_queue.append(Error(exc))
        else:
            self._result_queue.append(Value(retval))

    def _main_task_finished(self, outcome: object) -> None:
        self._nursery = None

    def _get_nursery(self) -> trio.Nursery:
        if self._nursery is None:
            trio.lowlevel.start_guest_run(
                self._trio_main,
                run_sync_soon_threadsafe=self._call_queue.put,
                done_callback=self._main_task_finished,
                **self._options,
            )
            while self._nursery is None:
                self._call_queue.get()()

        return self._nursery

    def _call(
        self, func: Callable[..., Awaitable[T_Retval]], *args: object, **kwargs: object
    ) -> T_Retval:
        self._get_nursery().start_soon(self._call_func, func, args, kwargs)
        while not self._result_queue:
            self._call_queue.get()()

        outcome = self._result_queue.pop()
        return outcome.unwrap()

    def close(self) -> None:
        if self._stop_event:
            self._stop_event.set()
            while self._nursery is not None:
                self._call_queue.get()()

    def run_asyncgen_fixture(
        self,
        fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]],
        kwargs: dict[str, Any],
    ) -> Iterable[T_Retval]:
        async def fixture_runner(*, task_status: TaskStatus[T_Retval]) -> None:
            agen = fixture_func(**kwargs)
            retval = await agen.asend(None)
            task_status.started(retval)
            await teardown_event.wait()
            try:
                await agen.asend(None)
            except StopAsyncIteration:
                pass
            else:
                await agen.aclose()
                raise RuntimeError("Async generator fixture did not stop")

        teardown_event = trio.Event()
        fixture_value = self._call(lambda: self._get_nursery().start(fixture_runner))
        yield fixture_value
        teardown_event.set()

    def run_fixture(
        self,
        fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]],
        kwargs: dict[str, Any],
    ) -> T_Retval:
        return self._call(fixture_func, **kwargs)

    def run_test(
        self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any]
    ) -> None:
        self._call(test_func, **kwargs)
@@ -0,0 +1,217 @@
from __future__ import annotations

from abc import ABCMeta, abstractmethod
from contextlib import AbstractContextManager
from types import TracebackType
from typing import (
    TYPE_CHECKING,
    Any,
    AsyncContextManager,
    Callable,
    ContextManager,
    Generator,
    Generic,
    Iterable,
    List,
    TypeVar,
    Union,
    overload,
)
from warnings import warn

if TYPE_CHECKING:
    from ._testing import TaskInfo
else:
    TaskInfo = object

T = TypeVar("T")
AnyDeprecatedAwaitable = Union[
    "DeprecatedAwaitable",
    "DeprecatedAwaitableFloat",
    "DeprecatedAwaitableList[T]",
    TaskInfo,
]


@overload
async def maybe_async(__obj: TaskInfo) -> TaskInfo:
    ...


@overload
async def maybe_async(__obj: DeprecatedAwaitableFloat) -> float:
    ...


@overload
async def maybe_async(__obj: DeprecatedAwaitableList[T]) -> list[T]:
    ...


@overload
async def maybe_async(__obj: DeprecatedAwaitable) -> None:
    ...


async def maybe_async(
    __obj: AnyDeprecatedAwaitable[T],
) -> TaskInfo | float | list[T] | None:
    """
    Await on the given object if necessary.

    This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
    methods were converted from coroutine functions into regular functions.

    Do **not** try to use this for any other purpose!

    :return: the result of awaiting on the object if coroutine, or the object itself otherwise

    .. versionadded:: 2.2

    """
    return __obj._unwrap()
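# A minimal sketch of the bridge described in the docstring: under AnyIO 3.x,
# Event.set() is synchronous, but code that must also run on AnyIO 2.x (where
# it was a coroutine) can wrap the call:
#
#     import anyio
#
#     async def portable_set(event: anyio.Event) -> None:
#         await anyio.maybe_async(event.set())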
class _ContextManagerWrapper:
    def __init__(self, cm: ContextManager[T]):
        self._cm = cm

    async def __aenter__(self) -> T:
        return self._cm.__enter__()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        return self._cm.__exit__(exc_type, exc_val, exc_tb)


def maybe_async_cm(
    cm: ContextManager[T] | AsyncContextManager[T],
) -> AsyncContextManager[T]:
    """
    Wrap a regular context manager as an async one if necessary.

    This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
    methods were changed to return regular context managers instead of async ones.

    :param cm: a regular or async context manager
    :return: an async context manager

    .. versionadded:: 2.2

    """
    if not isinstance(cm, AbstractContextManager):
        raise TypeError("Given object is not a context manager")

    return _ContextManagerWrapper(cm)
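# A minimal sketch of maybe_async_cm(): fail_after() returns a regular context
# manager in AnyIO 3.x but was async in 2.x; wrapping it keeps "async with"
# working on both versions.
#
#     import anyio
#
#     async def portable_timeout() -> None:
#         async with anyio.maybe_async_cm(anyio.fail_after(5)):
#             await anyio.sleep(1)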
def _warn_deprecation(
    awaitable: AnyDeprecatedAwaitable[Any], stacklevel: int = 1
) -> None:
    warn(
        f'Awaiting on {awaitable._name}() is deprecated. Use "await '
        f'anyio.maybe_async({awaitable._name}(...))" if you have to support both AnyIO 2.x '
        f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.',
        DeprecationWarning,
        stacklevel=stacklevel + 1,
    )


class DeprecatedAwaitable:
    def __init__(self, func: Callable[..., DeprecatedAwaitable]):
        self._name = f"{func.__module__}.{func.__qualname__}"

    def __await__(self) -> Generator[None, None, None]:
        _warn_deprecation(self)
        if False:
            yield

    def __reduce__(self) -> tuple[type[None], tuple[()]]:
        return type(None), ()

    def _unwrap(self) -> None:
        return None


class DeprecatedAwaitableFloat(float):
    def __new__(
        cls, x: float, func: Callable[..., DeprecatedAwaitableFloat]
    ) -> DeprecatedAwaitableFloat:
        return super().__new__(cls, x)

    def __init__(self, x: float, func: Callable[..., DeprecatedAwaitableFloat]):
        self._name = f"{func.__module__}.{func.__qualname__}"

    def __await__(self) -> Generator[None, None, float]:
        _warn_deprecation(self)
        if False:
            yield

        return float(self)

    def __reduce__(self) -> tuple[type[float], tuple[float]]:
        return float, (float(self),)

    def _unwrap(self) -> float:
        return float(self)


class DeprecatedAwaitableList(List[T]):
    def __init__(
        self,
        iterable: Iterable[T] = (),
        *,
        func: Callable[..., DeprecatedAwaitableList[T]],
    ):
        super().__init__(iterable)
        self._name = f"{func.__module__}.{func.__qualname__}"

    def __await__(self) -> Generator[None, None, list[T]]:
        _warn_deprecation(self)
        if False:
            yield

        return list(self)

    def __reduce__(self) -> tuple[type[list[T]], tuple[list[T]]]:
        return list, (list(self),)

    def _unwrap(self) -> list[T]:
        return list(self)


class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta):
    @abstractmethod
    def __enter__(self) -> T:
        pass

    @abstractmethod
    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        pass

    async def __aenter__(self) -> T:
        warn(
            f"Using {self.__class__.__name__} as an async context manager has been deprecated. "
            f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to '
            f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if '
            f"you are completely migrating to AnyIO 3+.",
            DeprecationWarning,
        )
        return self.__enter__()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        return self.__exit__(exc_type, exc_val, exc_tb)
@@ -0,0 +1,153 @@
from __future__ import annotations

import math
import sys
import threading
from contextlib import contextmanager
from importlib import import_module
from typing import (
    Any,
    Awaitable,
    Callable,
    Generator,
    TypeVar,
)

import sniffio

from ._compat import DeprecatedAwaitableFloat

# This must be updated when new backends are introduced
BACKENDS = "asyncio", "trio"

T_Retval = TypeVar("T_Retval")
threadlocals = threading.local()


def run(
    func: Callable[..., Awaitable[T_Retval]],
    *args: object,
    backend: str = "asyncio",
    backend_options: dict[str, Any] | None = None,
) -> T_Retval:
    """
    Run the given coroutine function in an asynchronous event loop.

    The current thread must not be already running an event loop.

    :param func: a coroutine function
    :param args: positional arguments to ``func``
    :param backend: name of the asynchronous event loop implementation – currently either
        ``asyncio`` or ``trio``
    :param backend_options: keyword arguments to call the backend ``run()`` implementation with
        (documented :ref:`here <backend options>`)
    :return: the return value of the coroutine function
    :raises RuntimeError: if an asynchronous event loop is already running in this thread
    :raises LookupError: if the named backend is not found

    """
    try:
        asynclib_name = sniffio.current_async_library()
    except sniffio.AsyncLibraryNotFoundError:
        pass
    else:
        raise RuntimeError(f"Already running {asynclib_name} in this thread")

    try:
        asynclib = import_module(f"..._backends._{backend}", package=__name__)
    except ImportError as exc:
        raise LookupError(f"No such backend: {backend}") from exc

    token = None
    if sniffio.current_async_library_cvar.get(None) is None:
        # Since we're in control of the event loop, we can cache the name of the async library
        token = sniffio.current_async_library_cvar.set(backend)

    try:
        backend_options = backend_options or {}
        return asynclib.run(func, *args, **backend_options)
    finally:
        if token:
            sniffio.current_async_library_cvar.reset(token)
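# A minimal usage sketch of run(): the backend module is resolved by name and
# its own run() implementation drives the event loop.
#
#     import anyio
#
#     async def main() -> str:
#         await anyio.sleep(0.1)
#         return "done"
#
#     print(anyio.run(main))                  # asyncio backend (default)
#     print(anyio.run(main, backend="trio"))  # trio backend, if installed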
async def sleep(delay: float) -> None:
    """
    Pause the current task for the specified duration.

    :param delay: the duration, in seconds

    """
    return await get_asynclib().sleep(delay)


async def sleep_forever() -> None:
    """
    Pause the current task until it's cancelled.

    This is a shortcut for ``sleep(math.inf)``.

    .. versionadded:: 3.1

    """
    await sleep(math.inf)


async def sleep_until(deadline: float) -> None:
    """
    Pause the current task until the given time.

    :param deadline: the absolute time to wake up at (according to the internal monotonic clock of
        the event loop)

    .. versionadded:: 3.1

    """
    now = current_time()
    await sleep(max(deadline - now, 0))
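# A minimal sketch relating sleep_until() to the event loop clock: the deadline
# is absolute, so it composes with current_time() rather than wall-clock time.
#
#     import anyio
#
#     async def main() -> None:
#         deadline = anyio.current_time() + 2.5
#         await anyio.sleep_until(deadline)  # wakes roughly 2.5 s from now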
def current_time() -> DeprecatedAwaitableFloat:
    """
    Return the current value of the event loop's internal clock.

    :return: the clock value (seconds)

    """
    return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time)


def get_all_backends() -> tuple[str, ...]:
    """Return a tuple of the names of all built-in backends."""
    return BACKENDS


def get_cancelled_exc_class() -> type[BaseException]:
    """Return the current async library's cancellation exception class."""
    return get_asynclib().CancelledError


#
# Private API
#


@contextmanager
def claim_worker_thread(backend: str) -> Generator[Any, None, None]:
    module = sys.modules["anyio._backends._" + backend]
    threadlocals.current_async_module = module
    try:
        yield
    finally:
        del threadlocals.current_async_module


def get_asynclib(asynclib_name: str | None = None) -> Any:
    if asynclib_name is None:
        asynclib_name = sniffio.current_async_library()

    modulename = "anyio._backends._" + asynclib_name
    try:
        return sys.modules[modulename]
    except KeyError:
        return import_module(modulename)
@@ -0,0 +1,94 @@
from __future__ import annotations

from traceback import format_exception


class BrokenResourceError(Exception):
    """
    Raised when trying to use a resource that has been rendered unusable due to external causes
    (e.g. a send stream whose peer has disconnected).
    """


class BrokenWorkerProcess(Exception):
    """
    Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise
    misbehaves.
    """


class BusyResourceError(Exception):
    """Raised when two tasks are trying to read from or write to the same resource concurrently."""

    def __init__(self, action: str):
        super().__init__(f"Another task is already {action} this resource")


class ClosedResourceError(Exception):
    """Raised when trying to use a resource that has been closed."""


class DelimiterNotFound(Exception):
    """
    Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
    maximum number of bytes has been read without the delimiter being found.
    """

    def __init__(self, max_bytes: int) -> None:
        super().__init__(
            f"The delimiter was not found among the first {max_bytes} bytes"
        )


class EndOfStream(Exception):
    """Raised when trying to read from a stream that has been closed from the other end."""


class ExceptionGroup(BaseException):
    """
    Raised when multiple exceptions have been raised in a task group.

    :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together
    """

    SEPARATOR = "----------------------------\n"

    exceptions: list[BaseException]

    def __str__(self) -> str:
        tracebacks = [
            "".join(format_exception(type(exc), exc, exc.__traceback__))
            for exc in self.exceptions
        ]
        return (
            f"{len(self.exceptions)} exceptions were raised in the task group:\n"
            f"{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}"
        )

    def __repr__(self) -> str:
        exception_reprs = ", ".join(repr(exc) for exc in self.exceptions)
        return f"<{self.__class__.__name__}: {exception_reprs}>"


class IncompleteRead(Exception):
    """
    Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
    :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
    connection is closed before the requested amount of bytes has been read.
    """

    def __init__(self) -> None:
        super().__init__(
            "The stream was closed before the read operation could be completed"
        )


class TypedAttributeLookupError(LookupError):
    """
    Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not
    found and no default value has been given.
    """


class WouldBlock(Exception):
    """Raised by ``X_nowait`` functions if ``X()`` would block."""
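# A minimal sketch of the WouldBlock convention, assuming the public memory
# object stream API: receive_nowait() raises WouldBlock whenever receive()
# would have had to wait.
#
#     import anyio
#
#     async def main() -> None:
#         send, receive = anyio.create_memory_object_stream()
#         try:
#             receive.receive_nowait()
#         except anyio.WouldBlock:
#             print("nothing buffered yet")
#
#     anyio.run(main)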
@ -0,0 +1,603 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import sys
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from functools import partial
|
||||||
|
from os import PathLike
|
||||||
|
from typing import (
|
||||||
|
IO,
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
|
AnyStr,
|
||||||
|
AsyncIterator,
|
||||||
|
Callable,
|
||||||
|
Generic,
|
||||||
|
Iterable,
|
||||||
|
Iterator,
|
||||||
|
Sequence,
|
||||||
|
cast,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .. import to_thread
|
||||||
|
from ..abc import AsyncResource
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 8):
|
||||||
|
from typing import Final
|
||||||
|
else:
|
||||||
|
from typing_extensions import Final
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
|
||||||
|
else:
|
||||||
|
ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncFile(AsyncResource, Generic[AnyStr]):
|
||||||
|
"""
|
||||||
|
An asynchronous file object.
|
||||||
|
|
||||||
|
This class wraps a standard file object and provides async friendly versions of the following
|
||||||
|
blocking methods (where available on the original file object):
|
||||||
|
|
||||||
|
* read
|
||||||
|
* read1
|
||||||
|
* readline
|
||||||
|
* readlines
|
||||||
|
* readinto
|
||||||
|
* readinto1
|
||||||
|
* write
|
||||||
|
* writelines
|
||||||
|
* truncate
|
||||||
|
* seek
|
||||||
|
* tell
|
||||||
|
* flush
|
||||||
|
|
||||||
|
All other methods are directly passed through.
|
||||||
|
|
||||||
|
This class supports the asynchronous context manager protocol which closes the underlying file
|
||||||
|
at the end of the context block.
|
||||||
|
|
||||||
|
This class also supports asynchronous iteration::
|
||||||
|
|
||||||
|
async with await open_file(...) as f:
|
||||||
|
async for line in f:
|
||||||
|
print(line)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, fp: IO[AnyStr]) -> None:
|
||||||
|
self._fp: Any = fp
|
||||||
|
|
||||||
|
def __getattr__(self, name: str) -> object:
|
||||||
|
return getattr(self._fp, name)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def wrapped(self) -> IO[AnyStr]:
|
||||||
|
"""The wrapped file object."""
|
||||||
|
return self._fp
|
||||||
|
|
||||||
|
    async def __aiter__(self) -> AsyncIterator[AnyStr]:
        while True:
            line = await self.readline()
            if line:
                yield line
            else:
                break

    async def aclose(self) -> None:
        return await to_thread.run_sync(self._fp.close)

    async def read(self, size: int = -1) -> AnyStr:
        return await to_thread.run_sync(self._fp.read, size)

    async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes:
        return await to_thread.run_sync(self._fp.read1, size)

    async def readline(self) -> AnyStr:
        return await to_thread.run_sync(self._fp.readline)

    async def readlines(self) -> list[AnyStr]:
        return await to_thread.run_sync(self._fp.readlines)

    async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes:
        return await to_thread.run_sync(self._fp.readinto, b)

    async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes:
        return await to_thread.run_sync(self._fp.readinto1, b)

    @overload
    async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int:
        ...

    @overload
    async def write(self: AsyncFile[str], b: str) -> int:
        ...

    async def write(self, b: ReadableBuffer | str) -> int:
        return await to_thread.run_sync(self._fp.write, b)

    @overload
    async def writelines(
        self: AsyncFile[bytes], lines: Iterable[ReadableBuffer]
    ) -> None:
        ...

    @overload
    async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None:
        ...

    async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None:
        return await to_thread.run_sync(self._fp.writelines, lines)

    async def truncate(self, size: int | None = None) -> int:
        return await to_thread.run_sync(self._fp.truncate, size)

    async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
        return await to_thread.run_sync(self._fp.seek, offset, whence)

    async def tell(self) -> int:
        return await to_thread.run_sync(self._fp.tell)

    async def flush(self) -> None:
        return await to_thread.run_sync(self._fp.flush)


@overload
async def open_file(
    file: str | PathLike[str] | int,
    mode: OpenBinaryMode,
    buffering: int = ...,
    encoding: str | None = ...,
    errors: str | None = ...,
    newline: str | None = ...,
    closefd: bool = ...,
    opener: Callable[[str, int], int] | None = ...,
) -> AsyncFile[bytes]:
    ...


@overload
async def open_file(
    file: str | PathLike[str] | int,
    mode: OpenTextMode = ...,
    buffering: int = ...,
    encoding: str | None = ...,
    errors: str | None = ...,
    newline: str | None = ...,
    closefd: bool = ...,
    opener: Callable[[str, int], int] | None = ...,
) -> AsyncFile[str]:
    ...


async def open_file(
    file: str | PathLike[str] | int,
    mode: str = "r",
    buffering: int = -1,
    encoding: str | None = None,
    errors: str | None = None,
    newline: str | None = None,
    closefd: bool = True,
    opener: Callable[[str, int], int] | None = None,
) -> AsyncFile[Any]:
    """
    Open a file asynchronously.

    The arguments are exactly the same as for the builtin :func:`open`.

    :return: an asynchronous file object

    """
    fp = await to_thread.run_sync(
        open, file, mode, buffering, encoding, errors, newline, closefd, opener
    )
    return AsyncFile(fp)
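

# A minimal usage sketch (illustrative only, not part of the module): opening and
# reading a file from async code. The file name "example.txt" is hypothetical.
async def _open_file_example() -> None:
    async with await open_file("example.txt") as f:  # hypothetical file name
        contents = await f.read()  # the blocking read runs in a worker thread
        print(contents)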


def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
    """
    Wrap an existing file as an asynchronous file.

    :param file: an existing file-like object
    :return: an asynchronous file object

    """
    return AsyncFile(file)


@dataclass(eq=False)
class _PathIterator(AsyncIterator["Path"]):
    iterator: Iterator[PathLike[str]]

    async def __anext__(self) -> Path:
        nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True)
        if nextval is None:
            raise StopAsyncIteration from None

        return Path(cast("PathLike[str]", nextval))


class Path:
    """
    An asynchronous version of :class:`pathlib.Path`.

    This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but
    it is compatible with the :class:`os.PathLike` interface.

    It implements the Python 3.10 version of the :class:`pathlib.Path` interface, except for the
    deprecated :meth:`~pathlib.Path.link_to` method.

    Any methods that do disk I/O need to be awaited on. These methods are:

    * :meth:`~pathlib.Path.absolute`
    * :meth:`~pathlib.Path.chmod`
    * :meth:`~pathlib.Path.cwd`
    * :meth:`~pathlib.Path.exists`
    * :meth:`~pathlib.Path.expanduser`
    * :meth:`~pathlib.Path.group`
    * :meth:`~pathlib.Path.hardlink_to`
    * :meth:`~pathlib.Path.home`
    * :meth:`~pathlib.Path.is_block_device`
    * :meth:`~pathlib.Path.is_char_device`
    * :meth:`~pathlib.Path.is_dir`
    * :meth:`~pathlib.Path.is_fifo`
    * :meth:`~pathlib.Path.is_file`
    * :meth:`~pathlib.Path.is_mount`
    * :meth:`~pathlib.Path.lchmod`
    * :meth:`~pathlib.Path.lstat`
    * :meth:`~pathlib.Path.mkdir`
    * :meth:`~pathlib.Path.open`
    * :meth:`~pathlib.Path.owner`
    * :meth:`~pathlib.Path.read_bytes`
    * :meth:`~pathlib.Path.read_text`
    * :meth:`~pathlib.Path.readlink`
    * :meth:`~pathlib.Path.rename`
    * :meth:`~pathlib.Path.replace`
    * :meth:`~pathlib.Path.rmdir`
    * :meth:`~pathlib.Path.samefile`
    * :meth:`~pathlib.Path.stat`
    * :meth:`~pathlib.Path.touch`
    * :meth:`~pathlib.Path.unlink`
    * :meth:`~pathlib.Path.write_bytes`
    * :meth:`~pathlib.Path.write_text`

    Additionally, the following methods return an async iterator yielding :class:`~.Path` objects:

    * :meth:`~pathlib.Path.glob`
    * :meth:`~pathlib.Path.iterdir`
    * :meth:`~pathlib.Path.rglob`
    """

    __slots__ = "_path", "__weakref__"

    __weakref__: Any

    def __init__(self, *args: str | PathLike[str]) -> None:
        self._path: Final[pathlib.Path] = pathlib.Path(*args)

    def __fspath__(self) -> str:
        return self._path.__fspath__()

    def __str__(self) -> str:
        return self._path.__str__()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.as_posix()!r})"

    def __bytes__(self) -> bytes:
        return self._path.__bytes__()

    def __hash__(self) -> int:
        return self._path.__hash__()

    def __eq__(self, other: object) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__eq__(target)

    def __lt__(self, other: Path) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__lt__(target)

    def __le__(self, other: Path) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__le__(target)

    def __gt__(self, other: Path) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__gt__(target)

    def __ge__(self, other: Path) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__ge__(target)

    def __truediv__(self, other: Any) -> Path:
        return Path(self._path / other)

    def __rtruediv__(self, other: Any) -> Path:
        return Path(other) / self

    @property
    def parts(self) -> tuple[str, ...]:
        return self._path.parts

    @property
    def drive(self) -> str:
        return self._path.drive

    @property
    def root(self) -> str:
        return self._path.root

    @property
    def anchor(self) -> str:
        return self._path.anchor

    @property
    def parents(self) -> Sequence[Path]:
        return tuple(Path(p) for p in self._path.parents)

    @property
    def parent(self) -> Path:
        return Path(self._path.parent)

    @property
    def name(self) -> str:
        return self._path.name

    @property
    def suffix(self) -> str:
        return self._path.suffix

    @property
    def suffixes(self) -> list[str]:
        return self._path.suffixes

    @property
    def stem(self) -> str:
        return self._path.stem

    async def absolute(self) -> Path:
        path = await to_thread.run_sync(self._path.absolute)
        return Path(path)

    def as_posix(self) -> str:
        return self._path.as_posix()

    def as_uri(self) -> str:
        return self._path.as_uri()

    def match(self, path_pattern: str) -> bool:
        return self._path.match(path_pattern)

    def is_relative_to(self, *other: str | PathLike[str]) -> bool:
        try:
            self.relative_to(*other)
            return True
        except ValueError:
            return False

    async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
        func = partial(os.chmod, follow_symlinks=follow_symlinks)
        return await to_thread.run_sync(func, self._path, mode)

    @classmethod
    async def cwd(cls) -> Path:
        path = await to_thread.run_sync(pathlib.Path.cwd)
        return cls(path)

    async def exists(self) -> bool:
        return await to_thread.run_sync(self._path.exists, cancellable=True)

    async def expanduser(self) -> Path:
        return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True))

    def glob(self, pattern: str) -> AsyncIterator[Path]:
        gen = self._path.glob(pattern)
        return _PathIterator(gen)

    async def group(self) -> str:
        return await to_thread.run_sync(self._path.group, cancellable=True)

    async def hardlink_to(self, target: str | pathlib.Path | Path) -> None:
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(os.link, target, self)

    @classmethod
    async def home(cls) -> Path:
        home_path = await to_thread.run_sync(pathlib.Path.home)
        return cls(home_path)

    def is_absolute(self) -> bool:
        return self._path.is_absolute()

    async def is_block_device(self) -> bool:
        return await to_thread.run_sync(self._path.is_block_device, cancellable=True)

    async def is_char_device(self) -> bool:
        return await to_thread.run_sync(self._path.is_char_device, cancellable=True)

    async def is_dir(self) -> bool:
        return await to_thread.run_sync(self._path.is_dir, cancellable=True)

    async def is_fifo(self) -> bool:
        return await to_thread.run_sync(self._path.is_fifo, cancellable=True)

    async def is_file(self) -> bool:
        return await to_thread.run_sync(self._path.is_file, cancellable=True)

    async def is_mount(self) -> bool:
        return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True)

    def is_reserved(self) -> bool:
        return self._path.is_reserved()

    async def is_socket(self) -> bool:
        return await to_thread.run_sync(self._path.is_socket, cancellable=True)

    async def is_symlink(self) -> bool:
        return await to_thread.run_sync(self._path.is_symlink, cancellable=True)

    def iterdir(self) -> AsyncIterator[Path]:
        gen = self._path.iterdir()
        return _PathIterator(gen)

    def joinpath(self, *args: str | PathLike[str]) -> Path:
        return Path(self._path.joinpath(*args))

    async def lchmod(self, mode: int) -> None:
        await to_thread.run_sync(self._path.lchmod, mode)

    async def lstat(self) -> os.stat_result:
        return await to_thread.run_sync(self._path.lstat, cancellable=True)

    async def mkdir(
        self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False
    ) -> None:
        await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)

    @overload
    async def open(
        self,
        mode: OpenBinaryMode,
        buffering: int = ...,
        encoding: str | None = ...,
        errors: str | None = ...,
        newline: str | None = ...,
    ) -> AsyncFile[bytes]:
        ...

    @overload
    async def open(
        self,
        mode: OpenTextMode = ...,
        buffering: int = ...,
        encoding: str | None = ...,
        errors: str | None = ...,
        newline: str | None = ...,
    ) -> AsyncFile[str]:
        ...

    async def open(
        self,
        mode: str = "r",
        buffering: int = -1,
        encoding: str | None = None,
        errors: str | None = None,
        newline: str | None = None,
    ) -> AsyncFile[Any]:
        fp = await to_thread.run_sync(
            self._path.open, mode, buffering, encoding, errors, newline
        )
        return AsyncFile(fp)

    async def owner(self) -> str:
        return await to_thread.run_sync(self._path.owner, cancellable=True)

    async def read_bytes(self) -> bytes:
        return await to_thread.run_sync(self._path.read_bytes)

    async def read_text(
        self, encoding: str | None = None, errors: str | None = None
    ) -> str:
        return await to_thread.run_sync(self._path.read_text, encoding, errors)

    def relative_to(self, *other: str | PathLike[str]) -> Path:
        return Path(self._path.relative_to(*other))

    async def readlink(self) -> Path:
        target = await to_thread.run_sync(os.readlink, self._path)
        return Path(cast(str, target))

    async def rename(self, target: str | pathlib.PurePath | Path) -> Path:
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.rename, target)
        return Path(target)

    async def replace(self, target: str | pathlib.PurePath | Path) -> Path:
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.replace, target)
        return Path(target)

    async def resolve(self, strict: bool = False) -> Path:
        func = partial(self._path.resolve, strict=strict)
        return Path(await to_thread.run_sync(func, cancellable=True))

    def rglob(self, pattern: str) -> AsyncIterator[Path]:
        gen = self._path.rglob(pattern)
        return _PathIterator(gen)

    async def rmdir(self) -> None:
        await to_thread.run_sync(self._path.rmdir)

    async def samefile(
        self, other_path: str | bytes | int | pathlib.Path | Path
    ) -> bool:
        if isinstance(other_path, Path):
            other_path = other_path._path

        return await to_thread.run_sync(
            self._path.samefile, other_path, cancellable=True
        )

    async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
        func = partial(os.stat, follow_symlinks=follow_symlinks)
        return await to_thread.run_sync(func, self._path, cancellable=True)

    async def symlink_to(
        self,
        target: str | pathlib.Path | Path,
        target_is_directory: bool = False,
    ) -> None:
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)

    async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
        await to_thread.run_sync(self._path.touch, mode, exist_ok)

    async def unlink(self, missing_ok: bool = False) -> None:
        try:
            await to_thread.run_sync(self._path.unlink)
        except FileNotFoundError:
            if not missing_ok:
                raise

    def with_name(self, name: str) -> Path:
        return Path(self._path.with_name(name))

    def with_stem(self, stem: str) -> Path:
        return Path(self._path.with_name(stem + self._path.suffix))

    def with_suffix(self, suffix: str) -> Path:
        return Path(self._path.with_suffix(suffix))

    async def write_bytes(self, data: bytes) -> int:
        return await to_thread.run_sync(self._path.write_bytes, data)

    async def write_text(
        self,
        data: str,
        encoding: str | None = None,
        errors: str | None = None,
        newline: str | None = None,
    ) -> int:
        # Path.write_text() does not support the "newline" parameter before Python 3.10
        def sync_write_text() -> int:
            with self._path.open(
                "w", encoding=encoding, errors=errors, newline=newline
            ) as fp:
                return fp.write(data)

        return await to_thread.run_sync(sync_write_text)


PathLike.register(Path)
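

# A minimal usage sketch (illustrative only, not part of the module): the I/O methods
# of the async Path must be awaited, while pure path operations stay synchronous.
# The directory name "/tmp/demo" is hypothetical.
async def _path_example() -> None:
    p = Path("/tmp/demo")  # hypothetical path
    if not await p.exists():  # disk I/O, must be awaited
        await p.mkdir(parents=True)
    print(p.name)  # pure path property, no await needed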
@ -0,0 +1,18 @@
from __future__ import annotations

from ..abc import AsyncResource
from ._tasks import CancelScope


async def aclose_forcefully(resource: AsyncResource) -> None:
    """
    Close an asynchronous resource in a cancelled scope.

    Doing this closes the resource without waiting on anything.

    :param resource: the resource to close

    """
    with CancelScope() as scope:
        scope.cancel()
        await resource.aclose()
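

# A minimal usage sketch (illustrative only, not part of the module): forcefully
# closing a resource whose graceful close might block. `stream` stands for any
# AsyncResource, e.g. a stream returned by connect_tcp().
async def _aclose_forcefully_example(stream: AsyncResource) -> None:
    # The surrounding scope is already cancelled, so aclose() cannot block here.
    await aclose_forcefully(stream)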
@ -0,0 +1,26 @@
from __future__ import annotations

from typing import AsyncIterator

from ._compat import DeprecatedAsyncContextManager
from ._eventloop import get_asynclib


def open_signal_receiver(
    *signals: int,
) -> DeprecatedAsyncContextManager[AsyncIterator[int]]:
    """
    Start receiving operating system signals.

    :param signals: signals to receive (e.g. ``signal.SIGINT``)
    :return: an asynchronous context manager for an asynchronous iterator which yields signal
        numbers

    .. warning:: Windows does not support signals natively so it is best to avoid relying on this
        in cross-platform applications.

    .. warning:: On asyncio, this permanently replaces any previous signal handler for the given
        signals, as set via :meth:`~asyncio.loop.add_signal_handler`.

    """
    return get_asynclib().open_signal_receiver(*signals)
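

# A minimal usage sketch (illustrative only, not part of the module): iterating over
# incoming signals. Assumes a POSIX platform, per the warnings in the docstring above.
async def _signal_receiver_example() -> None:
    import signal

    with open_signal_receiver(signal.SIGINT, signal.SIGTERM) as signals:
        async for signum in signals:
            print(f"received signal {signum}")
            break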
@ -0,0 +1,607 @@
from __future__ import annotations

import socket
import ssl
import sys
from ipaddress import IPv6Address, ip_address
from os import PathLike, chmod
from pathlib import Path
from socket import AddressFamily, SocketKind
from typing import Awaitable, List, Tuple, cast, overload

from .. import to_thread
from ..abc import (
    ConnectedUDPSocket,
    IPAddressType,
    IPSockAddrType,
    SocketListener,
    SocketStream,
    UDPSocket,
    UNIXSocketStream,
)
from ..streams.stapled import MultiListener
from ..streams.tls import TLSStream
from ._eventloop import get_asynclib
from ._resources import aclose_forcefully
from ._synchronization import Event
from ._tasks import create_task_group, move_on_after

if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41)  # https://bugs.python.org/issue29515

GetAddrInfoReturnType = List[
    Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]]
]
AnyIPAddressFamily = Literal[
    AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6
]
IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]


# tls_hostname given
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str,
    happy_eyeballs_delay: float = ...,
) -> TLSStream:
    ...


# ssl_context given
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    ssl_context: ssl.SSLContext,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> TLSStream:
    ...


# tls=True
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    tls: Literal[True],
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> TLSStream:
    ...


# tls=False
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    tls: Literal[False],
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> SocketStream:
    ...


# No TLS arguments
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    happy_eyeballs_delay: float = ...,
) -> SocketStream:
    ...


async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = None,
    tls: bool = False,
    ssl_context: ssl.SSLContext | None = None,
    tls_standard_compatible: bool = True,
    tls_hostname: str | None = None,
    happy_eyeballs_delay: float = 0.25,
) -> SocketStream | TLSStream:
    """
    Connect to a host using the TCP protocol.

    This function implements the stateless version of the Happy Eyeballs algorithm (RFC
    6555). If ``remote_host`` is a host name that resolves to multiple IP addresses,
    each one is tried until one connection attempt succeeds. If the first attempt does
    not connect within 250 milliseconds, a second attempt is started using the next
    address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if
    available) is tried first.

    When the connection has been established, a TLS handshake will be done if either
    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.

    :param remote_host: the IP address or host name to connect to
    :param remote_port: port on the target host to connect to
    :param local_host: the interface address or name to bind the socket to before connecting
    :param tls: ``True`` to do a TLS handshake with the connected stream and return a
        :class:`~anyio.streams.tls.TLSStream` instead
    :param ssl_context: the SSL context object to use (if omitted, a default context is created)
    :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing
        the stream and requires that the server does this as well. Otherwise,
        :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
        Some protocols, such as HTTP, require this option to be ``False``.
        See :meth:`~ssl.SSLContext.wrap_socket` for details.
    :param tls_hostname: host name to check the server certificate against (defaults to the value
        of ``remote_host``)
    :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt
    :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
    :raises OSError: if the connection attempt fails

    """
    # Placed here due to https://github.com/python/mypy/issues/7057
    connected_stream: SocketStream | None = None

    async def try_connect(remote_host: str, event: Event) -> None:
        nonlocal connected_stream
        try:
            stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
        except OSError as exc:
            oserrors.append(exc)
            return
        else:
            if connected_stream is None:
                connected_stream = stream
                tg.cancel_scope.cancel()
            else:
                await stream.aclose()
        finally:
            event.set()

    asynclib = get_asynclib()
    local_address: IPSockAddrType | None = None
    family = socket.AF_UNSPEC
    if local_host:
        gai_res = await getaddrinfo(str(local_host), None)
        family, *_, local_address = gai_res[0]

    target_host = str(remote_host)
    try:
        addr_obj = ip_address(remote_host)
    except ValueError:
        # getaddrinfo() will raise an exception if name resolution fails
        gai_res = await getaddrinfo(
            target_host, remote_port, family=family, type=socket.SOCK_STREAM
        )

        # Organize the list so that the first address is an IPv6 address (if available) and the
        # second one is an IPv4 address. The rest can be in whatever order.
        v6_found = v4_found = False
        target_addrs: list[tuple[socket.AddressFamily, str]] = []
        for af, *rest, sa in gai_res:
            if af == socket.AF_INET6 and not v6_found:
                v6_found = True
                target_addrs.insert(0, (af, sa[0]))
            elif af == socket.AF_INET and not v4_found and v6_found:
                v4_found = True
                target_addrs.insert(1, (af, sa[0]))
            else:
                target_addrs.append((af, sa[0]))
    else:
        if isinstance(addr_obj, IPv6Address):
            target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
        else:
            target_addrs = [(socket.AF_INET, addr_obj.compressed)]

    oserrors: list[OSError] = []
    async with create_task_group() as tg:
        for i, (af, addr) in enumerate(target_addrs):
            event = Event()
            tg.start_soon(try_connect, addr, event)
            with move_on_after(happy_eyeballs_delay):
                await event.wait()

    if connected_stream is None:
        cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors)
        raise OSError("All connection attempts failed") from cause

    if tls or tls_hostname or ssl_context:
        try:
            return await TLSStream.wrap(
                connected_stream,
                server_side=False,
                hostname=tls_hostname or str(remote_host),
                ssl_context=ssl_context,
                standard_compatible=tls_standard_compatible,
            )
        except BaseException:
            await aclose_forcefully(connected_stream)
            raise

    return connected_stream
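

# A minimal usage sketch (illustrative only, not part of the module): opening a TCP
# connection and exchanging some bytes. Host and port are hypothetical.
async def _connect_tcp_example() -> None:
    async with await connect_tcp("example.com", 80) as stream:  # hypothetical host
        await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
        print(await stream.receive())
    # Passing tls=True (or an ssl_context/tls_hostname) would instead perform a TLS
    # handshake and return a TLSStream wrapped around the same connection.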


async def connect_unix(path: str | PathLike[str]) -> UNIXSocketStream:
    """
    Connect to the given UNIX socket.

    Not available on Windows.

    :param path: path to the socket
    :return: a socket stream object

    """
    path = str(Path(path))
    return await get_asynclib().connect_unix(path)


async def create_tcp_listener(
    *,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC,
    backlog: int = 65536,
    reuse_port: bool = False,
) -> MultiListener[SocketStream]:
    """
    Create a TCP socket listener.

    :param local_port: port number to listen on
    :param local_host: IP address of the interface to listen on. If omitted, listen on
        all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address
        family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6.
    :param family: address family (used if ``local_host`` was omitted)
    :param backlog: maximum number of queued incoming connections (up to a maximum of
        2**16, or 65536)
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
        address/port (not supported on Windows)
    :return: a list of listener objects

    """
    asynclib = get_asynclib()
    backlog = min(backlog, 65536)
    local_host = str(local_host) if local_host is not None else None
    gai_res = await getaddrinfo(
        local_host,  # type: ignore[arg-type]
        local_port,
        family=family,
        type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0,
        flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
    )
    listeners: list[SocketListener] = []
    try:
        # The set() is here to work around a glibc bug:
        # https://sourceware.org/bugzilla/show_bug.cgi?id=14969
        sockaddr: tuple[str, int] | tuple[str, int, int, int]
        for fam, kind, *_, sockaddr in sorted(set(gai_res)):
            # Workaround for an uvloop bug where we don't get the correct scope ID for
            # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to
            # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539
            if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM:
                continue

            raw_socket = socket.socket(fam)
            raw_socket.setblocking(False)

            # For Windows, enable exclusive address use. For others, enable address reuse.
            if sys.platform == "win32":
                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
            else:
                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

            if reuse_port:
                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)

            # If only IPv6 was requested, disable dual stack operation
            if fam == socket.AF_INET6:
                raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)

            # Workaround for #554
            if "%" in sockaddr[0]:
                addr, scope_id = sockaddr[0].split("%", 1)
                sockaddr = (addr, sockaddr[1], 0, int(scope_id))

            raw_socket.bind(sockaddr)
            raw_socket.listen(backlog)
            listener = asynclib.TCPSocketListener(raw_socket)
            listeners.append(listener)
    except BaseException:
        for listener in listeners:
            await listener.aclose()

        raise

    return MultiListener(listeners)
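

# A minimal usage sketch (illustrative only, not part of the module): serving incoming
# connections with the MultiListener returned above. The port and handler are
# hypothetical.
async def _tcp_listener_example() -> None:
    async def handle(stream: SocketStream) -> None:
        async with stream:
            await stream.send(b"hello\n")

    listener = await create_tcp_listener(local_port=8000)  # hypothetical port
    await listener.serve(handle)  # serves until cancelled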


async def create_unix_listener(
    path: str | PathLike[str],
    *,
    mode: int | None = None,
    backlog: int = 65536,
) -> SocketListener:
    """
    Create a UNIX socket listener.

    Not available on Windows.

    :param path: path of the socket
    :param mode: permissions to set on the socket
    :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
        65536)
    :return: a listener object

    .. versionchanged:: 3.0
        If a socket already exists on the file system in the given path, it will be removed first.

    """
    path_str = str(path)
    path = Path(path)
    if path.is_socket():
        path.unlink()

    backlog = min(backlog, 65536)
    raw_socket = socket.socket(socket.AF_UNIX)
    raw_socket.setblocking(False)
    try:
        await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True)
        if mode is not None:
            await to_thread.run_sync(chmod, path_str, mode, cancellable=True)

        raw_socket.listen(backlog)
        return get_asynclib().UNIXSocketListener(raw_socket)
    except BaseException:
        raw_socket.close()
        raise


async def create_udp_socket(
    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
    *,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    reuse_port: bool = False,
) -> UDPSocket:
    """
    Create a UDP socket.

    If ``local_port`` has been given, the socket will be bound to this port on the local
    machine, making this socket suitable for providing UDP based services.

    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
        ``local_host`` if omitted
    :param local_host: IP address or host name of the local interface to bind to
    :param local_port: local port to bind to
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
        (not supported on Windows)
    :return: a UDP socket

    """
    if family is AddressFamily.AF_UNSPEC and not local_host:
        raise ValueError('Either "family" or "local_host" must be given')

    if local_host:
        gai_res = await getaddrinfo(
            str(local_host),
            local_port,
            family=family,
            type=socket.SOCK_DGRAM,
            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
        )
        family = cast(AnyIPAddressFamily, gai_res[0][0])
        local_address = gai_res[0][-1]
    elif family is AddressFamily.AF_INET6:
        local_address = ("::", 0)
    else:
        local_address = ("0.0.0.0", 0)

    return await get_asynclib().create_udp_socket(
        family, local_address, None, reuse_port
    )
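

# A minimal usage sketch (illustrative only, not part of the module): an unconnected
# UDP socket echoing datagrams back to their senders. The port number is hypothetical.
async def _udp_socket_example() -> None:
    async with await create_udp_socket(
        family=socket.AF_INET, local_port=9999  # hypothetical port
    ) as udp:
        async for packet, (host, port) in udp:  # yields (payload, sender address)
            await udp.sendto(packet, host, port)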


async def create_connected_udp_socket(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    reuse_port: bool = False,
) -> ConnectedUDPSocket:
    """
    Create a connected UDP socket.

    Connected UDP sockets can only communicate with the specified remote host/port, and any packets
    sent from other sources are dropped.

    :param remote_host: remote host to set as the default target
    :param remote_port: port on the remote host to set as the default target
    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
        ``local_host`` or ``remote_host`` if omitted
    :param local_host: IP address or host name of the local interface to bind to
    :param local_port: local port to bind to
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
        (not supported on Windows)
    :return: a connected UDP socket

    """
    local_address = None
    if local_host:
        gai_res = await getaddrinfo(
            str(local_host),
            local_port,
            family=family,
            type=socket.SOCK_DGRAM,
            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
        )
        family = cast(AnyIPAddressFamily, gai_res[0][0])
        local_address = gai_res[0][-1]

    gai_res = await getaddrinfo(
        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
    )
    family = cast(AnyIPAddressFamily, gai_res[0][0])
    remote_address = gai_res[0][-1]

    return await get_asynclib().create_udp_socket(
        family, local_address, remote_address, reuse_port
    )


async def getaddrinfo(
    host: bytearray | bytes | str,
    port: str | int | None,
    *,
    family: int | AddressFamily = 0,
    type: int | SocketKind = 0,
    proto: int = 0,
    flags: int = 0,
) -> GetAddrInfoReturnType:
    """
    Look up a numeric IP address given a host name.

    Internationalized domain names are translated according to the (non-transitional) IDNA 2008
    standard.

    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
        (host, port), unlike what :func:`socket.getaddrinfo` does.

    :param host: host name
    :param port: port number
    :param family: socket family (``AF_INET``, ...)
    :param type: socket type (``SOCK_STREAM``, ...)
    :param proto: protocol number
    :param flags: flags to pass to upstream ``getaddrinfo()``
    :return: list of tuples containing (family, type, proto, canonname, sockaddr)

    .. seealso:: :func:`socket.getaddrinfo`

    """
    # Handle unicode hostnames
    if isinstance(host, str):
        try:
            encoded_host = host.encode("ascii")
        except UnicodeEncodeError:
            import idna

            encoded_host = idna.encode(host, uts46=True)
    else:
        encoded_host = host

    gai_res = await get_asynclib().getaddrinfo(
        encoded_host, port, family=family, type=type, proto=proto, flags=flags
    )
    return [
        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
        for family, type, proto, canonname, sockaddr in gai_res
    ]
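

# A minimal usage sketch (illustrative only, not part of the module): resolving a
# host name. The host name is hypothetical.
async def _getaddrinfo_example() -> None:
    results = await getaddrinfo("example.org", 80, type=socket.SOCK_STREAM)
    for family, type_, proto, canonname, sockaddr in results:
        print(family, sockaddr)  # sockaddr is always a (host, port) 2-tuple here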


def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
    """
    Look up the host name of an IP address.

    :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
    :param flags: flags to pass to upstream ``getnameinfo()``
    :return: a tuple of (host name, service name)

    .. seealso:: :func:`socket.getnameinfo`

    """
    return get_asynclib().getnameinfo(sockaddr, flags)


def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
    """
    Wait until the given socket has data to be read.

    This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
    (default on py3.8+).

    .. warning:: Only use this on raw sockets that have not been wrapped by any higher level
        constructs like socket streams!

    :param sock: a socket object
    :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
        socket to become readable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
        to become readable

    """
    return get_asynclib().wait_socket_readable(sock)


def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
    """
    Wait until the given socket can be written to.

    This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
    (default on py3.8+).

    .. warning:: Only use this on raw sockets that have not been wrapped by any higher level
        constructs like socket streams!

    :param sock: a socket object
    :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
        socket to become writable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
        to become writable

    """
    return get_asynclib().wait_socket_writable(sock)


#
# Private API
#


def convert_ipv6_sockaddr(
    sockaddr: tuple[str, int, int, int] | tuple[str, int]
) -> tuple[str, int]:
    """
    Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.

    If the scope ID is nonzero, it is added to the address, separated with ``%``.
    Otherwise the flow id and scope id are simply cut off from the tuple.
    Any other kinds of socket addresses are returned as-is.

    :param sockaddr: the result of :meth:`~socket.socket.getsockname`
    :return: the converted socket address

    """
    # This is more complicated than it should be because of MyPy
    if isinstance(sockaddr, tuple) and len(sockaddr) == 4:
        host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr)
        if scope_id:
            # PyPy (as of v7.3.11) leaves the interface name in the result, so
            # we discard it and only get the scope ID from the end
            # (https://foss.heptapod.net/pypy/pypy/-/issues/3938)
            host = host.split("%")[0]

            # Add scope_id to the address
            return f"{host}%{scope_id}", port
        else:
            return host, port
    else:
        return cast(Tuple[str, int], sockaddr)
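

# Illustrative examples (not part of the module) of the conversion above; the
# addresses are hypothetical:
def _convert_ipv6_sockaddr_examples() -> None:
    # nonzero scope ID: appended to the host with "%"
    assert convert_ipv6_sockaddr(("fe80::1", 80, 0, 3)) == ("fe80::1%3", 80)
    # zero scope ID: flow info and scope ID are simply dropped
    assert convert_ipv6_sockaddr(("2001:db8::1", 80, 0, 0)) == ("2001:db8::1", 80)
    # 2-tuples (e.g. IPv4 addresses) pass through unchanged
    assert convert_ipv6_sockaddr(("127.0.0.1", 80)) == ("127.0.0.1", 80)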
@ -0,0 +1,47 @@
from __future__ import annotations

import math
from typing import Any, TypeVar, overload

from ..streams.memory import (
    MemoryObjectReceiveStream,
    MemoryObjectSendStream,
    MemoryObjectStreamState,
)

T_Item = TypeVar("T_Item")


@overload
def create_memory_object_stream(
    max_buffer_size: float = ...,
) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
    ...


@overload
def create_memory_object_stream(
    max_buffer_size: float = ..., item_type: type[T_Item] = ...
) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
    ...


def create_memory_object_stream(
    max_buffer_size: float = 0, item_type: type[T_Item] | None = None
) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
    """
    Create a memory object stream.

    :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking
    :param item_type: type of item, for marking the streams with the right generic type for
        static typing (not used at run time)
    :return: a tuple of (send stream, receive stream)

    """
    if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
        raise ValueError("max_buffer_size must be either an integer or math.inf")
    if max_buffer_size < 0:
        raise ValueError("max_buffer_size cannot be negative")

    state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size)
    return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)
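

# A minimal usage sketch (illustrative only, not part of the module): a bounded
# producer/consumer pair over a memory object stream.
async def _memory_object_stream_example() -> None:
    send_stream, receive_stream = create_memory_object_stream(10, item_type=str)

    async with send_stream:
        await send_stream.send("hello")  # blocks only once 10 items are buffered

    async with receive_stream:
        async for item in receive_stream:
            print(item)  # iteration ends once the send stream is closed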
@ -0,0 +1,135 @@
from __future__ import annotations

from io import BytesIO
from os import PathLike
from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess
from typing import (
    IO,
    Any,
    AsyncIterable,
    Mapping,
    Sequence,
    cast,
)

from ..abc import Process
from ._eventloop import get_asynclib
from ._tasks import create_task_group


async def run_process(
    command: str | bytes | Sequence[str | bytes],
    *,
    input: bytes | None = None,
    stdout: int | IO[Any] | None = PIPE,
    stderr: int | IO[Any] | None = PIPE,
    check: bool = True,
    cwd: str | bytes | PathLike[str] | None = None,
    env: Mapping[str, str] | None = None,
    start_new_session: bool = False,
) -> CompletedProcess[bytes]:
    """
    Run an external command in a subprocess and wait until it completes.

    .. seealso:: :func:`subprocess.run`

    :param command: either a string to pass to the shell, or an iterable of strings containing the
        executable name or path and its arguments
    :param input: bytes passed to the standard input of the subprocess
    :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or
        :data:`subprocess.STDOUT`
    :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process
        terminates with a return code other than 0
    :param cwd: If not ``None``, change the working directory to this before running the command
    :param env: if not ``None``, this mapping replaces the inherited environment variables from the
        parent process
    :param start_new_session: if ``true`` the setsid() system call will be made in the child
        process prior to the execution of the subprocess. (POSIX only)
    :return: an object representing the completed process
    :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a
        nonzero return code

    """

    async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
        buffer = BytesIO()
        async for chunk in stream:
            buffer.write(chunk)

        stream_contents[index] = buffer.getvalue()

    async with await open_process(
        command,
        stdin=PIPE if input else DEVNULL,
        stdout=stdout,
        stderr=stderr,
        cwd=cwd,
        env=env,
        start_new_session=start_new_session,
    ) as process:
        stream_contents: list[bytes | None] = [None, None]
        try:
            async with create_task_group() as tg:
                if process.stdout:
                    tg.start_soon(drain_stream, process.stdout, 0)
                if process.stderr:
                    tg.start_soon(drain_stream, process.stderr, 1)
                if process.stdin and input:
                    await process.stdin.send(input)
                    await process.stdin.aclose()

                await process.wait()
        except BaseException:
            process.kill()
            raise

    output, errors = stream_contents
    if check and process.returncode != 0:
        raise CalledProcessError(cast(int, process.returncode), command, output, errors)

    return CompletedProcess(command, cast(int, process.returncode), output, errors)
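

# A minimal usage sketch (illustrative only, not part of the module): running a
# command and inspecting its captured output. The command is hypothetical.
async def _run_process_example() -> None:
    result = await run_process(["echo", "hello"])  # hypothetical command
    print(result.returncode)       # 0 here, since check=True would have raised otherwise
    print(result.stdout.decode())  # "hello\n", captured via the PIPE default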


async def open_process(
    command: str | bytes | Sequence[str | bytes],
    *,
    stdin: int | IO[Any] | None = PIPE,
    stdout: int | IO[Any] | None = PIPE,
    stderr: int | IO[Any] | None = PIPE,
    cwd: str | bytes | PathLike[str] | None = None,
    env: Mapping[str, str] | None = None,
    start_new_session: bool = False,
) -> Process:
    """
    Start an external command in a subprocess.

    .. seealso:: :class:`subprocess.Popen`

    :param command: either a string to pass to the shell, or an iterable of strings containing the
        executable name or path and its arguments
    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
        file-like object, or ``None``
    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        a file-like object, or ``None``
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        :data:`subprocess.STDOUT`, a file-like object, or ``None``
    :param cwd: If not ``None``, the working directory is changed before executing
    :param env: If env is not ``None``, it must be a mapping that defines the environment
        variables for the new process
    :param start_new_session: if ``true`` the setsid() system call will be made in the child
        process prior to the execution of the subprocess. (POSIX only)
    :return: an asynchronous process object

    """
    shell = isinstance(command, str)
    return await get_asynclib().open_process(
        command,
        shell=shell,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        cwd=cwd,
        env=env,
        start_new_session=start_new_session,
    )
@ -0,0 +1,596 @@
from __future__ import annotations

from collections import deque
from dataclasses import dataclass
from types import TracebackType
from warnings import warn

from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled
from ._compat import DeprecatedAwaitable
from ._eventloop import get_asynclib
from ._exceptions import BusyResourceError, WouldBlock
from ._tasks import CancelScope
from ._testing import TaskInfo, get_current_task


@dataclass(frozen=True)
class EventStatistics:
    """
    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
    """

    tasks_waiting: int


@dataclass(frozen=True)
class CapacityLimiterStatistics:
    """
    :ivar int borrowed_tokens: number of tokens currently borrowed by tasks
    :ivar float total_tokens: total number of available tokens
    :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this
        limiter
    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or
        :meth:`~.CapacityLimiter.acquire_on_behalf_of`
    """

    borrowed_tokens: int
    total_tokens: float
    borrowers: tuple[object, ...]
    tasks_waiting: int


@dataclass(frozen=True)
class LockStatistics:
    """
    :ivar bool locked: flag indicating if this lock is locked or not
    :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not
        held by any task)
    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire`
    """

    locked: bool
    owner: TaskInfo | None
    tasks_waiting: int


@dataclass(frozen=True)
class ConditionStatistics:
    """
    :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait`
    :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock`
    """

    tasks_waiting: int
    lock_statistics: LockStatistics


@dataclass(frozen=True)
class SemaphoreStatistics:
    """
    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire`

    """

    tasks_waiting: int


class Event:
    def __new__(cls) -> Event:
        return get_asynclib().Event()

    def set(self) -> DeprecatedAwaitable:
        """Set the flag, notifying all listeners."""
        raise NotImplementedError

    def is_set(self) -> bool:
        """Return ``True`` if the flag is set, ``False`` if not."""
        raise NotImplementedError

    async def wait(self) -> None:
        """
        Wait until the flag has been set.

        If the flag has already been set when this method is called, it returns immediately.

        """
        raise NotImplementedError

    def statistics(self) -> EventStatistics:
        """Return statistics about the current state of this event."""
        raise NotImplementedError


class Lock:
    _owner_task: TaskInfo | None = None

    def __init__(self) -> None:
        self._waiters: deque[tuple[TaskInfo, Event]] = deque()

    async def __aenter__(self) -> None:
        await self.acquire()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    async def acquire(self) -> None:
        """Acquire the lock."""
        await checkpoint_if_cancelled()
        try:
            self.acquire_nowait()
        except WouldBlock:
            task = get_current_task()
            event = Event()
            token = task, event
            self._waiters.append(token)
            try:
                await event.wait()
            except BaseException:
                if not event.is_set():
                    self._waiters.remove(token)
                elif self._owner_task == task:
                    self.release()

                raise

            assert self._owner_task == task
        else:
            try:
                await cancel_shielded_checkpoint()
            except BaseException:
                self.release()
                raise

    def acquire_nowait(self) -> None:
        """
        Acquire the lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        task = get_current_task()
        if self._owner_task == task:
            raise RuntimeError("Attempted to acquire an already held Lock")

        if self._owner_task is not None:
            raise WouldBlock

        self._owner_task = task

    def release(self) -> DeprecatedAwaitable:
        """Release the lock."""
        if self._owner_task != get_current_task():
            raise RuntimeError("The current task is not holding this lock")

        if self._waiters:
            self._owner_task, event = self._waiters.popleft()
            event.set()
        else:
            del self._owner_task

        return DeprecatedAwaitable(self.release)

    def locked(self) -> bool:
        """Return True if the lock is currently held."""
        return self._owner_task is not None

    def statistics(self) -> LockStatistics:
        """
        Return statistics about the current state of this lock.

        .. versionadded:: 3.0
        """
        return LockStatistics(self.locked(), self._owner_task, len(self._waiters))
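

# A minimal usage sketch (illustrative only, not part of the module): guarding a
# shared counter with the Lock above. The async context manager form acquires the
# lock on entry and releases it on exit.
async def _lock_example(lock: Lock, counts: dict[str, int]) -> None:
    async with lock:
        counts["hits"] = counts.get("hits", 0) + 1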


class Condition:
    _owner_task: TaskInfo | None = None

    def __init__(self, lock: Lock | None = None):
        self._lock = lock or Lock()
        self._waiters: deque[Event] = deque()

    async def __aenter__(self) -> None:
        await self.acquire()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    def _check_acquired(self) -> None:
        if self._owner_task != get_current_task():
            raise RuntimeError("The current task is not holding the underlying lock")

    async def acquire(self) -> None:
        """Acquire the underlying lock."""
        await self._lock.acquire()
        self._owner_task = get_current_task()

    def acquire_nowait(self) -> None:
        """
        Acquire the underlying lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        self._lock.acquire_nowait()
        self._owner_task = get_current_task()

    def release(self) -> DeprecatedAwaitable:
        """Release the underlying lock."""
        self._lock.release()
        return DeprecatedAwaitable(self.release)

    def locked(self) -> bool:
        """Return True if the underlying lock is currently held."""
        return self._lock.locked()

    def notify(self, n: int = 1) -> None:
        """Notify exactly n listeners."""
        self._check_acquired()
        for _ in range(n):
            try:
                event = self._waiters.popleft()
            except IndexError:
                break

            event.set()

    def notify_all(self) -> None:
        """Notify all the listeners."""
        self._check_acquired()
        for event in self._waiters:
            event.set()

        self._waiters.clear()

    async def wait(self) -> None:
        """Wait for a notification."""
        await checkpoint()
        event = Event()
        self._waiters.append(event)
        self.release()
        try:
            await event.wait()
        except BaseException:
            if not event.is_set():
                self._waiters.remove(event)

            raise
        finally:
            with CancelScope(shield=True):
                await self.acquire()

    def statistics(self) -> ConditionStatistics:
        """
        Return statistics about the current state of this condition.

        .. versionadded:: 3.0
        """
        return ConditionStatistics(len(self._waiters), self._lock.statistics())
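
# Editorial usage sketch (not part of the original commit): the classic
# wait/notify handshake.  `_condition_demo` is a hypothetical helper and is
# never called at import time.
async def _condition_demo() -> None:
    condition = Condition()
    async with condition:  # the underlying lock must be held to notify
        # A consumer task holding the lock would block in `await
        # condition.wait()` here until a producer calls notify()/notify_all().
        condition.notify_all()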


class Semaphore:
    def __init__(self, initial_value: int, *, max_value: int | None = None):
        if not isinstance(initial_value, int):
            raise TypeError("initial_value must be an integer")
        if initial_value < 0:
            raise ValueError("initial_value must be >= 0")
        if max_value is not None:
            if not isinstance(max_value, int):
                raise TypeError("max_value must be an integer or None")
            if max_value < initial_value:
                raise ValueError(
                    "max_value must be equal to or higher than initial_value"
                )

        self._value = initial_value
        self._max_value = max_value
        self._waiters: deque[Event] = deque()

    async def __aenter__(self) -> Semaphore:
        await self.acquire()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    async def acquire(self) -> None:
        """Decrement the semaphore value, blocking if necessary."""
        await checkpoint_if_cancelled()
        try:
            self.acquire_nowait()
        except WouldBlock:
            event = Event()
            self._waiters.append(event)
            try:
                await event.wait()
            except BaseException:
                if not event.is_set():
                    self._waiters.remove(event)
                else:
                    self.release()

                raise
        else:
            try:
                await cancel_shielded_checkpoint()
            except BaseException:
                self.release()
                raise

    def acquire_nowait(self) -> None:
        """
        Acquire the semaphore, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        if self._value == 0:
            raise WouldBlock

        self._value -= 1

    def release(self) -> DeprecatedAwaitable:
        """Increment the semaphore value."""
        if self._max_value is not None and self._value == self._max_value:
            raise ValueError("semaphore released too many times")

        if self._waiters:
            self._waiters.popleft().set()
        else:
            self._value += 1

        return DeprecatedAwaitable(self.release)

    @property
    def value(self) -> int:
        """The current value of the semaphore."""
        return self._value

    @property
    def max_value(self) -> int | None:
        """The maximum value of the semaphore."""
        return self._max_value

    def statistics(self) -> SemaphoreStatistics:
        """
        Return statistics about the current state of this semaphore.

        .. versionadded:: 3.0
        """
        return SemaphoreStatistics(len(self._waiters))
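
# Editorial usage sketch (not part of the original commit): limiting
# concurrency with a bounded semaphore.  `_semaphore_demo` is a hypothetical
# helper and is never called at import time.
async def _semaphore_demo() -> None:
    semaphore = Semaphore(2, max_value=2)  # at most two concurrent holders
    async with semaphore:  # acquire() on enter, release() on exit
        assert semaphore.value == 1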


class CapacityLimiter:
    def __new__(cls, total_tokens: float) -> CapacityLimiter:
        return get_asynclib().CapacityLimiter(total_tokens)

    async def __aenter__(self) -> None:
        raise NotImplementedError

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        raise NotImplementedError

    @property
    def total_tokens(self) -> float:
        """
        The total number of tokens available for borrowing.

        This is a read-write property. If the total number of tokens is increased, the
        proportionate number of tasks waiting on this limiter will be granted their tokens.

        .. versionchanged:: 3.0
            The property is now writable.

        """
        raise NotImplementedError

    @total_tokens.setter
    def total_tokens(self, value: float) -> None:
        raise NotImplementedError

    async def set_total_tokens(self, value: float) -> None:
        warn(
            "CapacityLimiter.set_total_tokens has been deprecated. Set the value of the"
            ' "total_tokens" attribute directly.',
            DeprecationWarning,
        )
        self.total_tokens = value

    @property
    def borrowed_tokens(self) -> int:
        """The number of tokens that have currently been borrowed."""
        raise NotImplementedError

    @property
    def available_tokens(self) -> float:
        """The number of tokens currently available to be borrowed."""
        raise NotImplementedError

    def acquire_nowait(self) -> DeprecatedAwaitable:
        """
        Acquire a token for the current task without waiting for one to become available.

        :raises ~anyio.WouldBlock: if there are no tokens available for borrowing

        """
        raise NotImplementedError

    def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
        """
        Acquire a token without waiting for one to become available.

        :param borrower: the entity borrowing a token
        :raises ~anyio.WouldBlock: if there are no tokens available for borrowing

        """
        raise NotImplementedError

    async def acquire(self) -> None:
        """
        Acquire a token for the current task, waiting if necessary for one to become available.

        """
        raise NotImplementedError

    async def acquire_on_behalf_of(self, borrower: object) -> None:
        """
        Acquire a token, waiting if necessary for one to become available.

        :param borrower: the entity borrowing a token

        """
        raise NotImplementedError

    def release(self) -> None:
        """
        Release the token held by the current task.

        :raises RuntimeError: if the current task has not borrowed a token from this limiter.

        """
        raise NotImplementedError

    def release_on_behalf_of(self, borrower: object) -> None:
        """
        Release the token held by the given borrower.

        :raises RuntimeError: if the borrower has not borrowed a token from this limiter.

        """
        raise NotImplementedError

    def statistics(self) -> CapacityLimiterStatistics:
        """
        Return statistics about the current state of this limiter.

        .. versionadded:: 3.0

        """
        raise NotImplementedError
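
# Editorial usage sketch (not part of the original commit): borrowing a token
# from a capacity limiter.  `_capacity_limiter_demo` is a hypothetical helper
# and is never called at import time; it requires a running event loop, since
# CapacityLimiter.__new__ delegates to the active backend.
async def _capacity_limiter_demo() -> None:
    limiter = CapacityLimiter(2)  # at most two borrowers at a time
    async with limiter:
        assert limiter.borrowed_tokens == 1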


def create_lock() -> Lock:
    """
    Create an asynchronous lock.

    :return: a lock object

    .. deprecated:: 3.0
       Use :class:`~Lock` directly.

    """
    warn("create_lock() is deprecated -- use Lock() directly", DeprecationWarning)
    return Lock()


def create_condition(lock: Lock | None = None) -> Condition:
    """
    Create an asynchronous condition.

    :param lock: the lock to base the condition object on
    :return: a condition object

    .. deprecated:: 3.0
       Use :class:`~Condition` directly.

    """
    warn(
        "create_condition() is deprecated -- use Condition() directly",
        DeprecationWarning,
    )
    return Condition(lock=lock)


def create_event() -> Event:
    """
    Create an asynchronous event object.

    :return: an event object

    .. deprecated:: 3.0
       Use :class:`~Event` directly.

    """
    warn("create_event() is deprecated -- use Event() directly", DeprecationWarning)
    return get_asynclib().Event()


def create_semaphore(value: int, *, max_value: int | None = None) -> Semaphore:
    """
    Create an asynchronous semaphore.

    :param value: the semaphore's initial value
    :param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the
        semaphore's value would exceed this number
    :return: a semaphore object

    .. deprecated:: 3.0
       Use :class:`~Semaphore` directly.

    """
    warn(
        "create_semaphore() is deprecated -- use Semaphore() directly",
        DeprecationWarning,
    )
    return Semaphore(value, max_value=max_value)


def create_capacity_limiter(total_tokens: float) -> CapacityLimiter:
    """
    Create a capacity limiter.

    :param total_tokens: the total number of tokens available for borrowing (can be an integer or
        :data:`math.inf`)
    :return: a capacity limiter object

    .. deprecated:: 3.0
       Use :class:`~CapacityLimiter` directly.

    """
    warn(
        "create_capacity_limiter() is deprecated -- use CapacityLimiter() directly",
        DeprecationWarning,
    )
    return get_asynclib().CapacityLimiter(total_tokens)


class ResourceGuard:
    __slots__ = "action", "_guarded"

    def __init__(self, action: str):
        self.action = action
        self._guarded = False

    def __enter__(self) -> None:
        if self._guarded:
            raise BusyResourceError(self.action)

        self._guarded = True

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        self._guarded = False
        return None
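
# Editorial usage sketch (not part of the original commit): ResourceGuard
# protects non-reentrant operations, so concurrent use raises
# BusyResourceError instead of corrupting state.  `_resource_guard_demo` is a
# hypothetical helper and is never called at import time.
def _resource_guard_demo() -> None:
    guard = ResourceGuard("reading from")
    with guard:
        # Entering `with guard:` again from another task while this block is
        # active would raise BusyResourceError.
        pass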
@ -0,0 +1,180 @@
from __future__ import annotations

import math
from types import TracebackType
from warnings import warn

from ..abc._tasks import TaskGroup, TaskStatus
from ._compat import (
    DeprecatedAsyncContextManager,
    DeprecatedAwaitable,
    DeprecatedAwaitableFloat,
)
from ._eventloop import get_asynclib


class _IgnoredTaskStatus(TaskStatus[object]):
    def started(self, value: object = None) -> None:
        pass


TASK_STATUS_IGNORED = _IgnoredTaskStatus()


class CancelScope(DeprecatedAsyncContextManager["CancelScope"]):
    """
    Wraps a unit of work that can be made separately cancellable.

    :param deadline: The time (clock value) when this scope is cancelled automatically
    :param shield: ``True`` to shield the cancel scope from external cancellation
    """

    def __new__(
        cls, *, deadline: float = math.inf, shield: bool = False
    ) -> CancelScope:
        return get_asynclib().CancelScope(shield=shield, deadline=deadline)

    def cancel(self) -> DeprecatedAwaitable:
        """Cancel this scope immediately."""
        raise NotImplementedError

    @property
    def deadline(self) -> float:
        """
        The time (clock value) when this scope is cancelled automatically.

        Will be ``float('inf')`` if no timeout has been set.

        """
        raise NotImplementedError

    @deadline.setter
    def deadline(self, value: float) -> None:
        raise NotImplementedError

    @property
    def cancel_called(self) -> bool:
        """``True`` if :meth:`cancel` has been called."""
        raise NotImplementedError

    @property
    def shield(self) -> bool:
        """
        ``True`` if this scope is shielded from external cancellation.

        While a scope is shielded, it will not receive cancellations from outside.

        """
        raise NotImplementedError

    @shield.setter
    def shield(self, value: bool) -> None:
        raise NotImplementedError

    def __enter__(self) -> CancelScope:
        raise NotImplementedError

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        raise NotImplementedError


def open_cancel_scope(*, shield: bool = False) -> CancelScope:
    """
    Open a cancel scope.

    :param shield: ``True`` to shield the cancel scope from external cancellation
    :return: a cancel scope

    .. deprecated:: 3.0
       Use :class:`~CancelScope` directly.

    """
    warn(
        "open_cancel_scope() is deprecated -- use CancelScope() directly",
        DeprecationWarning,
    )
    return get_asynclib().CancelScope(shield=shield)


class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]):
    def __init__(self, cancel_scope: CancelScope):
        self._cancel_scope = cancel_scope

    def __enter__(self) -> CancelScope:
        return self._cancel_scope.__enter__()

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb)
        if self._cancel_scope.cancel_called:
            raise TimeoutError

        return retval


def fail_after(delay: float | None, shield: bool = False) -> FailAfterContextManager:
    """
    Create a context manager which raises a :class:`TimeoutError` if it does not finish in time.

    :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to
        disable the timeout
    :param shield: ``True`` to shield the cancel scope from external cancellation
    :return: a context manager that yields a cancel scope
    :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\]

    """
    deadline = (
        (get_asynclib().current_time() + delay) if delay is not None else math.inf
    )
    cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield)
    return FailAfterContextManager(cancel_scope)


def move_on_after(delay: float | None, shield: bool = False) -> CancelScope:
    """
    Create a cancel scope with a deadline that expires after the given delay.

    :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None``
        to disable the timeout
    :param shield: ``True`` to shield the cancel scope from external cancellation
    :return: a cancel scope

    """
    deadline = (
        (get_asynclib().current_time() + delay) if delay is not None else math.inf
    )
    return get_asynclib().CancelScope(deadline=deadline, shield=shield)
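
# Editorial usage sketch (not part of the original commit): fail_after()
# raises TimeoutError when the deadline passes, while move_on_after() exits
# the block silently.  `_timeout_demo` is a hypothetical helper; the local
# import of `sleep` from ._eventloop (where anyio 3.x defines it) avoids an
# import cycle.  Never called at import time.
async def _timeout_demo() -> None:
    from ._eventloop import sleep

    with move_on_after(0.1) as scope:
        await sleep(1)  # cancelled once the deadline passes
    assert scope.cancel_called

    try:
        with fail_after(0.1):
            await sleep(1)
    except TimeoutError:
        print("timed out, as expected")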


def current_effective_deadline() -> DeprecatedAwaitableFloat:
    """
    Return the nearest deadline among all the cancel scopes effective for the current task.

    :return: a clock value from the event loop's internal clock (or ``float('inf')`` if
        there is no deadline in effect, or ``float('-inf')`` if the current scope has
        been cancelled)
    :rtype: float

    """
    return DeprecatedAwaitableFloat(
        get_asynclib().current_effective_deadline(), current_effective_deadline
    )


def create_task_group() -> TaskGroup:
    """
    Create a task group.

    :return: a task group

    """
    return get_asynclib().TaskGroup()
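
# Editorial usage sketch (not part of the original commit): running two child
# tasks concurrently.  `_task_group_demo` is a hypothetical helper and is
# never called at import time.
async def _task_group_demo() -> None:
    async def child(label: str) -> None:
        print("running child", label)

    async with create_task_group() as tg:
        tg.start_soon(child, "a")
        tg.start_soon(child, "b")
    # the `async with` block only exits once both children have finished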
@ -0,0 +1,82 @@
from __future__ import annotations

from typing import Any, Awaitable, Generator

from ._compat import DeprecatedAwaitableList, _warn_deprecation
from ._eventloop import get_asynclib


class TaskInfo:
    """
    Represents an asynchronous task.

    :ivar int id: the unique identifier of the task
    :ivar parent_id: the identifier of the parent task, if any
    :vartype parent_id: Optional[int]
    :ivar str name: the description of the task (if any)
    :ivar ~collections.abc.Coroutine coro: the coroutine object of the task
    """

    __slots__ = "_name", "id", "parent_id", "name", "coro"

    def __init__(
        self,
        id: int,
        parent_id: int | None,
        name: str | None,
        coro: Generator[Any, Any, Any] | Awaitable[Any],
    ):
        func = get_current_task
        self._name = f"{func.__module__}.{func.__qualname__}"
        self.id: int = id
        self.parent_id: int | None = parent_id
        self.name: str | None = name
        self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro

    def __eq__(self, other: object) -> bool:
        if isinstance(other, TaskInfo):
            return self.id == other.id

        return NotImplemented

    def __hash__(self) -> int:
        return hash(self.id)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})"

    def __await__(self) -> Generator[None, None, TaskInfo]:
        _warn_deprecation(self)
        if False:
            # Never reached; the dead `yield` merely turns this method into a
            # generator function so that the object can be awaited.
            yield

        return self

    def _unwrap(self) -> TaskInfo:
        return self


def get_current_task() -> TaskInfo:
    """
    Return the current task.

    :return: a representation of the current task

    """
    return get_asynclib().get_current_task()


def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]:
    """
    Return a list of running tasks in the current event loop.

    :return: a list of task info objects

    """
    tasks = get_asynclib().get_running_tasks()
    return DeprecatedAwaitableList(tasks, func=get_running_tasks)


async def wait_all_tasks_blocked() -> None:
    """Wait until all other tasks are waiting for something."""
    await get_asynclib().wait_all_tasks_blocked()
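
# Editorial usage sketch (not part of the original commit): inspecting tasks
# from inside a running event loop.  `_introspection_demo` is a hypothetical
# helper and is never called at import time.
async def _introspection_demo() -> None:
    current = get_current_task()
    print(f"current task: id={current.id} name={current.name}")
    for task in get_running_tasks():  # a list of TaskInfo objects
        print(task)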
@ -0,0 +1,83 @@
from __future__ import annotations

import sys
from typing import Any, Callable, Mapping, TypeVar, overload

from ._exceptions import TypedAttributeLookupError

if sys.version_info >= (3, 8):
    from typing import final
else:
    from typing_extensions import final

T_Attr = TypeVar("T_Attr")
T_Default = TypeVar("T_Default")
undefined = object()


def typed_attribute() -> Any:
    """Return a unique object, used to mark typed attributes."""
    return object()


class TypedAttributeSet:
    """
    Superclass for typed attribute collections.

    Checks that every public attribute of every subclass has a type annotation.
    """

    def __init_subclass__(cls) -> None:
        annotations: dict[str, Any] = getattr(cls, "__annotations__", {})
        for attrname in dir(cls):
            if not attrname.startswith("_") and attrname not in annotations:
                raise TypeError(
                    f"Attribute {attrname!r} is missing its type annotation"
                )

        super().__init_subclass__()


class TypedAttributeProvider:
    """Base class for classes that wish to provide typed extra attributes."""

    @property
    def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]:
        """
        A mapping of the extra attributes to callables that return the corresponding values.

        If the provider wraps another provider, the attributes from that wrapper should also be
        included in the returned mapping (but the wrapper may override the callables from the
        wrapped instance).

        """
        return {}

    @overload
    def extra(self, attribute: T_Attr) -> T_Attr:
        ...

    @overload
    def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default:
        ...

    @final
    def extra(self, attribute: Any, default: object = undefined) -> object:
        """
        extra(attribute, default=undefined)

        Return the value of the given typed extra attribute.

        :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for
        :param default: the value that should be returned if no value is found for the attribute
        :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was
            given

        """
        try:
            return self.extra_attributes[attribute]()
        except KeyError:
            if default is undefined:
                raise TypedAttributeLookupError("Attribute not found") from None
            else:
                return default
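
# Editorial usage sketch (not part of the original commit): declaring a typed
# attribute set and querying a provider.  The `_Example*` names are
# hypothetical and exist purely for illustration.
class _ExampleAttributes(TypedAttributeSet):
    #: every public attribute must carry a type annotation
    hostname: str = typed_attribute()


class _ExampleProvider(TypedAttributeProvider):
    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return {_ExampleAttributes.hostname: lambda: "localhost"}


# _ExampleProvider().extra(_ExampleAttributes.hostname) returns "localhost";
# an unknown attribute raises TypedAttributeLookupError unless a default is
# given: _ExampleProvider().extra(typed_attribute(), default=None) -> None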
@ -0,0 +1,90 @@
from __future__ import annotations

__all__ = (
    "AsyncResource",
    "IPAddressType",
    "IPSockAddrType",
    "SocketAttribute",
    "SocketStream",
    "SocketListener",
    "UDPSocket",
    "UNIXSocketStream",
    "UDPPacketType",
    "ConnectedUDPSocket",
    "UnreliableObjectReceiveStream",
    "UnreliableObjectSendStream",
    "UnreliableObjectStream",
    "ObjectReceiveStream",
    "ObjectSendStream",
    "ObjectStream",
    "ByteReceiveStream",
    "ByteSendStream",
    "ByteStream",
    "AnyUnreliableByteReceiveStream",
    "AnyUnreliableByteSendStream",
    "AnyUnreliableByteStream",
    "AnyByteReceiveStream",
    "AnyByteSendStream",
    "AnyByteStream",
    "Listener",
    "Process",
    "Event",
    "Condition",
    "Lock",
    "Semaphore",
    "CapacityLimiter",
    "CancelScope",
    "TaskGroup",
    "TaskStatus",
    "TestRunner",
    "BlockingPortal",
)

from typing import Any

from ._resources import AsyncResource
from ._sockets import (
    ConnectedUDPSocket,
    IPAddressType,
    IPSockAddrType,
    SocketAttribute,
    SocketListener,
    SocketStream,
    UDPPacketType,
    UDPSocket,
    UNIXSocketStream,
)
from ._streams import (
    AnyByteReceiveStream,
    AnyByteSendStream,
    AnyByteStream,
    AnyUnreliableByteReceiveStream,
    AnyUnreliableByteSendStream,
    AnyUnreliableByteStream,
    ByteReceiveStream,
    ByteSendStream,
    ByteStream,
    Listener,
    ObjectReceiveStream,
    ObjectSendStream,
    ObjectStream,
    UnreliableObjectReceiveStream,
    UnreliableObjectSendStream,
    UnreliableObjectStream,
)
from ._subprocesses import Process
from ._tasks import TaskGroup, TaskStatus
from ._testing import TestRunner

# Re-exported here, for backwards compatibility
# isort: off
from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore
from .._core._tasks import CancelScope
from ..from_thread import BlockingPortal

# Re-export imports so they look like they live directly in this package
key: str
value: Any
for key, value in list(locals().items()):
    if getattr(value, "__module__", "").startswith("anyio.abc."):
        value.__module__ = __name__
@ -0,0 +1,31 @@
from __future__ import annotations

from abc import ABCMeta, abstractmethod
from types import TracebackType
from typing import TypeVar

T = TypeVar("T")


class AsyncResource(metaclass=ABCMeta):
    """
    Abstract base class for all closeable asynchronous resources.

    Works as an asynchronous context manager which returns the instance itself on enter, and calls
    :meth:`aclose` on exit.
    """

    async def __aenter__(self: T) -> T:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        await self.aclose()

    @abstractmethod
    async def aclose(self) -> None:
        """Close the resource."""
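
# Editorial usage sketch (not part of the original commit): the smallest
# possible concrete AsyncResource.  `_NullResource` is a hypothetical class,
# shown purely for illustration.
class _NullResource(AsyncResource):
    async def aclose(self) -> None:
        pass  # nothing to release

# Typical consumption:
#     async with _NullResource() as resource:  # aclose() runs on exit
#         ...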
@ -0,0 +1,160 @@
from __future__ import annotations

import socket
from abc import abstractmethod
from contextlib import AsyncExitStack
from io import IOBase
from ipaddress import IPv4Address, IPv6Address
from socket import AddressFamily
from typing import (
    Any,
    Callable,
    Collection,
    Mapping,
    Tuple,
    TypeVar,
    Union,
)

from .._core._tasks import create_task_group
from .._core._typedattr import (
    TypedAttributeProvider,
    TypedAttributeSet,
    typed_attribute,
)
from ._streams import ByteStream, Listener, UnreliableObjectStream
from ._tasks import TaskGroup

IPAddressType = Union[str, IPv4Address, IPv6Address]
IPSockAddrType = Tuple[str, int]
SockAddrType = Union[IPSockAddrType, str]
UDPPacketType = Tuple[bytes, IPSockAddrType]
T_Retval = TypeVar("T_Retval")


class SocketAttribute(TypedAttributeSet):
    #: the address family of the underlying socket
    family: AddressFamily = typed_attribute()
    #: the local socket address of the underlying socket
    local_address: SockAddrType = typed_attribute()
    #: for IP addresses, the local port the underlying socket is bound to
    local_port: int = typed_attribute()
    #: the underlying stdlib socket object
    raw_socket: socket.socket = typed_attribute()
    #: the remote address the underlying socket is connected to
    remote_address: SockAddrType = typed_attribute()
    #: for IP addresses, the remote port the underlying socket is connected to
    remote_port: int = typed_attribute()


class _SocketProvider(TypedAttributeProvider):
    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        from .._core._sockets import convert_ipv6_sockaddr as convert

        attributes: dict[Any, Callable[[], Any]] = {
            SocketAttribute.family: lambda: self._raw_socket.family,
            SocketAttribute.local_address: lambda: convert(
                self._raw_socket.getsockname()
            ),
            SocketAttribute.raw_socket: lambda: self._raw_socket,
        }
        try:
            peername: tuple[str, int] | None = convert(self._raw_socket.getpeername())
        except OSError:
            peername = None

        # Provide the remote address for connected sockets
        if peername is not None:
            attributes[SocketAttribute.remote_address] = lambda: peername

        # Provide local and remote ports for IP based sockets
        if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6):
            attributes[
                SocketAttribute.local_port
            ] = lambda: self._raw_socket.getsockname()[1]
            if peername is not None:
                remote_port = peername[1]
                attributes[SocketAttribute.remote_port] = lambda: remote_port

        return attributes

    @property
    @abstractmethod
    def _raw_socket(self) -> socket.socket:
        pass


class SocketStream(ByteStream, _SocketProvider):
    """
    Transports bytes over a socket.

    Supports all relevant extra attributes from :class:`~SocketAttribute`.
    """


class UNIXSocketStream(SocketStream):
    @abstractmethod
    async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
        """
        Send file descriptors along with a message to the peer.

        :param message: a non-empty bytestring
        :param fds: a collection of files (either numeric file descriptors or open file or socket
            objects)
        """

    @abstractmethod
    async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
        """
        Receive file descriptors along with a message from the peer.

        :param msglen: length of the message to expect from the peer
        :param maxfds: maximum number of file descriptors to expect from the peer
        :return: a tuple of (message, file descriptors)
        """


class SocketListener(Listener[SocketStream], _SocketProvider):
    """
    Listens to incoming socket connections.

    Supports all relevant extra attributes from :class:`~SocketAttribute`.
    """

    @abstractmethod
    async def accept(self) -> SocketStream:
        """Accept an incoming connection."""

    async def serve(
        self,
        handler: Callable[[SocketStream], Any],
        task_group: TaskGroup | None = None,
    ) -> None:
        async with AsyncExitStack() as exit_stack:
            if task_group is None:
                task_group = await exit_stack.enter_async_context(create_task_group())

            while True:
                stream = await self.accept()
                task_group.start_soon(handler, stream)


class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
    """
    Represents an unconnected UDP socket.

    Supports all relevant extra attributes from :class:`~SocketAttribute`.
    """

    async def sendto(self, data: bytes, host: str, port: int) -> None:
        """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port)))."""
        return await self.send((data, (host, port)))


class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
    """
    Represents a connected UDP socket.

    Supports all relevant extra attributes from :class:`~SocketAttribute`.
    """
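
# Editorial usage sketch (not part of the original commit): reading socket
# metadata through the typed attribute machinery.  Assumes the public
# anyio.connect_tcp() helper and network access; `_socket_attribute_demo` is a
# hypothetical helper and is never called at import time.
async def _socket_attribute_demo() -> None:
    from anyio import connect_tcp  # local import to avoid a circular import

    async with await connect_tcp("example.com", 80) as stream:
        print(stream.extra(SocketAttribute.remote_address))
        print(stream.extra(SocketAttribute.remote_port))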
@ -0,0 +1,203 @@
from __future__ import annotations

from abc import abstractmethod
from typing import Any, Callable, Generic, TypeVar, Union

from .._core._exceptions import EndOfStream
from .._core._typedattr import TypedAttributeProvider
from ._resources import AsyncResource
from ._tasks import TaskGroup

T_Item = TypeVar("T_Item")
T_co = TypeVar("T_co", covariant=True)
T_contra = TypeVar("T_contra", contravariant=True)


class UnreliableObjectReceiveStream(
    Generic[T_co], AsyncResource, TypedAttributeProvider
):
    """
    An interface for receiving objects.

    This interface makes no guarantees that the received messages arrive in the order in which they
    were sent, or that no messages are missed.

    Asynchronously iterating over objects of this type will yield objects matching the given type
    parameter.
    """

    def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]:
        return self

    async def __anext__(self) -> T_co:
        try:
            return await self.receive()
        except EndOfStream:
            raise StopAsyncIteration

    @abstractmethod
    async def receive(self) -> T_co:
        """
        Receive the next item.

        :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly
            closed
        :raises ~anyio.EndOfStream: if this stream has been closed from the other end
        :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
            due to external causes
        """


class UnreliableObjectSendStream(
    Generic[T_contra], AsyncResource, TypedAttributeProvider
):
    """
    An interface for sending objects.

    This interface makes no guarantees that the messages sent will reach the recipient(s) in the
    same order in which they were sent, or at all.
    """

    @abstractmethod
    async def send(self, item: T_contra) -> None:
        """
        Send an item to the peer(s).

        :param item: the item to send
        :raises ~anyio.ClosedResourceError: if the send stream has been explicitly
            closed
        :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
            due to external causes
        """


class UnreliableObjectStream(
    UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item]
):
    """
    A bidirectional message stream which does not guarantee the order or reliability of message
    delivery.
    """


class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]):
    """
    A receive message stream which guarantees that messages are received in the same order in
    which they were sent, and that no messages are missed.
    """


class ObjectSendStream(UnreliableObjectSendStream[T_contra]):
    """
    A send message stream which guarantees that messages are delivered in the same order in which
    they were sent, without missing any messages in the middle.
    """


class ObjectStream(
    ObjectReceiveStream[T_Item],
    ObjectSendStream[T_Item],
    UnreliableObjectStream[T_Item],
):
    """
    A bidirectional message stream which guarantees the order and reliability of message delivery.
    """

    @abstractmethod
    async def send_eof(self) -> None:
        """
        Send an end-of-file indication to the peer.

        You should not try to send any further data to this stream after calling this method.
        This method is idempotent (does nothing on successive calls).
        """


class ByteReceiveStream(AsyncResource, TypedAttributeProvider):
    """
    An interface for receiving bytes from a single peer.

    Iterating this byte stream will yield a byte string of arbitrary length, but no more than
    65536 bytes.
    """

    def __aiter__(self) -> ByteReceiveStream:
        return self

    async def __anext__(self) -> bytes:
        try:
            return await self.receive()
        except EndOfStream:
            raise StopAsyncIteration

    @abstractmethod
    async def receive(self, max_bytes: int = 65536) -> bytes:
        """
        Receive at most ``max_bytes`` bytes from the peer.

        .. note:: Implementors of this interface should not return an empty :class:`bytes` object,
            and users should ignore them.

        :param max_bytes: maximum number of bytes to receive
        :return: the received bytes
        :raises ~anyio.EndOfStream: if this stream has been closed from the other end
        """


class ByteSendStream(AsyncResource, TypedAttributeProvider):
    """An interface for sending bytes to a single peer."""

    @abstractmethod
    async def send(self, item: bytes) -> None:
        """
        Send the given bytes to the peer.

        :param item: the bytes to send
        """


class ByteStream(ByteReceiveStream, ByteSendStream):
    """A bidirectional byte stream."""

    @abstractmethod
    async def send_eof(self) -> None:
        """
        Send an end-of-file indication to the peer.

        You should not try to send any further data to this stream after calling this method.
        This method is idempotent (does nothing on successive calls).
        """


#: Type alias for all unreliable bytes-oriented receive streams.
AnyUnreliableByteReceiveStream = Union[
    UnreliableObjectReceiveStream[bytes], ByteReceiveStream
]
#: Type alias for all unreliable bytes-oriented send streams.
AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream]
#: Type alias for all unreliable bytes-oriented streams.
AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream]
#: Type alias for all bytes-oriented receive streams.
AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream]
#: Type alias for all bytes-oriented send streams.
AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream]
#: Type alias for all bytes-oriented streams.
AnyByteStream = Union[ObjectStream[bytes], ByteStream]


class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider):
    """An interface for objects that let you accept incoming connections."""

    @abstractmethod
    async def serve(
        self,
        handler: Callable[[T_co], Any],
        task_group: TaskGroup | None = None,
    ) -> None:
        """
        Accept incoming connections as they come in and start tasks to handle them.

        :param handler: a callable that will be used to handle each accepted connection
        :param task_group: the task group that will be used to start tasks for handling each
            accepted connection (if omitted, an ad-hoc task group will be created)
        """
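
# Editorial usage sketch (not part of the original commit): anyio's in-memory
# object streams implement the ObjectSendStream/ObjectReceiveStream interfaces
# defined above.  `_stream_demo` is a hypothetical helper and is never called
# at import time.
async def _stream_demo() -> None:
    from anyio import create_memory_object_stream  # avoids a circular import

    send, receive = create_memory_object_stream(max_buffer_size=1)
    await send.send("hello")
    assert await receive.receive() == "hello"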
@ -0,0 +1,79 @@
from __future__ import annotations

from abc import abstractmethod
from signal import Signals

from ._resources import AsyncResource
from ._streams import ByteReceiveStream, ByteSendStream


class Process(AsyncResource):
    """An asynchronous version of :class:`subprocess.Popen`."""

    @abstractmethod
    async def wait(self) -> int:
        """
        Wait until the process exits.

        :return: the exit code of the process
        """

    @abstractmethod
    def terminate(self) -> None:
        """
        Terminates the process, gracefully if possible.

        On Windows, this calls ``TerminateProcess()``.
        On POSIX systems, this sends ``SIGTERM`` to the process.

        .. seealso:: :meth:`subprocess.Popen.terminate`
        """

    @abstractmethod
    def kill(self) -> None:
        """
        Kills the process.

        On Windows, this calls ``TerminateProcess()``.
        On POSIX systems, this sends ``SIGKILL`` to the process.

        .. seealso:: :meth:`subprocess.Popen.kill`
        """

    @abstractmethod
    def send_signal(self, signal: Signals) -> None:
        """
        Send a signal to the subprocess.

        .. seealso:: :meth:`subprocess.Popen.send_signal`

        :param signal: the signal number (e.g. :data:`signal.SIGHUP`)
        """

    @property
    @abstractmethod
    def pid(self) -> int:
        """The process ID of the process."""

    @property
    @abstractmethod
    def returncode(self) -> int | None:
        """
        The return code of the process. If the process has not yet terminated, this will be
        ``None``.
        """

    @property
    @abstractmethod
    def stdin(self) -> ByteSendStream | None:
        """The stream for the standard input of the process."""

    @property
    @abstractmethod
    def stdout(self) -> ByteReceiveStream | None:
        """The stream for the standard output of the process."""

    @property
    @abstractmethod
    def stderr(self) -> ByteReceiveStream | None:
        """The stream for the standard error output of the process."""
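
# Editorial usage sketch (not part of the original commit): Process instances
# are normally obtained from the public anyio.open_process() helper.
# `_process_demo` is a hypothetical helper, assumes a POSIX-style `echo`
# binary, and is never called at import time.
async def _process_demo() -> None:
    from anyio import open_process  # local import to avoid a circular import

    async with await open_process(["echo", "hello"]) as process:
        assert process.stdout is not None
        print(await process.stdout.receive())
    print("exit code:", process.returncode)  # set once the process has exited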
@ -0,0 +1,119 @@
from __future__ import annotations

import sys
from abc import ABCMeta, abstractmethod
from types import TracebackType
from typing import TYPE_CHECKING, Any, Awaitable, Callable, TypeVar, overload
from warnings import warn

if sys.version_info >= (3, 8):
    from typing import Protocol
else:
    from typing_extensions import Protocol

if TYPE_CHECKING:
    from anyio._core._tasks import CancelScope

T_Retval = TypeVar("T_Retval")
T_contra = TypeVar("T_contra", contravariant=True)


class TaskStatus(Protocol[T_contra]):
    @overload
    def started(self: TaskStatus[None]) -> None:
        ...

    @overload
    def started(self, value: T_contra) -> None:
        ...

    def started(self, value: T_contra | None = None) -> None:
        """
        Signal that the task has started.

        :param value: object passed back to the starter of the task
        """


class TaskGroup(metaclass=ABCMeta):
    """
    Groups several asynchronous tasks together.

    :ivar cancel_scope: the cancel scope inherited by all child tasks
    :vartype cancel_scope: CancelScope
    """

    cancel_scope: CancelScope

    async def spawn(
        self,
        func: Callable[..., Awaitable[Any]],
        *args: object,
        name: object = None,
    ) -> None:
        """
        Start a new task in this task group.

        :param func: a coroutine function
        :param args: positional arguments to call the function with
        :param name: name of the task, for the purposes of introspection and debugging

        .. deprecated:: 3.0
           Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you
           can keep using this until AnyIO 4.

        """
        warn(
            'spawn() is deprecated -- use start_soon() (without the "await") instead',
            DeprecationWarning,
        )
        self.start_soon(func, *args, name=name)

    @abstractmethod
    def start_soon(
        self,
        func: Callable[..., Awaitable[Any]],
        *args: object,
        name: object = None,
    ) -> None:
        """
        Start a new task in this task group.

        :param func: a coroutine function
        :param args: positional arguments to call the function with
        :param name: name of the task, for the purposes of introspection and debugging

        .. versionadded:: 3.0
        """

    @abstractmethod
    async def start(
        self,
        func: Callable[..., Awaitable[Any]],
        *args: object,
        name: object = None,
    ) -> Any:
        """
        Start a new task and wait until it signals for readiness.

        :param func: a coroutine function
        :param args: positional arguments to call the function with
        :param name: name of the task, for the purposes of introspection and debugging
        :return: the value passed to ``task_status.started()``
        :raises RuntimeError: if the task finishes without calling ``task_status.started()``

        .. versionadded:: 3.0
        """

    @abstractmethod
    async def __aenter__(self) -> TaskGroup:
        """Enter the task group context and allow starting new tasks."""

    @abstractmethod
    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        """Exit the task group context waiting for all tasks to finish."""
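
# Editorial usage sketch (not part of the original commit): TaskGroup.start()
# blocks until the child calls task_status.started(), optionally passing a
# value back.  `_start_demo` and `server` are hypothetical helpers; never
# called at import time.
async def _start_demo() -> None:
    import anyio  # local import to avoid a circular import

    async def server(*, task_status: TaskStatus = anyio.TASK_STATUS_IGNORED) -> None:
        task_status.started(8080)  # hand a value back to start()
        await anyio.sleep(0.1)  # ... and keep serving afterwards

    async with anyio.create_task_group() as tg:
        port = await tg.start(server)
        assert port == 8080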
@ -0,0 +1,70 @@
from __future__ import annotations

import types
from abc import ABCMeta, abstractmethod
from collections.abc import AsyncGenerator, Iterable
from typing import Any, Callable, Coroutine, TypeVar

_T = TypeVar("_T")


class TestRunner(metaclass=ABCMeta):
    """
    Encapsulates a running event loop. Every call made through this object will use the same event
    loop.
    """

    def __enter__(self) -> TestRunner:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: types.TracebackType | None,
    ) -> bool | None:
        self.close()
        return None

    @abstractmethod
    def close(self) -> None:
        """Close the event loop."""

    @abstractmethod
    def run_asyncgen_fixture(
        self,
        fixture_func: Callable[..., AsyncGenerator[_T, Any]],
        kwargs: dict[str, Any],
    ) -> Iterable[_T]:
        """
        Run an async generator fixture.

        :param fixture_func: the fixture function
        :param kwargs: keyword arguments to call the fixture function with
        :return: an iterator yielding the value yielded from the async generator
        """

    @abstractmethod
    def run_fixture(
        self,
        fixture_func: Callable[..., Coroutine[Any, Any, _T]],
        kwargs: dict[str, Any],
    ) -> _T:
        """
        Run an async fixture.

        :param fixture_func: the fixture function
        :param kwargs: keyword arguments to call the fixture function with
        :return: the return value of the fixture function
        """

    @abstractmethod
    def run_test(
        self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any]
    ) -> None:
        """
        Run an async test function.

        :param test_func: the test function
        :param kwargs: keyword arguments to call the test function with
        """
@ -0,0 +1,500 @@
from __future__ import annotations

import threading
from asyncio import iscoroutine
from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait
from contextlib import AbstractContextManager, contextmanager
from types import TracebackType
from typing import (
    Any,
    AsyncContextManager,
    Awaitable,
    Callable,
    ContextManager,
    Generator,
    Generic,
    Iterable,
    TypeVar,
    cast,
    overload,
)
from warnings import warn

from ._core import _eventloop
from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals
from ._core._synchronization import Event
from ._core._tasks import CancelScope, create_task_group
from .abc._tasks import TaskStatus

T_Retval = TypeVar("T_Retval")
T_co = TypeVar("T_co")


def run(func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval:
    """
    Call a coroutine function from a worker thread.

    :param func: a coroutine function
    :param args: positional arguments for the callable
    :return: the return value of the coroutine function

    """
    try:
        asynclib = threadlocals.current_async_module
    except AttributeError:
        raise RuntimeError("This function can only be run from an AnyIO worker thread")

    return asynclib.run_async_from_thread(func, *args)


def run_async_from_thread(
    func: Callable[..., Awaitable[T_Retval]], *args: object
) -> T_Retval:
    warn(
        "run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead",
        DeprecationWarning,
    )
    return run(func, *args)


def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval:
    """
    Call a function in the event loop thread from a worker thread.

    :param func: a callable
    :param args: positional arguments for the callable
    :return: the return value of the callable

    """
    try:
        asynclib = threadlocals.current_async_module
    except AttributeError:
        raise RuntimeError("This function can only be run from an AnyIO worker thread")

    return asynclib.run_sync_from_thread(func, *args)


def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval:
    warn(
        "run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead",
        DeprecationWarning,
    )
    return run_sync(func, *args)
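
# Editorial usage sketch (not part of the original commit): calling back into
# the event loop from a worker thread.  Assumes the public anyio.to_thread
# module; `_from_thread_demo` is a hypothetical helper and is never called at
# import time.
async def _from_thread_demo() -> None:
    from anyio import sleep, to_thread  # local import to avoid a cycle

    def blocking_work() -> str:
        run(sleep, 0.01)  # await a coroutine from this worker thread
        return run_sync(str.upper, "done")  # call a sync function in the loop

    result = await to_thread.run_sync(blocking_work)
    assert result == "DONE"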


class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager):
    _enter_future: Future
    _exit_future: Future
    _exit_event: Event
    _exit_exc_info: tuple[
        type[BaseException] | None, BaseException | None, TracebackType | None
    ] = (None, None, None)

    def __init__(self, async_cm: AsyncContextManager[T_co], portal: BlockingPortal):
        self._async_cm = async_cm
        self._portal = portal

    async def run_async_cm(self) -> bool | None:
        try:
            self._exit_event = Event()
            value = await self._async_cm.__aenter__()
        except BaseException as exc:
            self._enter_future.set_exception(exc)
            raise
        else:
            self._enter_future.set_result(value)

        try:
            # Wait for the sync context manager to exit.
            # This next statement can raise `get_cancelled_exc_class()` if
            # something went wrong in a task group in this async context
            # manager.
            await self._exit_event.wait()
        finally:
            # In case of cancellation, it could be that we end up here before
            # `_BlockingAsyncContextManager.__exit__` is called, and an
            # `_exit_exc_info` has been set.
            result = await self._async_cm.__aexit__(*self._exit_exc_info)
            return result

    def __enter__(self) -> T_co:
        self._enter_future = Future()
        self._exit_future = self._portal.start_task_soon(self.run_async_cm)
        cm = self._enter_future.result()
        return cast(T_co, cm)

    def __exit__(
        self,
        __exc_type: type[BaseException] | None,
        __exc_value: BaseException | None,
        __traceback: TracebackType | None,
    ) -> bool | None:
        self._exit_exc_info = __exc_type, __exc_value, __traceback
        self._portal.call(self._exit_event.set)
        return self._exit_future.result()


class _BlockingPortalTaskStatus(TaskStatus):
    def __init__(self, future: Future):
        self._future = future

    def started(self, value: object = None) -> None:
        self._future.set_result(value)


class BlockingPortal:
    """An object that lets external threads run code in an asynchronous event loop."""

    def __new__(cls) -> BlockingPortal:
        return get_asynclib().BlockingPortal()

    def __init__(self) -> None:
        self._event_loop_thread_id: int | None = threading.get_ident()
        self._stop_event = Event()
        self._task_group = create_task_group()
        self._cancelled_exc_class = get_cancelled_exc_class()

    async def __aenter__(self) -> BlockingPortal:
        await self._task_group.__aenter__()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        await self.stop()
        return await self._task_group.__aexit__(exc_type, exc_val, exc_tb)

    def _check_running(self) -> None:
        if self._event_loop_thread_id is None:
            raise RuntimeError("This portal is not running")
        if self._event_loop_thread_id == threading.get_ident():
            raise RuntimeError(
                "This method cannot be called from the event loop thread"
            )

    async def sleep_until_stopped(self) -> None:
        """Sleep until :meth:`stop` is called."""
        await self._stop_event.wait()

    async def stop(self, cancel_remaining: bool = False) -> None:
        """
        Signal the portal to shut down.

        This marks the portal as no longer accepting new calls and exits from
        :meth:`sleep_until_stopped`.

        :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them
            finish before returning

        """
        self._event_loop_thread_id = None
        self._stop_event.set()
        if cancel_remaining:
            self._task_group.cancel_scope.cancel()

    async def _call_func(
        self, func: Callable, args: tuple, kwargs: dict[str, Any], future: Future
|
||||||
|
) -> None:
|
||||||
|
def callback(f: Future) -> None:
|
||||||
|
if f.cancelled() and self._event_loop_thread_id not in (
|
||||||
|
None,
|
||||||
|
threading.get_ident(),
|
||||||
|
):
|
||||||
|
self.call(scope.cancel)
|
||||||
|
|
||||||
|
try:
|
||||||
|
retval = func(*args, **kwargs)
|
||||||
|
if iscoroutine(retval):
|
||||||
|
with CancelScope() as scope:
|
||||||
|
if future.cancelled():
|
||||||
|
scope.cancel()
|
||||||
|
else:
|
||||||
|
future.add_done_callback(callback)
|
||||||
|
|
||||||
|
retval = await retval
|
||||||
|
except self._cancelled_exc_class:
|
||||||
|
future.cancel()
|
||||||
|
except BaseException as exc:
|
||||||
|
if not future.cancelled():
|
||||||
|
future.set_exception(exc)
|
||||||
|
|
||||||
|
# Let base exceptions fall through
|
||||||
|
if not isinstance(exc, Exception):
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
if not future.cancelled():
|
||||||
|
future.set_result(retval)
|
||||||
|
finally:
|
||||||
|
scope = None # type: ignore[assignment]
|
||||||
|
|
||||||
|
def _spawn_task_from_thread(
|
||||||
|
self,
|
||||||
|
func: Callable,
|
||||||
|
args: tuple,
|
||||||
|
kwargs: dict[str, Any],
|
||||||
|
name: object,
|
||||||
|
future: Future,
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Spawn a new task using the given callable.
|
||||||
|
|
||||||
|
Implementors must ensure that the future is resolved when the task finishes.
|
||||||
|
|
||||||
|
:param func: a callable
|
||||||
|
:param args: positional arguments to be passed to the callable
|
||||||
|
:param kwargs: keyword arguments to be passed to the callable
|
||||||
|
:param name: name of the task (will be coerced to a string if not ``None``)
|
||||||
|
:param future: a future that will resolve to the return value of the callable, or the
|
||||||
|
exception raised during its execution
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def call(self, func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval:
|
||||||
|
...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval:
|
||||||
|
...
|
||||||
|
|
||||||
|
def call(
|
||||||
|
self, func: Callable[..., Awaitable[T_Retval] | T_Retval], *args: object
|
||||||
|
) -> T_Retval:
|
||||||
|
"""
|
||||||
|
Call the given function in the event loop thread.
|
||||||
|
|
||||||
|
If the callable returns a coroutine object, it is awaited on.
|
||||||
|
|
||||||
|
:param func: any callable
|
||||||
|
:raises RuntimeError: if the portal is not running or if this method is called from within
|
||||||
|
the event loop thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
return cast(T_Retval, self.start_task_soon(func, *args).result())
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def spawn_task(
|
||||||
|
self,
|
||||||
|
func: Callable[..., Awaitable[T_Retval]],
|
||||||
|
*args: object,
|
||||||
|
name: object = None,
|
||||||
|
) -> Future[T_Retval]:
|
||||||
|
...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def spawn_task(
|
||||||
|
self, func: Callable[..., T_Retval], *args: object, name: object = None
|
||||||
|
) -> Future[T_Retval]:
|
||||||
|
...
|
||||||
|
|
||||||
|
def spawn_task(
|
||||||
|
self,
|
||||||
|
func: Callable[..., Awaitable[T_Retval] | T_Retval],
|
||||||
|
*args: object,
|
||||||
|
name: object = None,
|
||||||
|
) -> Future[T_Retval]:
|
||||||
|
"""
|
||||||
|
Start a task in the portal's task group.
|
||||||
|
|
||||||
|
:param func: the target coroutine function
|
||||||
|
:param args: positional arguments passed to ``func``
|
||||||
|
:param name: name of the task (will be coerced to a string if not ``None``)
|
||||||
|
:return: a future that resolves with the return value of the callable if the task completes
|
||||||
|
successfully, or with the exception raised in the task
|
||||||
|
:raises RuntimeError: if the portal is not running or if this method is called from within
|
||||||
|
the event loop thread
|
||||||
|
|
||||||
|
.. versionadded:: 2.1
|
||||||
|
.. deprecated:: 3.0
|
||||||
|
Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you
|
||||||
|
can keep using this until AnyIO 4.
|
||||||
|
|
||||||
|
"""
|
||||||
|
warn(
|
||||||
|
"spawn_task() is deprecated -- use start_task_soon() instead",
|
||||||
|
DeprecationWarning,
|
||||||
|
)
|
||||||
|
return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type]
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def start_task_soon(
|
||||||
|
self,
|
||||||
|
func: Callable[..., Awaitable[T_Retval]],
|
||||||
|
*args: object,
|
||||||
|
name: object = None,
|
||||||
|
) -> Future[T_Retval]:
|
||||||
|
...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def start_task_soon(
|
||||||
|
self, func: Callable[..., T_Retval], *args: object, name: object = None
|
||||||
|
) -> Future[T_Retval]:
|
||||||
|
...
|
||||||
|
|
||||||
|
def start_task_soon(
|
||||||
|
self,
|
||||||
|
func: Callable[..., Awaitable[T_Retval] | T_Retval],
|
||||||
|
*args: object,
|
||||||
|
name: object = None,
|
||||||
|
) -> Future[T_Retval]:
|
||||||
|
"""
|
||||||
|
Start a task in the portal's task group.
|
||||||
|
|
||||||
|
The task will be run inside a cancel scope which can be cancelled by cancelling the
|
||||||
|
returned future.
|
||||||
|
|
||||||
|
:param func: the target function
|
||||||
|
:param args: positional arguments passed to ``func``
|
||||||
|
:param name: name of the task (will be coerced to a string if not ``None``)
|
||||||
|
:return: a future that resolves with the return value of the callable if the
|
||||||
|
task completes successfully, or with the exception raised in the task
|
||||||
|
:raises RuntimeError: if the portal is not running or if this method is called
|
||||||
|
from within the event loop thread
|
||||||
|
:rtype: concurrent.futures.Future[T_Retval]
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
self._check_running()
|
||||||
|
f: Future = Future()
|
||||||
|
self._spawn_task_from_thread(func, args, {}, name, f)
|
||||||
|
return f
|
||||||
|
|
||||||
|
def start_task(
|
||||||
|
self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
|
||||||
|
) -> tuple[Future[Any], Any]:
|
||||||
|
"""
|
||||||
|
Start a task in the portal's task group and wait until it signals for readiness.
|
||||||
|
|
||||||
|
This method works the same way as :meth:`.abc.TaskGroup.start`.
|
||||||
|
|
||||||
|
:param func: the target function
|
||||||
|
:param args: positional arguments passed to ``func``
|
||||||
|
:param name: name of the task (will be coerced to a string if not ``None``)
|
||||||
|
:return: a tuple of (future, task_status_value) where the ``task_status_value``
|
||||||
|
is the value passed to ``task_status.started()`` from within the target
|
||||||
|
function
|
||||||
|
:rtype: tuple[concurrent.futures.Future[Any], Any]
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def task_done(future: Future) -> None:
|
||||||
|
if not task_status_future.done():
|
||||||
|
if future.cancelled():
|
||||||
|
task_status_future.cancel()
|
||||||
|
elif future.exception():
|
||||||
|
task_status_future.set_exception(future.exception())
|
||||||
|
else:
|
||||||
|
exc = RuntimeError(
|
||||||
|
"Task exited without calling task_status.started()"
|
||||||
|
)
|
||||||
|
task_status_future.set_exception(exc)
|
||||||
|
|
||||||
|
self._check_running()
|
||||||
|
task_status_future: Future = Future()
|
||||||
|
task_status = _BlockingPortalTaskStatus(task_status_future)
|
||||||
|
f: Future = Future()
|
||||||
|
f.add_done_callback(task_done)
|
||||||
|
self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f)
|
||||||
|
return f, task_status_future.result()
|
||||||
|
|
||||||
|
def wrap_async_context_manager(
|
||||||
|
self, cm: AsyncContextManager[T_co]
|
||||||
|
) -> ContextManager[T_co]:
|
||||||
|
"""
|
||||||
|
Wrap an async context manager as a synchronous context manager via this portal.
|
||||||
|
|
||||||
|
Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the
|
||||||
|
middle until the synchronous context manager exits.
|
||||||
|
|
||||||
|
:param cm: an asynchronous context manager
|
||||||
|
:return: a synchronous context manager
|
||||||
|
|
||||||
|
.. versionadded:: 2.1
|
||||||
|
|
||||||
|
"""
|
||||||
|
return _BlockingAsyncContextManager(cm, self)
|
||||||
|
|
||||||
|
|
||||||
|
def create_blocking_portal() -> BlockingPortal:
|
||||||
|
"""
|
||||||
|
Create a portal for running functions in the event loop thread from external threads.
|
||||||
|
|
||||||
|
Use this function in asynchronous code when you need to allow external threads access to the
|
||||||
|
event loop where your asynchronous code is currently running.
|
||||||
|
|
||||||
|
.. deprecated:: 3.0
|
||||||
|
Use :class:`.BlockingPortal` directly.
|
||||||
|
|
||||||
|
"""
|
||||||
|
warn(
|
||||||
|
"create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() "
|
||||||
|
"directly",
|
||||||
|
DeprecationWarning,
|
||||||
|
)
|
||||||
|
return BlockingPortal()
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def start_blocking_portal(
|
||||||
|
backend: str = "asyncio", backend_options: dict[str, Any] | None = None
|
||||||
|
) -> Generator[BlockingPortal, Any, None]:
|
||||||
|
"""
|
||||||
|
Start a new event loop in a new thread and run a blocking portal in its main task.
|
||||||
|
|
||||||
|
The parameters are the same as for :func:`~anyio.run`.
|
||||||
|
|
||||||
|
:param backend: name of the backend
|
||||||
|
:param backend_options: backend options
|
||||||
|
:return: a context manager that yields a blocking portal
|
||||||
|
|
||||||
|
.. versionchanged:: 3.0
|
||||||
|
Usage as a context manager is now required.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
async def run_portal() -> None:
|
||||||
|
async with BlockingPortal() as portal_:
|
||||||
|
if future.set_running_or_notify_cancel():
|
||||||
|
future.set_result(portal_)
|
||||||
|
await portal_.sleep_until_stopped()
|
||||||
|
|
||||||
|
future: Future[BlockingPortal] = Future()
|
||||||
|
with ThreadPoolExecutor(1) as executor:
|
||||||
|
run_future = executor.submit(
|
||||||
|
_eventloop.run,
|
||||||
|
run_portal, # type: ignore[arg-type]
|
||||||
|
backend=backend,
|
||||||
|
backend_options=backend_options,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
wait(
|
||||||
|
cast(Iterable[Future], [run_future, future]),
|
||||||
|
return_when=FIRST_COMPLETED,
|
||||||
|
)
|
||||||
|
except BaseException:
|
||||||
|
future.cancel()
|
||||||
|
run_future.cancel()
|
||||||
|
raise
|
||||||
|
|
||||||
|
if future.done():
|
||||||
|
portal = future.result()
|
||||||
|
cancel_remaining_tasks = False
|
||||||
|
try:
|
||||||
|
yield portal
|
||||||
|
except BaseException:
|
||||||
|
cancel_remaining_tasks = True
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
portal.call(portal.stop, cancel_remaining_tasks)
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
run_future.result()
|
|
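# ---------------------------------------------------------------------------
# Usage sketch, not part of the committed file above: a minimal example of
# driving a blocking portal from synchronous code. It assumes the anyio
# package is installed and uses only the public APIs defined above
# (start_blocking_portal, BlockingPortal.call, BlockingPortal.start_task_soon).

import anyio
from anyio.from_thread import start_blocking_portal


async def add(x: int, y: int) -> int:
    await anyio.sleep(0.01)  # stand-in for real async work
    return x + y


def sync_main() -> None:
    # Starts an event loop in a worker thread and yields a running portal
    with start_blocking_portal(backend="asyncio") as portal:
        # call() blocks until the coroutine completes in the loop thread
        assert portal.call(add, 1, 2) == 3

        # start_task_soon() returns a concurrent.futures.Future instead
        future = portal.start_task_soon(add, 2, 3)
        assert future.result() == 5


if __name__ == "__main__":
    sync_main()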
@@ -0,0 +1,174 @@
from __future__ import annotations

import enum
import sys
from dataclasses import dataclass
from typing import Any, Generic, TypeVar, overload
from weakref import WeakKeyDictionary

from ._core._eventloop import get_asynclib

if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

T = TypeVar("T")
D = TypeVar("D")


async def checkpoint() -> None:
    """
    Check for cancellation and allow the scheduler to switch to another task.

    Equivalent to (but more efficient than)::

        await checkpoint_if_cancelled()
        await cancel_shielded_checkpoint()


    .. versionadded:: 3.0

    """
    await get_asynclib().checkpoint()


async def checkpoint_if_cancelled() -> None:
    """
    Enter a checkpoint if the enclosing cancel scope has been cancelled.

    This does not allow the scheduler to switch to a different task.

    .. versionadded:: 3.0

    """
    await get_asynclib().checkpoint_if_cancelled()


async def cancel_shielded_checkpoint() -> None:
    """
    Allow the scheduler to switch to another task but without checking for cancellation.

    Equivalent to (but potentially more efficient than)::

        with CancelScope(shield=True):
            await checkpoint()


    .. versionadded:: 3.0

    """
    await get_asynclib().cancel_shielded_checkpoint()


def current_token() -> object:
    """Return a backend specific token object that can be used to get back to the event loop."""
    return get_asynclib().current_token()


_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary()
_token_wrappers: dict[Any, _TokenWrapper] = {}


@dataclass(frozen=True)
class _TokenWrapper:
    __slots__ = "_token", "__weakref__"
    _token: object


class _NoValueSet(enum.Enum):
    NO_VALUE_SET = enum.auto()


class RunvarToken(Generic[T]):
    __slots__ = "_var", "_value", "_redeemed"

    def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]):
        self._var = var
        self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value
        self._redeemed = False


class RunVar(Generic[T]):
    """
    Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop.
    """

    __slots__ = "_name", "_default"

    NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET

    _token_wrappers: set[_TokenWrapper] = set()

    def __init__(
        self,
        name: str,
        default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET,
    ):
        self._name = name
        self._default = default

    @property
    def _current_vars(self) -> dict[str, T]:
        token = current_token()
        while True:
            try:
                return _run_vars[token]
            except TypeError:
                # Happens when token isn't weak referable (TrioToken).
                # This workaround does mean that some memory will leak on Trio until the problem
                # is fixed on their end.
                token = _TokenWrapper(token)
                self._token_wrappers.add(token)
            except KeyError:
                run_vars = _run_vars[token] = {}
                return run_vars

    @overload
    def get(self, default: D) -> T | D:
        ...

    @overload
    def get(self) -> T:
        ...

    def get(
        self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET
    ) -> T | D:
        try:
            return self._current_vars[self._name]
        except KeyError:
            if default is not RunVar.NO_VALUE_SET:
                return default
            elif self._default is not RunVar.NO_VALUE_SET:
                return self._default

        raise LookupError(
            f'Run variable "{self._name}" has no value and no default set'
        )

    def set(self, value: T) -> RunvarToken[T]:
        current_vars = self._current_vars
        token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET))
        current_vars[self._name] = value
        return token

    def reset(self, token: RunvarToken[T]) -> None:
        if token._var is not self:
            raise ValueError("This token does not belong to this RunVar")

        if token._redeemed:
            raise ValueError("This token has already been used")

        if token._value is _NoValueSet.NO_VALUE_SET:
            try:
                del self._current_vars[self._name]
            except KeyError:
                pass
        else:
            self._current_vars[self._name] = token._value

        token._redeemed = True

    def __repr__(self) -> str:
        return f"<RunVar name={self._name!r}>"
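# ---------------------------------------------------------------------------
# Usage sketch, not part of the committed file above: RunVar behaves like a
# ContextVar scoped to one event loop run, so each anyio.run() below starts
# from the default again. Assumes the anyio package is installed.

import anyio
from anyio.lowlevel import RunVar

_counter: RunVar[int] = RunVar("_counter", default=0)


async def bump() -> int:
    value = _counter.get() + 1
    _counter.set(value)
    return value


async def main() -> None:
    assert await bump() == 1
    assert await bump() == 2  # same run: the value persists


anyio.run(main)
anyio.run(main)  # a fresh run starts over from the default of 0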
@@ -0,0 +1,142 @@
from __future__ import annotations

from contextlib import contextmanager
from inspect import isasyncgenfunction, iscoroutinefunction
from typing import Any, Dict, Generator, Tuple, cast

import pytest
import sniffio

from ._core._eventloop import get_all_backends, get_asynclib
from .abc import TestRunner

_current_runner: TestRunner | None = None


def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]:
    if isinstance(backend, str):
        return backend, {}
    elif isinstance(backend, tuple) and len(backend) == 2:
        if isinstance(backend[0], str) and isinstance(backend[1], dict):
            return cast(Tuple[str, Dict[str, Any]], backend)

    raise TypeError("anyio_backend must be either a string or tuple of (string, dict)")


@contextmanager
def get_runner(
    backend_name: str, backend_options: dict[str, Any]
) -> Generator[TestRunner, object, None]:
    global _current_runner
    if _current_runner:
        yield _current_runner
        return

    asynclib = get_asynclib(backend_name)
    token = None
    if sniffio.current_async_library_cvar.get(None) is None:
        # Since we're in control of the event loop, we can cache the name of the async library
        token = sniffio.current_async_library_cvar.set(backend_name)

    try:
        backend_options = backend_options or {}
        with asynclib.TestRunner(**backend_options) as runner:
            _current_runner = runner
            yield runner
    finally:
        _current_runner = None
        if token:
            sniffio.current_async_library_cvar.reset(token)


def pytest_configure(config: Any) -> None:
    config.addinivalue_line(
        "markers",
        "anyio: mark the (coroutine function) test to be run "
        "asynchronously via anyio.",
    )


def pytest_fixture_setup(fixturedef: Any, request: Any) -> None:
    def wrapper(*args, anyio_backend, **kwargs):  # type: ignore[no-untyped-def]
        backend_name, backend_options = extract_backend_and_options(anyio_backend)
        if has_backend_arg:
            kwargs["anyio_backend"] = anyio_backend

        with get_runner(backend_name, backend_options) as runner:
            if isasyncgenfunction(func):
                yield from runner.run_asyncgen_fixture(func, kwargs)
            else:
                yield runner.run_fixture(func, kwargs)

    # Only apply this to coroutine functions and async generator functions in requests that involve
    # the anyio_backend fixture
    func = fixturedef.func
    if isasyncgenfunction(func) or iscoroutinefunction(func):
        if "anyio_backend" in request.fixturenames:
            has_backend_arg = "anyio_backend" in fixturedef.argnames
            fixturedef.func = wrapper
            if not has_backend_arg:
                fixturedef.argnames += ("anyio_backend",)


@pytest.hookimpl(tryfirst=True)
def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None:
    if collector.istestfunction(obj, name):
        inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj
        if iscoroutinefunction(inner_func):
            marker = collector.get_closest_marker("anyio")
            own_markers = getattr(obj, "pytestmark", ())
            if marker or any(marker.name == "anyio" for marker in own_markers):
                pytest.mark.usefixtures("anyio_backend")(obj)


@pytest.hookimpl(tryfirst=True)
def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None:
    def run_with_hypothesis(**kwargs: Any) -> None:
        with get_runner(backend_name, backend_options) as runner:
            runner.run_test(original_func, kwargs)

    backend = pyfuncitem.funcargs.get("anyio_backend")
    if backend:
        backend_name, backend_options = extract_backend_and_options(backend)

        if hasattr(pyfuncitem.obj, "hypothesis"):
            # Wrap the inner test function unless it's already wrapped
            original_func = pyfuncitem.obj.hypothesis.inner_test
            if original_func.__qualname__ != run_with_hypothesis.__qualname__:
                if iscoroutinefunction(original_func):
                    pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis

            return None

        if iscoroutinefunction(pyfuncitem.obj):
            funcargs = pyfuncitem.funcargs
            testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
            with get_runner(backend_name, backend_options) as runner:
                runner.run_test(pyfuncitem.obj, testargs)

            return True

    return None


@pytest.fixture(params=get_all_backends())
def anyio_backend(request: Any) -> Any:
    return request.param


@pytest.fixture
def anyio_backend_name(anyio_backend: Any) -> str:
    if isinstance(anyio_backend, str):
        return anyio_backend
    else:
        return anyio_backend[0]


@pytest.fixture
def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]:
    if isinstance(anyio_backend, str):
        return {}
    else:
        return anyio_backend[1]
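# ---------------------------------------------------------------------------
# Usage sketch, not part of the plugin above: how a test module opts in.
# Marking a coroutine test with @pytest.mark.anyio runs it through the runner
# machinery above; overriding the anyio_backend fixture narrows the
# parametrized backends down to one. Assumes pytest and anyio are installed.

import pytest
import anyio


@pytest.fixture
def anyio_backend():
    return "asyncio"  # instead of the default parametrization over all backends


@pytest.mark.anyio
async def test_sleep_is_instant() -> None:
    with anyio.fail_after(1):
        await anyio.sleep(0)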
@@ -0,0 +1,118 @@
from __future__ import annotations

from dataclasses import dataclass, field
from typing import Any, Callable, Mapping

from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead
from ..abc import AnyByteReceiveStream, ByteReceiveStream


@dataclass(eq=False)
class BufferedByteReceiveStream(ByteReceiveStream):
    """
    Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving
    capabilities in the form of a byte stream.
    """

    receive_stream: AnyByteReceiveStream
    _buffer: bytearray = field(init=False, default_factory=bytearray)
    _closed: bool = field(init=False, default=False)

    async def aclose(self) -> None:
        await self.receive_stream.aclose()
        self._closed = True

    @property
    def buffer(self) -> bytes:
        """The bytes currently in the buffer."""
        return bytes(self._buffer)

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return self.receive_stream.extra_attributes

    async def receive(self, max_bytes: int = 65536) -> bytes:
        if self._closed:
            raise ClosedResourceError

        if self._buffer:
            chunk = bytes(self._buffer[:max_bytes])
            del self._buffer[:max_bytes]
            return chunk
        elif isinstance(self.receive_stream, ByteReceiveStream):
            return await self.receive_stream.receive(max_bytes)
        else:
            # With a bytes-oriented object stream, we need to handle any surplus bytes we get from
            # the receive() call
            chunk = await self.receive_stream.receive()
            if len(chunk) > max_bytes:
                # Save the surplus bytes in the buffer
                self._buffer.extend(chunk[max_bytes:])
                return chunk[:max_bytes]
            else:
                return chunk

    async def receive_exactly(self, nbytes: int) -> bytes:
        """
        Read exactly the given amount of bytes from the stream.

        :param nbytes: the number of bytes to read
        :return: the bytes read
        :raises ~anyio.IncompleteRead: if the stream was closed before the requested
            amount of bytes could be read from the stream

        """
        while True:
            remaining = nbytes - len(self._buffer)
            if remaining <= 0:
                retval = self._buffer[:nbytes]
                del self._buffer[:nbytes]
                return bytes(retval)

            try:
                if isinstance(self.receive_stream, ByteReceiveStream):
                    chunk = await self.receive_stream.receive(remaining)
                else:
                    chunk = await self.receive_stream.receive()
            except EndOfStream as exc:
                raise IncompleteRead from exc

            self._buffer.extend(chunk)

    async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes:
        """
        Read from the stream until the delimiter is found or max_bytes have been read.

        :param delimiter: the marker to look for in the stream
        :param max_bytes: maximum number of bytes that will be read before raising
            :exc:`~anyio.DelimiterNotFound`
        :return: the bytes read (not including the delimiter)
        :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter
            was found
        :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the
            bytes read up to the maximum allowed

        """
        delimiter_size = len(delimiter)
        offset = 0
        while True:
            # Check if the delimiter can be found in the current buffer
            index = self._buffer.find(delimiter, offset)
            if index >= 0:
                found = self._buffer[:index]
                del self._buffer[: index + len(delimiter)]
                return bytes(found)

            # Check if the buffer is already at or over the limit
            if len(self._buffer) >= max_bytes:
                raise DelimiterNotFound(max_bytes)

            # Read more data into the buffer from the socket
            try:
                data = await self.receive_stream.receive()
            except EndOfStream as exc:
                raise IncompleteRead from exc

            # Move the offset forward and add the new data to the buffer
            offset = max(len(self._buffer) - delimiter_size + 1, 0)
            self._buffer.extend(data)
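# ---------------------------------------------------------------------------
# Usage sketch, not part of the committed file above: a memory object stream
# delivers fragmented bytes, and the buffered wrapper reassembles them with
# receive_until() and receive_exactly(). Assumes the anyio package is installed.

import anyio
from anyio.streams.buffered import BufferedByteReceiveStream


async def main() -> None:
    send, receive = anyio.create_memory_object_stream(10)
    buffered = BufferedByteReceiveStream(receive)

    # The "message" arrives split across arbitrary chunk boundaries
    await send.send(b"HEAD")
    await send.send(b"ER\r\nbo")
    await send.send(b"dy")

    assert await buffered.receive_until(b"\r\n", max_bytes=100) == b"HEADER"
    assert await buffered.receive_exactly(4) == b"body"


anyio.run(main)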
@@ -0,0 +1,147 @@
from __future__ import annotations

from io import SEEK_SET, UnsupportedOperation
from os import PathLike
from pathlib import Path
from typing import Any, BinaryIO, Callable, Mapping, cast

from .. import (
    BrokenResourceError,
    ClosedResourceError,
    EndOfStream,
    TypedAttributeSet,
    to_thread,
    typed_attribute,
)
from ..abc import ByteReceiveStream, ByteSendStream


class FileStreamAttribute(TypedAttributeSet):
    #: the open file descriptor
    file: BinaryIO = typed_attribute()
    #: the path of the file on the file system, if available (file must be a real file)
    path: Path = typed_attribute()
    #: the file number, if available (file must be a real file or a TTY)
    fileno: int = typed_attribute()


class _BaseFileStream:
    def __init__(self, file: BinaryIO):
        self._file = file

    async def aclose(self) -> None:
        await to_thread.run_sync(self._file.close)

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        attributes: dict[Any, Callable[[], Any]] = {
            FileStreamAttribute.file: lambda: self._file,
        }

        if hasattr(self._file, "name"):
            attributes[FileStreamAttribute.path] = lambda: Path(self._file.name)

        try:
            self._file.fileno()
        except UnsupportedOperation:
            pass
        else:
            attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno()

        return attributes


class FileReadStream(_BaseFileStream, ByteReceiveStream):
    """
    A byte stream that reads from a file in the file system.

    :param file: a file that has been opened for reading in binary mode

    .. versionadded:: 3.0
    """

    @classmethod
    async def from_path(cls, path: str | PathLike[str]) -> FileReadStream:
        """
        Create a file read stream by opening the given file.

        :param path: path of the file to read from

        """
        file = await to_thread.run_sync(Path(path).open, "rb")
        return cls(cast(BinaryIO, file))

    async def receive(self, max_bytes: int = 65536) -> bytes:
        try:
            data = await to_thread.run_sync(self._file.read, max_bytes)
        except ValueError:
            raise ClosedResourceError from None
        except OSError as exc:
            raise BrokenResourceError from exc

        if data:
            return data
        else:
            raise EndOfStream

    async def seek(self, position: int, whence: int = SEEK_SET) -> int:
        """
        Seek the file to the given position.

        .. seealso:: :meth:`io.IOBase.seek`

        .. note:: Not all file descriptors are seekable.

        :param position: position to seek the file to
        :param whence: controls how ``position`` is interpreted
        :return: the new absolute position
        :raises OSError: if the file is not seekable

        """
        return await to_thread.run_sync(self._file.seek, position, whence)

    async def tell(self) -> int:
        """
        Return the current stream position.

        .. note:: Not all file descriptors are seekable.

        :return: the current absolute position
        :raises OSError: if the file is not seekable

        """
        return await to_thread.run_sync(self._file.tell)


class FileWriteStream(_BaseFileStream, ByteSendStream):
    """
    A byte stream that writes to a file in the file system.

    :param file: a file that has been opened for writing in binary mode

    .. versionadded:: 3.0
    """

    @classmethod
    async def from_path(
        cls, path: str | PathLike[str], append: bool = False
    ) -> FileWriteStream:
        """
        Create a file write stream by opening the given file for writing.

        :param path: path of the file to write to
        :param append: if ``True``, open the file for appending; if ``False``, any existing file
            at the given path will be truncated

        """
        mode = "ab" if append else "wb"
        file = await to_thread.run_sync(Path(path).open, mode)
        return cls(cast(BinaryIO, file))

    async def send(self, item: bytes) -> None:
        try:
            await to_thread.run_sync(self._file.write, item)
        except ValueError:
            raise ClosedResourceError from None
        except OSError as exc:
            raise BrokenResourceError from exc
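# ---------------------------------------------------------------------------
# Usage sketch, not part of the committed file above: round-trips bytes
# through the file streams via a temporary file created only for the demo.
# Assumes the anyio package is installed.

import os
import tempfile

import anyio
from anyio.streams.file import FileReadStream, FileWriteStream


async def main() -> None:
    fd, path = tempfile.mkstemp()
    os.close(fd)
    try:
        async with await FileWriteStream.from_path(path) as send_stream:
            await send_stream.send(b"hello, file streams\n")

        async with await FileReadStream.from_path(path) as receive_stream:
            assert await receive_stream.receive() == b"hello, file streams\n"
    finally:
        os.remove(path)


anyio.run(main)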
@@ -0,0 +1,279 @@
from __future__ import annotations

from collections import OrderedDict, deque
from dataclasses import dataclass, field
from types import TracebackType
from typing import Generic, NamedTuple, TypeVar

from .. import (
    BrokenResourceError,
    ClosedResourceError,
    EndOfStream,
    WouldBlock,
    get_cancelled_exc_class,
)
from .._core._compat import DeprecatedAwaitable
from ..abc import Event, ObjectReceiveStream, ObjectSendStream
from ..lowlevel import checkpoint

T_Item = TypeVar("T_Item")
T_co = TypeVar("T_co", covariant=True)
T_contra = TypeVar("T_contra", contravariant=True)


class MemoryObjectStreamStatistics(NamedTuple):
    current_buffer_used: int  #: number of items stored in the buffer
    #: maximum number of items that can be stored on this stream (or :data:`math.inf`)
    max_buffer_size: float
    open_send_streams: int  #: number of unclosed clones of the send stream
    open_receive_streams: int  #: number of unclosed clones of the receive stream
    tasks_waiting_send: int  #: number of tasks blocked on :meth:`MemoryObjectSendStream.send`
    #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive`
    tasks_waiting_receive: int


@dataclass(eq=False)
class MemoryObjectStreamState(Generic[T_Item]):
    max_buffer_size: float = field()
    buffer: deque[T_Item] = field(init=False, default_factory=deque)
    open_send_channels: int = field(init=False, default=0)
    open_receive_channels: int = field(init=False, default=0)
    waiting_receivers: OrderedDict[Event, list[T_Item]] = field(
        init=False, default_factory=OrderedDict
    )
    waiting_senders: OrderedDict[Event, T_Item] = field(
        init=False, default_factory=OrderedDict
    )

    def statistics(self) -> MemoryObjectStreamStatistics:
        return MemoryObjectStreamStatistics(
            len(self.buffer),
            self.max_buffer_size,
            self.open_send_channels,
            self.open_receive_channels,
            len(self.waiting_senders),
            len(self.waiting_receivers),
        )


@dataclass(eq=False)
class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]):
    _state: MemoryObjectStreamState[T_co]
    _closed: bool = field(init=False, default=False)

    def __post_init__(self) -> None:
        self._state.open_receive_channels += 1

    def receive_nowait(self) -> T_co:
        """
        Receive the next item if it can be done without waiting.

        :return: the received item
        :raises ~anyio.ClosedResourceError: if this send stream has been closed
        :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been
            closed from the sending end
        :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks
            waiting to send

        """
        if self._closed:
            raise ClosedResourceError

        if self._state.waiting_senders:
            # Get the item from the next sender
            send_event, item = self._state.waiting_senders.popitem(last=False)
            self._state.buffer.append(item)
            send_event.set()

        if self._state.buffer:
            return self._state.buffer.popleft()
        elif not self._state.open_send_channels:
            raise EndOfStream

        raise WouldBlock

    async def receive(self) -> T_co:
        await checkpoint()
        try:
            return self.receive_nowait()
        except WouldBlock:
            # Add ourselves in the queue
            receive_event = Event()
            container: list[T_co] = []
            self._state.waiting_receivers[receive_event] = container

            try:
                await receive_event.wait()
            except get_cancelled_exc_class():
                # Ignore the immediate cancellation if we already received an item, so as not to
                # lose it
                if not container:
                    raise
            finally:
                self._state.waiting_receivers.pop(receive_event, None)

            if container:
                return container[0]
            else:
                raise EndOfStream

    def clone(self) -> MemoryObjectReceiveStream[T_co]:
        """
        Create a clone of this receive stream.

        Each clone can be closed separately. Only when all clones have been closed will the
        receiving end of the memory stream be considered closed by the sending ends.

        :return: the cloned stream

        """
        if self._closed:
            raise ClosedResourceError

        return MemoryObjectReceiveStream(_state=self._state)

    def close(self) -> None:
        """
        Close the stream.

        This works the exact same way as :meth:`aclose`, but is provided as a special case for the
        benefit of synchronous callbacks.

        """
        if not self._closed:
            self._closed = True
            self._state.open_receive_channels -= 1
            if self._state.open_receive_channels == 0:
                send_events = list(self._state.waiting_senders.keys())
                for event in send_events:
                    event.set()

    async def aclose(self) -> None:
        self.close()

    def statistics(self) -> MemoryObjectStreamStatistics:
        """
        Return statistics about the current state of this stream.

        .. versionadded:: 3.0
        """
        return self._state.statistics()

    def __enter__(self) -> MemoryObjectReceiveStream[T_co]:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.close()


@dataclass(eq=False)
class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]):
    _state: MemoryObjectStreamState[T_contra]
    _closed: bool = field(init=False, default=False)

    def __post_init__(self) -> None:
        self._state.open_send_channels += 1

    def send_nowait(self, item: T_contra) -> DeprecatedAwaitable:
        """
        Send an item immediately if it can be done without waiting.

        :param item: the item to send
        :raises ~anyio.ClosedResourceError: if this send stream has been closed
        :raises ~anyio.BrokenResourceError: if the stream has been closed from the
            receiving end
        :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting
            to receive

        """
        if self._closed:
            raise ClosedResourceError
        if not self._state.open_receive_channels:
            raise BrokenResourceError

        if self._state.waiting_receivers:
            receive_event, container = self._state.waiting_receivers.popitem(last=False)
            container.append(item)
            receive_event.set()
        elif len(self._state.buffer) < self._state.max_buffer_size:
            self._state.buffer.append(item)
        else:
            raise WouldBlock

        return DeprecatedAwaitable(self.send_nowait)

    async def send(self, item: T_contra) -> None:
        await checkpoint()
        try:
            self.send_nowait(item)
        except WouldBlock:
            # Wait until there's someone on the receiving end
            send_event = Event()
            self._state.waiting_senders[send_event] = item
            try:
                await send_event.wait()
            except BaseException:
                self._state.waiting_senders.pop(send_event, None)  # type: ignore[arg-type]
                raise

            if self._state.waiting_senders.pop(send_event, None):  # type: ignore[arg-type]
                raise BrokenResourceError

    def clone(self) -> MemoryObjectSendStream[T_contra]:
        """
        Create a clone of this send stream.

        Each clone can be closed separately. Only when all clones have been closed will the
        sending end of the memory stream be considered closed by the receiving ends.

        :return: the cloned stream

        """
        if self._closed:
            raise ClosedResourceError

        return MemoryObjectSendStream(_state=self._state)

    def close(self) -> None:
        """
        Close the stream.

        This works the exact same way as :meth:`aclose`, but is provided as a special case for the
        benefit of synchronous callbacks.

        """
        if not self._closed:
            self._closed = True
            self._state.open_send_channels -= 1
            if self._state.open_send_channels == 0:
                receive_events = list(self._state.waiting_receivers.keys())
                self._state.waiting_receivers.clear()
                for event in receive_events:
                    event.set()

    async def aclose(self) -> None:
        self.close()

    def statistics(self) -> MemoryObjectStreamStatistics:
        """
        Return statistics about the current state of this stream.

        .. versionadded:: 3.0
        """
        return self._state.statistics()

    def __enter__(self) -> MemoryObjectSendStream[T_contra]:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.close()
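# ---------------------------------------------------------------------------
# Usage sketch, not part of the committed file above: a producer/consumer
# pair over a memory object stream. Closing the send side (via "async with")
# makes the receive side's iteration stop cleanly. Assumes the anyio package
# is installed.

import anyio


async def producer(send_stream) -> None:
    async with send_stream:
        for i in range(3):
            await send_stream.send(i)


async def main() -> None:
    send_stream, receive_stream = anyio.create_memory_object_stream(max_buffer_size=1)
    async with anyio.create_task_group() as tg:
        tg.start_soon(producer, send_stream)
        async with receive_stream:
            async for item in receive_stream:
                print("received", item)


anyio.run(main)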
@@ -0,0 +1,140 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Any, Callable, Generic, Mapping, Sequence, TypeVar

from ..abc import (
    ByteReceiveStream,
    ByteSendStream,
    ByteStream,
    Listener,
    ObjectReceiveStream,
    ObjectSendStream,
    ObjectStream,
    TaskGroup,
)

T_Item = TypeVar("T_Item")
T_Stream = TypeVar("T_Stream")


@dataclass(eq=False)
class StapledByteStream(ByteStream):
    """
    Combines two byte streams into a single, bidirectional byte stream.

    Extra attributes will be provided from both streams, with the receive stream providing the
    values in case of a conflict.

    :param ByteSendStream send_stream: the sending byte stream
    :param ByteReceiveStream receive_stream: the receiving byte stream
    """

    send_stream: ByteSendStream
    receive_stream: ByteReceiveStream

    async def receive(self, max_bytes: int = 65536) -> bytes:
        return await self.receive_stream.receive(max_bytes)

    async def send(self, item: bytes) -> None:
        await self.send_stream.send(item)

    async def send_eof(self) -> None:
        await self.send_stream.aclose()

    async def aclose(self) -> None:
        await self.send_stream.aclose()
        await self.receive_stream.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return {
            **self.send_stream.extra_attributes,
            **self.receive_stream.extra_attributes,
        }


@dataclass(eq=False)
class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]):
    """
    Combines two object streams into a single, bidirectional object stream.

    Extra attributes will be provided from both streams, with the receive stream providing the
    values in case of a conflict.

    :param ObjectSendStream send_stream: the sending object stream
    :param ObjectReceiveStream receive_stream: the receiving object stream
    """

    send_stream: ObjectSendStream[T_Item]
    receive_stream: ObjectReceiveStream[T_Item]

    async def receive(self) -> T_Item:
        return await self.receive_stream.receive()

    async def send(self, item: T_Item) -> None:
        await self.send_stream.send(item)

    async def send_eof(self) -> None:
        await self.send_stream.aclose()

    async def aclose(self) -> None:
        await self.send_stream.aclose()
        await self.receive_stream.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return {
            **self.send_stream.extra_attributes,
            **self.receive_stream.extra_attributes,
        }


@dataclass(eq=False)
class MultiListener(Generic[T_Stream], Listener[T_Stream]):
    """
    Combines multiple listeners into one, serving connections from all of them at once.

    Any MultiListeners in the given collection of listeners will have their listeners moved into
    this one.

    Extra attributes are provided from each listener, with each successive listener overriding any
    conflicting attributes from the previous one.

    :param listeners: listeners to serve
    :type listeners: Sequence[Listener[T_Stream]]
    """

    listeners: Sequence[Listener[T_Stream]]

    def __post_init__(self) -> None:
        listeners: list[Listener[T_Stream]] = []
        for listener in self.listeners:
            if isinstance(listener, MultiListener):
                listeners.extend(listener.listeners)
                del listener.listeners[:]  # type: ignore[attr-defined]
            else:
                listeners.append(listener)

        self.listeners = listeners

    async def serve(
        self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None
    ) -> None:
        from .. import create_task_group

        async with create_task_group() as tg:
            for listener in self.listeners:
                tg.start_soon(listener.serve, handler, task_group)

    async def aclose(self) -> None:
        for listener in self.listeners:
            await listener.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        attributes: dict = {}
        for listener in self.listeners:
            attributes.update(listener.extra_attributes)

        return attributes
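# ---------------------------------------------------------------------------
# Usage sketch, not part of the committed file above: stapling the two halves
# of a memory object stream yields a simple loopback, handy in tests. Assumes
# the anyio package is installed.

import anyio
from anyio.streams.stapled import StapledObjectStream


async def main() -> None:
    send, receive = anyio.create_memory_object_stream(10)
    loopback = StapledObjectStream(send, receive)

    await loopback.send("ping")
    assert await loopback.receive() == "ping"
    await loopback.aclose()


anyio.run(main)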
@ -0,0 +1,143 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import codecs
|
||||||
|
from dataclasses import InitVar, dataclass, field
|
||||||
|
from typing import Any, Callable, Mapping
|
||||||
|
|
||||||
|
from ..abc import (
|
||||||
|
AnyByteReceiveStream,
|
||||||
|
AnyByteSendStream,
|
||||||
|
AnyByteStream,
|
||||||
|
ObjectReceiveStream,
|
||||||
|
ObjectSendStream,
|
||||||
|
ObjectStream,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class TextReceiveStream(ObjectReceiveStream[str]):
|
||||||
|
"""
|
||||||
|
Stream wrapper that decodes bytes to strings using the given encoding.
|
||||||
|
|
||||||
|
Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely
|
||||||
|
received unicode characters as soon as they come in.
|
||||||
|
|
||||||
|
:param transport_stream: any bytes-based receive stream
|
||||||
|
:param encoding: character encoding to use for decoding bytes to strings (defaults to
|
||||||
|
``utf-8``)
|
||||||
|
:param errors: handling scheme for decoding errors (defaults to ``strict``; see the
|
||||||
|
`codecs module documentation`_ for a comprehensive list of options)
|
||||||
|
|
||||||
|
.. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
|
||||||
|
"""
|
||||||
|
|
||||||
|
transport_stream: AnyByteReceiveStream
|
||||||
|
encoding: InitVar[str] = "utf-8"
|
||||||
|
errors: InitVar[str] = "strict"
|
||||||
|
_decoder: codecs.IncrementalDecoder = field(init=False)
|
||||||
|
|
||||||
|
def __post_init__(self, encoding: str, errors: str) -> None:
|
||||||
|
decoder_class = codecs.getincrementaldecoder(encoding)
|
||||||
|
self._decoder = decoder_class(errors=errors)
|
||||||
|
|
||||||
|
async def receive(self) -> str:
|
||||||
|
while True:
|
||||||
|
chunk = await self.transport_stream.receive()
|
||||||
|
decoded = self._decoder.decode(chunk)
|
||||||
|
if decoded:
|
||||||
|
return decoded
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await self.transport_stream.aclose()
|
||||||
|
self._decoder.reset()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return self.transport_stream.extra_attributes
|
||||||
|
|
||||||
|
|
||||||
|

@dataclass(eq=False)
class TextSendStream(ObjectSendStream[str]):
    """
    Sends strings to the wrapped stream as bytes using the given encoding.

    :param AnyByteSendStream transport_stream: any bytes-based send stream
    :param str encoding: character encoding to use for encoding strings to bytes (defaults to
        ``utf-8``)
    :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
        `codecs module documentation`_ for a comprehensive list of options)

    .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
    """

    transport_stream: AnyByteSendStream
    encoding: InitVar[str] = "utf-8"
    errors: str = "strict"
    _encoder: Callable[..., tuple[bytes, int]] = field(init=False)

    def __post_init__(self, encoding: str) -> None:
        self._encoder = codecs.getencoder(encoding)

    async def send(self, item: str) -> None:
        encoded = self._encoder(item, self.errors)[0]
        await self.transport_stream.send(encoded)

    async def aclose(self) -> None:
        await self.transport_stream.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return self.transport_stream.extra_attributes


@dataclass(eq=False)
class TextStream(ObjectStream[str]):
    """
    A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on
    send.

    Extra attributes will be provided from both streams, with the receive stream providing the
    values in case of a conflict.

    :param AnyByteStream transport_stream: any bytes-based stream
    :param str encoding: character encoding to use for encoding/decoding strings to/from bytes
        (defaults to ``utf-8``)
    :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
        `codecs module documentation`_ for a comprehensive list of options)

    .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
    """

    transport_stream: AnyByteStream
    encoding: InitVar[str] = "utf-8"
    errors: InitVar[str] = "strict"
    _receive_stream: TextReceiveStream = field(init=False)
    _send_stream: TextSendStream = field(init=False)

    def __post_init__(self, encoding: str, errors: str) -> None:
        self._receive_stream = TextReceiveStream(
            self.transport_stream, encoding=encoding, errors=errors
        )
        self._send_stream = TextSendStream(
            self.transport_stream, encoding=encoding, errors=errors
        )

    async def receive(self) -> str:
        return await self._receive_stream.receive()

    async def send(self, item: str) -> None:
        await self._send_stream.send(item)

    async def send_eof(self) -> None:
        await self.transport_stream.send_eof()

    async def aclose(self) -> None:
        await self._send_stream.aclose()
        await self._receive_stream.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return {
            **self._send_stream.extra_attributes,
            **self._receive_stream.extra_attributes,
        }
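Taken together, these classes let any byte stream carry str payloads. A minimal usage sketch (hedged: assumes a line-echo service listening on localhost:1234):

import anyio
from anyio.streams.text import TextStream

async def main() -> None:
    # connect_tcp() yields a plain byte stream; TextStream layers str <-> bytes on top
    async with await anyio.connect_tcp("localhost", 1234) as raw:
        text = TextStream(raw, encoding="utf-8")
        await text.send("hello\n")          # encoded to bytes before sending
        print(await text.receive())        # incoming bytes decoded incrementally

anyio.run(main)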
@ -0,0 +1,320 @@
from __future__ import annotations

import logging
import re
import ssl
from dataclasses import dataclass
from functools import wraps
from typing import Any, Callable, Mapping, Tuple, TypeVar

from .. import (
    BrokenResourceError,
    EndOfStream,
    aclose_forcefully,
    get_cancelled_exc_class,
)
from .._core._typedattr import TypedAttributeSet, typed_attribute
from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup

T_Retval = TypeVar("T_Retval")
_PCTRTT = Tuple[Tuple[str, str], ...]
_PCTRTTT = Tuple[_PCTRTT, ...]


class TLSAttribute(TypedAttributeSet):
    """Contains Transport Layer Security related attributes."""

    #: the selected ALPN protocol
    alpn_protocol: str | None = typed_attribute()
    #: the channel binding for type ``tls-unique``
    channel_binding_tls_unique: bytes = typed_attribute()
    #: the selected cipher
    cipher: tuple[str, str, int] = typed_attribute()
    #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert`
    #: for more information)
    peer_certificate: dict[str, str | _PCTRTTT | _PCTRTT] | None = typed_attribute()
    #: the peer certificate in binary form
    peer_certificate_binary: bytes | None = typed_attribute()
    #: ``True`` if this is the server side of the connection
    server_side: bool = typed_attribute()
    #: ciphers shared by the client during the TLS handshake (``None`` if this is the
    #: client side)
    shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute()
    #: the :class:`~ssl.SSLObject` used for encryption
    ssl_object: ssl.SSLObject = typed_attribute()
    #: ``True`` if this stream does (and expects) a closing TLS handshake when the
    #: stream is being closed
    standard_compatible: bool = typed_attribute()
    #: the TLS protocol version (e.g. ``TLSv1.2``)
    tls_version: str = typed_attribute()


@dataclass(eq=False)
class TLSStream(ByteStream):
    """
    A stream wrapper that encrypts all sent data and decrypts received data.

    This class has no public initializer; use :meth:`wrap` instead.
    All extra attributes from :class:`~TLSAttribute` are supported.

    :var AnyByteStream transport_stream: the wrapped stream
    """

    transport_stream: AnyByteStream
    standard_compatible: bool
    _ssl_object: ssl.SSLObject
    _read_bio: ssl.MemoryBIO
    _write_bio: ssl.MemoryBIO

    @classmethod
    async def wrap(
        cls,
        transport_stream: AnyByteStream,
        *,
        server_side: bool | None = None,
        hostname: str | None = None,
        ssl_context: ssl.SSLContext | None = None,
        standard_compatible: bool = True,
    ) -> TLSStream:
        """
        Wrap an existing stream with Transport Layer Security.

        This performs a TLS handshake with the peer.

        :param transport_stream: a bytes-transporting stream to wrap
        :param server_side: ``True`` if this is the server side of the connection,
            ``False`` if this is the client side (if omitted, will be set to ``False``
            if ``hostname`` has been provided, ``True`` otherwise). Used only to create
            a default context when an explicit context has not been provided.
        :param hostname: host name of the peer (if host name checking is desired)
        :param ssl_context: the SSLContext object to use (if not provided, a secure
            default will be created)
        :param standard_compatible: if ``False``, skip the closing handshake when closing the
            connection, and don't raise an exception if the peer does the same
        :raises ~ssl.SSLError: if the TLS handshake fails
        """
        if server_side is None:
            server_side = not hostname

        if not ssl_context:
            purpose = (
                ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH
            )
            ssl_context = ssl.create_default_context(purpose)

            # Re-enable detection of unexpected EOFs if it was disabled by Python
            if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"):
                ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF

        bio_in = ssl.MemoryBIO()
        bio_out = ssl.MemoryBIO()
        ssl_object = ssl_context.wrap_bio(
            bio_in, bio_out, server_side=server_side, server_hostname=hostname
        )
        wrapper = cls(
            transport_stream=transport_stream,
            standard_compatible=standard_compatible,
            _ssl_object=ssl_object,
            _read_bio=bio_in,
            _write_bio=bio_out,
        )
        await wrapper._call_sslobject_method(ssl_object.do_handshake)
        return wrapper

    async def _call_sslobject_method(
        self, func: Callable[..., T_Retval], *args: object
    ) -> T_Retval:
        while True:
            try:
                result = func(*args)
            except ssl.SSLWantReadError:
                try:
                    # Flush any pending writes first
                    if self._write_bio.pending:
                        await self.transport_stream.send(self._write_bio.read())

                    data = await self.transport_stream.receive()
                except EndOfStream:
                    self._read_bio.write_eof()
                except OSError as exc:
                    self._read_bio.write_eof()
                    self._write_bio.write_eof()
                    raise BrokenResourceError from exc
                else:
                    self._read_bio.write(data)
            except ssl.SSLWantWriteError:
                await self.transport_stream.send(self._write_bio.read())
            except ssl.SSLSyscallError as exc:
                self._read_bio.write_eof()
                self._write_bio.write_eof()
                raise BrokenResourceError from exc
            except ssl.SSLError as exc:
                self._read_bio.write_eof()
                self._write_bio.write_eof()
                if (
                    isinstance(exc, ssl.SSLEOFError)
                    or "UNEXPECTED_EOF_WHILE_READING" in exc.strerror
                ):
                    if self.standard_compatible:
                        raise BrokenResourceError from exc
                    else:
                        raise EndOfStream from None

                raise
            else:
                # Flush any pending writes first
                if self._write_bio.pending:
                    await self.transport_stream.send(self._write_bio.read())

                return result

    async def unwrap(self) -> tuple[AnyByteStream, bytes]:
        """
        Does the TLS closing handshake.

        :return: a tuple of (wrapped byte stream, bytes left in the read buffer)
        """
        await self._call_sslobject_method(self._ssl_object.unwrap)
        self._read_bio.write_eof()
        self._write_bio.write_eof()
        return self.transport_stream, self._read_bio.read()

    async def aclose(self) -> None:
        if self.standard_compatible:
            try:
                await self.unwrap()
            except BaseException:
                await aclose_forcefully(self.transport_stream)
                raise

        await self.transport_stream.aclose()

    async def receive(self, max_bytes: int = 65536) -> bytes:
        data = await self._call_sslobject_method(self._ssl_object.read, max_bytes)
        if not data:
            raise EndOfStream

        return data

    async def send(self, item: bytes) -> None:
        await self._call_sslobject_method(self._ssl_object.write, item)

    async def send_eof(self) -> None:
        tls_version = self.extra(TLSAttribute.tls_version)
        match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version)
        if match:
            major, minor = int(match.group(1)), int(match.group(2) or 0)
            if (major, minor) < (1, 3):
                raise NotImplementedError(
                    f"send_eof() requires at least TLSv1.3; current "
                    f"session uses {tls_version}"
                )

        raise NotImplementedError(
            "send_eof() has not yet been implemented for TLS streams"
        )

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return {
            **self.transport_stream.extra_attributes,
            TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol,
            TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding,
            TLSAttribute.cipher: self._ssl_object.cipher,
            TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False),
            TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(
                True
            ),
            TLSAttribute.server_side: lambda: self._ssl_object.server_side,
            TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers()
            if self._ssl_object.server_side
            else None,
            TLSAttribute.standard_compatible: lambda: self.standard_compatible,
            TLSAttribute.ssl_object: lambda: self._ssl_object,
            TLSAttribute.tls_version: self._ssl_object.version,
        }
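For orientation, a client-side sketch of TLSStream.wrap() over a plain TCP connection (example.com:443 stands in as a placeholder peer):

import anyio
from anyio.streams.tls import TLSStream, TLSAttribute

async def main() -> None:
    tcp = await anyio.connect_tcp("example.com", 443)
    # wrap() builds a default client context and performs the handshake here
    tls = await TLSStream.wrap(tcp, hostname="example.com")
    print(tls.extra(TLSAttribute.tls_version))   # e.g. "TLSv1.3"
    await tls.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
    print(await tls.receive())
    await tls.aclose()

anyio.run(main)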

@dataclass(eq=False)
class TLSListener(Listener[TLSStream]):
    """
    A convenience listener that wraps another listener and auto-negotiates a TLS session on every
    accepted connection.

    If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is
    called to do whatever post-mortem processing is deemed necessary.

    Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute.

    :param Listener listener: the listener to wrap
    :param ssl_context: the SSL context object
    :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap`
    :param handshake_timeout: time limit for the TLS handshake
        (passed to :func:`~anyio.fail_after`)
    """

    listener: Listener[Any]
    ssl_context: ssl.SSLContext
    standard_compatible: bool = True
    handshake_timeout: float = 30

    @staticmethod
    async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None:
        """
        Handle an exception raised during the TLS handshake.

        This method does 3 things:

        #. Forcefully closes the original stream
        #. Logs the exception (unless it was a cancellation exception) using the
           ``anyio.streams.tls`` logger
        #. Reraises the exception if it was a base exception or a cancellation exception

        :param exc: the exception
        :param stream: the original stream
        """
        await aclose_forcefully(stream)

        # Log all except cancellation exceptions
        if not isinstance(exc, get_cancelled_exc_class()):
            logging.getLogger(__name__).exception("Error during TLS handshake")

        # Only reraise base exceptions and cancellation exceptions
        if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()):
            raise

    async def serve(
        self,
        handler: Callable[[TLSStream], Any],
        task_group: TaskGroup | None = None,
    ) -> None:
        @wraps(handler)
        async def handler_wrapper(stream: AnyByteStream) -> None:
            from .. import fail_after

            try:
                with fail_after(self.handshake_timeout):
                    wrapped_stream = await TLSStream.wrap(
                        stream,
                        ssl_context=self.ssl_context,
                        standard_compatible=self.standard_compatible,
                    )
            except BaseException as exc:
                await self.handle_handshake_error(exc, stream)
            else:
                await handler(wrapped_stream)

        await self.listener.serve(handler_wrapper, task_group)

    async def aclose(self) -> None:
        await self.listener.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return {
            TLSAttribute.standard_compatible: lambda: self.standard_compatible,
        }
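A hedged server-side sketch of TLSListener; cert.pem/key.pem and port 8443 are assumptions for illustration:

import ssl
import anyio
from anyio.streams.tls import TLSListener, TLSStream

async def handle(stream: TLSStream) -> None:
    # each accepted connection arrives here already TLS-wrapped
    await stream.send(b"hello over TLS\n")
    await stream.aclose()

async def main() -> None:
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain("cert.pem", "key.pem")   # assumed to exist
    tcp_listener = await anyio.create_tcp_listener(local_port=8443)
    await TLSListener(tcp_listener, context).serve(handle)

anyio.run(main)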
@ -0,0 +1,249 @@
from __future__ import annotations

import os
import pickle
import subprocess
import sys
from collections import deque
from importlib.util import module_from_spec, spec_from_file_location
from typing import Callable, TypeVar, cast

from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class
from ._core._exceptions import BrokenWorkerProcess
from ._core._subprocesses import open_process
from ._core._synchronization import CapacityLimiter
from ._core._tasks import CancelScope, fail_after
from .abc import ByteReceiveStream, ByteSendStream, Process
from .lowlevel import RunVar, checkpoint_if_cancelled
from .streams.buffered import BufferedByteReceiveStream

WORKER_MAX_IDLE_TIME = 300  # 5 minutes

T_Retval = TypeVar("T_Retval")
_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers")
_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar(
    "_process_pool_idle_workers"
)
_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter")


async def run_sync(
    func: Callable[..., T_Retval],
    *args: object,
    cancellable: bool = False,
    limiter: CapacityLimiter | None = None,
) -> T_Retval:
    """
    Call the given function with the given arguments in a worker process.

    If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
    the worker process running it will be abruptly terminated using SIGKILL (or
    ``terminateProcess()`` on Windows).

    :param func: a callable
    :param args: positional arguments for the callable
    :param cancellable: ``True`` to allow cancellation of the operation while it's running
    :param limiter: capacity limiter to use to limit the total amount of processes running
        (if omitted, the default limiter is used)
    :return: an awaitable that yields the return value of the function.
    """

    async def send_raw_command(pickled_cmd: bytes) -> object:
        try:
            await stdin.send(pickled_cmd)
            response = await buffered.receive_until(b"\n", 50)
            status, length = response.split(b" ")
            if status not in (b"RETURN", b"EXCEPTION"):
                raise RuntimeError(
                    f"Worker process returned unexpected response: {response!r}"
                )

            pickled_response = await buffered.receive_exactly(int(length))
        except BaseException as exc:
            workers.discard(process)
            try:
                process.kill()
                with CancelScope(shield=True):
                    await process.aclose()
            except ProcessLookupError:
                pass

            if isinstance(exc, get_cancelled_exc_class()):
                raise
            else:
                raise BrokenWorkerProcess from exc

        retval = pickle.loads(pickled_response)
        if status == b"EXCEPTION":
            assert isinstance(retval, BaseException)
            raise retval
        else:
            return retval

    # First pickle the request before trying to reserve a worker process
    await checkpoint_if_cancelled()
    request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL)

    # If this is the first run in this event loop thread, set up the necessary variables
    try:
        workers = _process_pool_workers.get()
        idle_workers = _process_pool_idle_workers.get()
    except LookupError:
        workers = set()
        idle_workers = deque()
        _process_pool_workers.set(workers)
        _process_pool_idle_workers.set(idle_workers)
        get_asynclib().setup_process_pool_exit_at_shutdown(workers)

    async with (limiter or current_default_process_limiter()):
        # Pop processes from the pool (starting from the most recently used) until we find one that
        # hasn't exited yet
        process: Process
        while idle_workers:
            process, idle_since = idle_workers.pop()
            if process.returncode is None:
                stdin = cast(ByteSendStream, process.stdin)
                buffered = BufferedByteReceiveStream(
                    cast(ByteReceiveStream, process.stdout)
                )

                # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or
                # longer
                now = current_time()
                killed_processes: list[Process] = []
                while idle_workers:
                    if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME:
                        break

                    process, idle_since = idle_workers.popleft()
                    process.kill()
                    workers.remove(process)
                    killed_processes.append(process)

                with CancelScope(shield=True):
                    for process in killed_processes:
                        await process.aclose()

                break

            workers.remove(process)
        else:
            command = [sys.executable, "-u", "-m", __name__]
            process = await open_process(
                command, stdin=subprocess.PIPE, stdout=subprocess.PIPE
            )
            try:
                stdin = cast(ByteSendStream, process.stdin)
                buffered = BufferedByteReceiveStream(
                    cast(ByteReceiveStream, process.stdout)
                )
                with fail_after(20):
                    message = await buffered.receive(6)

                if message != b"READY\n":
                    raise BrokenWorkerProcess(
                        f"Worker process returned unexpected response: {message!r}"
                    )

                main_module_path = getattr(sys.modules["__main__"], "__file__", None)
                pickled = pickle.dumps(
                    ("init", sys.path, main_module_path),
                    protocol=pickle.HIGHEST_PROTOCOL,
                )
                await send_raw_command(pickled)
            except (BrokenWorkerProcess, get_cancelled_exc_class()):
                raise
            except BaseException as exc:
                process.kill()
                raise BrokenWorkerProcess(
                    "Error during worker process initialization"
                ) from exc

            workers.add(process)

        with CancelScope(shield=not cancellable):
            try:
                return cast(T_Retval, await send_raw_command(request))
            finally:
                if process in workers:
                    idle_workers.append((process, current_time()))


def current_default_process_limiter() -> CapacityLimiter:
    """
    Return the capacity limiter that is used by default to limit the number of worker processes.

    :return: a capacity limiter object
    """
    try:
        return _default_process_limiter.get()
    except LookupError:
        limiter = CapacityLimiter(os.cpu_count() or 2)
        _default_process_limiter.set(limiter)
        return limiter


def process_worker() -> None:
    # Redirect standard streams to os.devnull so that user code won't interfere with the
    # parent-worker communication
    stdin = sys.stdin
    stdout = sys.stdout
    sys.stdin = open(os.devnull)
    sys.stdout = open(os.devnull, "w")

    stdout.buffer.write(b"READY\n")
    while True:
        retval = exception = None
        try:
            command, *args = pickle.load(stdin.buffer)
        except EOFError:
            return
        except BaseException as exc:
            exception = exc
        else:
            if command == "run":
                func, args = args
                try:
                    retval = func(*args)
                except BaseException as exc:
                    exception = exc
            elif command == "init":
                main_module_path: str | None
                sys.path, main_module_path = args
                del sys.modules["__main__"]
                if main_module_path:
                    # Load the parent's main module but as __mp_main__ instead of __main__
                    # (like multiprocessing does) to avoid infinite recursion
                    try:
                        spec = spec_from_file_location("__mp_main__", main_module_path)
                        if spec and spec.loader:
                            main = module_from_spec(spec)
                            spec.loader.exec_module(main)
                            sys.modules["__main__"] = main
                    except BaseException as exc:
                        exception = exc

        try:
            if exception is not None:
                status = b"EXCEPTION"
                pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL)
            else:
                status = b"RETURN"
                pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL)
        except BaseException as exc:
            exception = exc
            status = b"EXCEPTION"
            pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL)

        stdout.buffer.write(b"%s %d\n" % (status, len(pickled)))
        stdout.buffer.write(pickled)

        # Respect SIGTERM
        if isinstance(exception, SystemExit):
            raise exception


if __name__ == "__main__":
    process_worker()
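Usage sketch for the module above: the callable and its arguments must be picklable, and the __main__ guard matters because workers re-import the main module (as __mp_main__):

import anyio
import anyio.to_process

def fib(n: int) -> int:            # module-level, hence picklable
    return n if n < 2 else fib(n - 1) + fib(n - 2)

async def main() -> None:
    # runs in a pooled worker process; killed if cancellable=True and the task is cancelled
    print(await anyio.to_process.run_sync(fib, 30))

if __name__ == "__main__":         # required: workers re-import this module
    anyio.run(main)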
@ -0,0 +1,67 @@
from __future__ import annotations

from typing import Callable, TypeVar
from warnings import warn

from ._core._eventloop import get_asynclib
from .abc import CapacityLimiter

T_Retval = TypeVar("T_Retval")


async def run_sync(
    func: Callable[..., T_Retval],
    *args: object,
    cancellable: bool = False,
    limiter: CapacityLimiter | None = None,
) -> T_Retval:
    """
    Call the given function with the given arguments in a worker thread.

    If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
    the thread will still run its course but its return value (or any raised exception) will be
    ignored.

    :param func: a callable
    :param args: positional arguments for the callable
    :param cancellable: ``True`` to allow cancellation of the operation
    :param limiter: capacity limiter to use to limit the total amount of threads running
        (if omitted, the default limiter is used)
    :return: an awaitable that yields the return value of the function.
    """
    return await get_asynclib().run_sync_in_worker_thread(
        func, *args, cancellable=cancellable, limiter=limiter
    )


async def run_sync_in_worker_thread(
    func: Callable[..., T_Retval],
    *args: object,
    cancellable: bool = False,
    limiter: CapacityLimiter | None = None,
) -> T_Retval:
    warn(
        "run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead",
        DeprecationWarning,
    )
    return await run_sync(func, *args, cancellable=cancellable, limiter=limiter)


def current_default_thread_limiter() -> CapacityLimiter:
    """
    Return the capacity limiter that is used by default to limit the number of concurrent threads.

    :return: a capacity limiter object
    """
    return get_asynclib().current_default_thread_limiter()


def current_default_worker_thread_limiter() -> CapacityLimiter:
    warn(
        "current_default_worker_thread_limiter() has been deprecated, "
        "use anyio.to_thread.current_default_thread_limiter() instead",
        DeprecationWarning,
    )
    return current_default_thread_limiter()
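And the thread counterpart, a minimal sketch for blocking calls that must not stall the event loop:

import time
import anyio
import anyio.to_thread

async def main() -> None:
    # time.sleep() would block the event loop; a worker thread absorbs it instead
    await anyio.to_thread.run_sync(time.sleep, 1)
    print("slept without blocking the event loop")

anyio.run(main)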
@ -0,0 +1 @@
pip
@ -0,0 +1,28 @@
Copyright 2014 Pallets

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1.  Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.

2.  Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.

3.  Neither the name of the copyright holder nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@ -0,0 +1,103 @@
Metadata-Version: 2.1
Name: click
Version: 8.1.6
Summary: Composable command line interface toolkit
Home-page: https://palletsprojects.com/p/click/
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://click.palletsprojects.com/
Project-URL: Changes, https://click.palletsprojects.com/changes/
Project-URL: Source Code, https://github.com/pallets/click/
Project-URL: Issue Tracker, https://github.com/pallets/click/issues/
Project-URL: Chat, https://discord.gg/pallets
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE.rst
Requires-Dist: colorama ; platform_system == "Windows"
Requires-Dist: importlib-metadata ; python_version < "3.8"

\$ click\_
==========

Click is a Python package for creating beautiful command line interfaces
in a composable way with as little code as necessary. It's the "Command
Line Interface Creation Kit". It's highly configurable but comes with
sensible defaults out of the box.

It aims to make the process of writing command line tools quick and fun
while also preventing any frustration caused by the inability to
implement an intended CLI API.

Click in three points:

- Arbitrary nesting of commands
- Automatic help page generation
- Supports lazy loading of subcommands at runtime


Installing
----------

Install and update using `pip`_:

.. code-block:: text

    $ pip install -U click

.. _pip: https://pip.pypa.io/en/stable/getting-started/


A Simple Example
----------------

.. code-block:: python

    import click

    @click.command()
    @click.option("--count", default=1, help="Number of greetings.")
    @click.option("--name", prompt="Your name", help="The person to greet.")
    def hello(count, name):
        """Simple program that greets NAME for a total of COUNT times."""
        for _ in range(count):
            click.echo(f"Hello, {name}!")

    if __name__ == '__main__':
        hello()

.. code-block:: text

    $ python hello.py --count=3
    Your name: Click
    Hello, Click!
    Hello, Click!
    Hello, Click!


Donate
------

The Pallets organization develops and supports Click and other popular
packages. In order to grow the community of contributors and users, and
allow the maintainers to devote more time to the projects, `please
donate today`_.

.. _please donate today: https://palletsprojects.com/donate


Links
-----

- Documentation: https://click.palletsprojects.com/
- Changes: https://click.palletsprojects.com/changes/
- PyPI Releases: https://pypi.org/project/click/
- Source Code: https://github.com/pallets/click
- Issue Tracker: https://github.com/pallets/click/issues
- Chat: https://discord.gg/pallets
@ -0,0 +1,39 @@
click-8.1.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
click-8.1.6.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
click-8.1.6.dist-info/METADATA,sha256=W1Mybcg8Rb1eG1EDtUDutr477A_PIJDi6UOpzqohBxo,3014
click-8.1.6.dist-info/RECORD,,
click-8.1.6.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
click-8.1.6.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6
click/__init__.py,sha256=85FPxtPII3PQQmI--C_LEOelVOl_T6w935vVDG-JG2A,3138
click/__pycache__/__init__.cpython-310.pyc,,
click/__pycache__/_compat.cpython-310.pyc,,
click/__pycache__/_termui_impl.cpython-310.pyc,,
click/__pycache__/_textwrap.cpython-310.pyc,,
click/__pycache__/_winconsole.cpython-310.pyc,,
click/__pycache__/core.cpython-310.pyc,,
click/__pycache__/decorators.cpython-310.pyc,,
click/__pycache__/exceptions.cpython-310.pyc,,
click/__pycache__/formatting.cpython-310.pyc,,
click/__pycache__/globals.cpython-310.pyc,,
click/__pycache__/parser.cpython-310.pyc,,
click/__pycache__/shell_completion.cpython-310.pyc,,
click/__pycache__/termui.cpython-310.pyc,,
click/__pycache__/testing.cpython-310.pyc,,
click/__pycache__/types.cpython-310.pyc,,
click/__pycache__/utils.cpython-310.pyc,,
click/_compat.py,sha256=IGKh_J5QdfKELitnRfTGHneejWxoCw_NX9tfMbdcg3w,18730
click/_termui_impl.py,sha256=3dFYv4445Nw-rFvZOTBMBPYwB1bxnmNk9Du6Dm_oBSU,24069
click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353
click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860
click/core.py,sha256=j6oEWtGgGna8JarD6WxhXmNnxLnfRjwXglbBc-8jr7U,114086
click/decorators.py,sha256=-ZlbGYgV-oI8jr_oH4RpuL1PFS-5QmeuEAsLDAYgxtw,18719
click/exceptions.py,sha256=fyROO-47HWFDjt2qupo7A3J32VlpM-ovJnfowu92K3s,9273
click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706
click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961
click/parser.py,sha256=LKyYQE9ZLj5KgIDXkrcTHQRXIggfoivX14_UVIn56YA,19067
click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
click/shell_completion.py,sha256=azDxqZAVS6SRTF7alFCFoCMam3g5IyaMUO-PoOT4imY,18396
click/termui.py,sha256=H7Q8FpmPelhJ2ovOhfCRhjMtCpNyjFXryAMLZODqsdc,28324
click/testing.py,sha256=1Qd4kS5bucn1hsNIRryd0WtTMuCpkA93grkWxT8POsU,16084
click/types.py,sha256=TZvz3hKvBztf-Hpa2enOmP4eznSPLzijjig5b_0XMxE,36391
click/utils.py,sha256=1476UduUNY6UePGU4m18uzVHLt1sKM2PP3yWsQhbItM,20298
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.40.0)
Root-Is-Purelib: true
Tag: py3-none-any
@ -0,0 +1 @@
click