"""FastAPI router for the GTFS load process (Carga_GTFS/cargaAPI/router/router.py)."""
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.exc import SQLAlchemyError, InvalidRequestError
from database.db import engine, Base
from carga.procesos import (
extraer_zip,
identificar_zip,
inspeccionar_txts,
transformar_txts,
verificar_registros_db,
primera_carga,
extraer_db,
verificar_registros_obsoletos,
terminar_carga,
verificar_agregar_registros,
restaurar_db,
carga_error,
)
Base.prepare(engine)

# Build the session factory ONCE at import time. The previous code created a
# brand-new sessionmaker on every request, which is pure overhead — the
# factory is stateless configuration and is meant to be shared.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def get_db():
    """FastAPI dependency: yield a database session and always close it.

    Yields:
        Session: a SQLAlchemy ORM session bound to the module-level engine.
        The session is closed in ``finally`` even if the request handler raises.
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
# Working directories for the GTFS load pipeline.
config = dict(
    almacen_zips="zip/",                  # incoming GTFS .zip files
    almacen_txts="txts/",                 # extracted .txt feed files
    zips_procesados="zips_procesados/",   # archive for already-processed zips
)
carga_gtfs = APIRouter()


@carga_gtfs.get("/")
def root():
    """Landing endpoint; directs the user to /docs to exercise the load API."""
    mensaje = "Pagina Principal. Use /docs en la URL para probar la carga."
    return mensaje
@carga_gtfs.post("/carga")
async def carga(db: Session = Depends(get_db)):
    """Load a pending GTFS zip into the database.

    Flow: identify the pending zip, extract and inspect its .txt files,
    transform them into dataframes, then either perform a first load (when
    the DB is empty) or an incremental update (drop obsolete records, add
    new ones). On any failure in the incremental path, the database is
    rolled back, the pre-load snapshot is reloaded, the zip is flagged as
    failed, and a 500 is raised.

    NOTE(review): the original had two byte-identical except handlers
    (SQLAlchemyError and Exception); they are merged into a single
    ``except Exception`` here — behavior is unchanged since SQLAlchemyError
    is a subclass of Exception.
    """
    zip_info = identificar_zip(db=db)
    zip_ruta = config["almacen_zips"] + zip_info.zip + ".zip"
    extraer_zip(zip=zip_info, zip_ruta=zip_ruta, db=db)
    inspeccionar_txts(
        zip=zip_info, almacen_txts=config["almacen_txts"], zip_ruta=zip_ruta, db=db
    )
    (
        df_agency,
        df_calendar,
        df_routes,
        df_stop_times,
        df_stops,
        df_trips,
        df_shapes,
        df_shapes_reference,
    ) = transformar_txts(zip=zip_info, db=db)

    # Keyword bundle for the freshly transformed GTFS dataframes; the load
    # and both verification steps all take exactly these arguments.
    df_zip = {
        "df_agency": df_agency,
        "df_calendar": df_calendar,
        "df_routes": df_routes,
        "df_stop_times": df_stop_times,
        "df_stops": df_stops,
        "df_trips": df_trips,
        "df_shapes": df_shapes,
        "df_shapes_reference": df_shapes_reference,
    }

    def _df_db_actual():
        """Re-read the current DB state and map it to the df_db_* keywords."""
        (
            df_db_agency,
            df_db_routes,
            df_db_calendar,
            df_db_stops,
            df_db_shapes_reference,
            df_db_shapes,
            df_db_trips,
            df_db_stop_times,
        ) = extraer_db(engine=engine)
        return {
            "df_db_agency": df_db_agency,
            "df_db_routes": df_db_routes,
            "df_db_calendar": df_db_calendar,
            "df_db_stops": df_db_stops,
            "df_db_shapes_reference": df_db_shapes_reference,
            "df_db_shapes": df_db_shapes,
            "df_db_trips": df_db_trips,
            "df_db_stop_times": df_db_stop_times,
        }

    # Snapshot of the DB taken BEFORE any modification; used to restore the
    # previous state if the incremental update fails part-way through.
    (
        db_inicial_agency,
        db_inicial_routes,
        db_inicial_calendar,
        db_inicial_stops,
        db_inicial_shapes_reference,
        db_inicial_shapes,
        db_inicial_trips,
        db_inicial_stop_times,
    ) = extraer_db(engine=engine)

    bd_vacia = verificar_registros_db(db=db)
    if bd_vacia:
        # Empty database: straight initial load, nothing to reconcile.
        primera_carga(zip=zip_info, db=db, engine=engine, **df_zip)
        terminar_carga(
            zip=zip_info,
            db=db,
            zips_procesados=config["zips_procesados"],
            directorio_txts=config["almacen_txts"],
        )
        return "Terminó correctamente"

    try:
        # Each verification step compares the new feed against the CURRENT
        # DB contents, so the db state is re-read before each call (the
        # first call mutates the DB).
        verificar_registros_obsoletos(zip=zip_info, db=db, **df_zip, **_df_db_actual())
        verificar_agregar_registros(zip=zip_info, db=db, **df_zip, **_df_db_actual())
        terminar_carga(
            zip=zip_info,
            db=db,
            zips_procesados=config["zips_procesados"],
            directorio_txts=config["almacen_txts"],
        )
        return "La carga termino con exito."
    except Exception as e:
        # Roll back, reload the pre-load snapshot, flag the zip as failed.
        restaurar_db(db)
        primera_carga(
            zip=zip_info,
            db=db,
            engine=engine,
            df_agency=db_inicial_agency,
            df_calendar=db_inicial_calendar,
            df_routes=db_inicial_routes,
            df_stop_times=db_inicial_stop_times,
            df_stops=db_inicial_stops,
            df_trips=db_inicial_trips,
            df_shapes=db_inicial_shapes,
            df_shapes_reference=db_inicial_shapes_reference,
        )
        carga_error(
            zip=zip_info,
            db=db,
            zips_procesados=config["zips_procesados"],
            directorio_txts=config["almacen_txts"],
        )
        raise HTTPException(status_code=500, detail=f"Error durante la carga: {str(e)}")