Progress: zip file processing for the cron job
parent 73290773d5
commit e2b5e7aadd
@@ -6,27 +6,7 @@ ALTER TABLE rol_linea drop CONSTRAINT IF EXISTS rol_linea_id_linea_fkey;
ALTER TABLE linea_paradero drop CONSTRAINT IF EXISTS linea_paradero_id_linea_fkey;

-----
/*
update operador
set vigente =false
where id_operador not in (select agency_id from z_agency za);

-----

update operador
set vigente =true
where id_operador in (select agency_id from z_agency za);

-----

insert into operador
select agency_id, null as id_region , true as vigente,
agency_name, agency_url , agency_timezone , agency_lang , agency_phone , agency_fare_url
from z_agency za
where agency_id not in (select id_operador from operador za);

-----
*/
delete from gtfs_calendar ;

-----

@@ -35,12 +15,6 @@ insert into gtfs_calendar
select service_id, monday::bool , tuesday::bool , wednesday::bool , thursday::bool ,friday::bool ,saturday::bool ,sunday::bool
from z_calendar zc;

-----
/*
update paradero p
set vigente = false
where id_paradero::text not in (select stop_id from z_stops );
*/
-----

update paradero

@@ -67,11 +41,8 @@ where stop_id not in (select id_paradero::text from paradero);

delete from linea_paradero;

-----

/*delete from linea*/ ;

-----

update linea
set vigente = false
where id_red in (select id_red from gtfs_archivo where trim(upper(status))='PROCESANDO' )

@@ -82,6 +53,8 @@ and id_linea not in (select trim(zr.route_id)||'-'||trim(zt.direction_id::var

);

-----

update linea
set vigente = true
where id_red in (select id_red from gtfs_archivo where trim(upper(status))='PROCESANDO' )

@@ -92,19 +65,7 @@ and id_linea in (select trim(zr.route_id)||'-'||trim(zt.direction_id::varchar)

);

/*
update linea
route_short_name = (select substring(zt.trip_headsign FROM '(\S+) -') as route_short_name )
route_desc =
route_type =
route_url
route_color
route_text_color
route_long_name
vigente
id_red = (select id_red from gtfs_archivo where trim(upper(status))='PROCESANDO' limit 1)
id_linea
*/
-----

insert into linea
select distinct

@@ -143,7 +104,6 @@ trim(zr.route_id)||'-'||trim(zt.direction_id::varchar) as route_id,
where route_long_name not like '%'||route_short_name ||'%'
and (trim(zr.route_id)||'-'||trim(zt.direction_id::varchar) ) not in (select id_linea from linea )
union

select distinct
trim(zr.route_id)||'-'||trim(zt.direction_id::varchar) as route_id,
null,--zr.agency_id ,

@@ -0,0 +1,134 @@
from django.core.management.base import BaseCommand
from django.db import connection
from api.models import GtfsArchivo, RedTransporte
from decouple import config
import os
import csv
import zipfile
import tempfile


class Command(BaseCommand):
    help = 'Procesa los archivos gtfs en formato comprimido (zip)'

    def handle(self, *args, **options):
        # Base folder where the uploaded GTFS zip files are stored
        folder = config('GTFS_UPLOADS', '/tmp')

        redes = RedTransporte.objects.filter(vigente=True)
        for red in redes:
            print(f'nombre red: {red.nombre_red}')

            gtfs_archivo = GtfsArchivo.objects \
                .filter(vigente=False, id_red=red.id_red, status='PENDIENTE') \
                .order_by('-created').first()

            if gtfs_archivo is not None:
                filepath = os.path.join(folder, gtfs_archivo.ruta_archivo)
                print(f'procesa: {filepath}')
                procesa_zip(filepath)

                # Mark the previously active file(s) for this network as no longer current
                GtfsArchivo.objects.filter(vigente=True, id_red=red.id_red).update(vigente=False)

                gtfs_archivo.status = 'PROCESADO'
                gtfs_archivo.vigente = True
                gtfs_archivo.save()

        self.stdout.write(self.style.SUCCESS('¡Comando ejecutado con éxito!'))

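Since this command is meant to be run from cron (per the commit message), the following is a minimal, hypothetical scheduling sketch. The command name procesa_gtfs and the paths are assumptions: the actual name comes from the management-command file name, which this diff does not show.

    # Hypothetical crontab entry (command name and paths are assumptions):
    #   */30 * * * * cd /srv/proyecto && python manage.py procesa_gtfs >> /var/log/gtfs_cron.log 2>&1
    # The same command can also be triggered from Python, e.g. from a test or a task:
    from django.core.management import call_command
    call_command('procesa_gtfs')  # assumed command name
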
def procesa_zip(filepath):
    carpeta_destino = tempfile.gettempdir()

    # Unzip the GTFS file into the temporary folder
    archivo_zip = zipfile.ZipFile(filepath, "r")
    password = None  # the uploaded zip files are not password protected
    try:
        # Extract every file contained in the ZIP
        archivo_zip.extractall(pwd=password, path=carpeta_destino)
        print("Archivos extraídos con éxito", flush=True)
    except Exception as e:
        print("Error al extraer archivos:", e, flush=True)
    finally:
        archivo_zip.close()

    # GTFS files to load into the database
    gtfs_files = [
        "agency.txt",
        "calendar.txt",
        "feed_info.txt",
        "routes.txt",
        "shapes.txt",
        "stops.txt",
        "stop_times.txt",
        "trips.txt"
    ]

    try:
        # Create the staging tables and load the data
        cur = connection.cursor()
        for file in gtfs_files:
            txt_path = os.path.join(carpeta_destino, file)
            if os.path.exists(txt_path):
                create_and_load_table(txt_path, file, cur)
                os.remove(txt_path)

        procesa_tablas_z(cur)

        cur.close()
        connection.commit()

    except Exception as e:
        print(f'ERROR: {e}', flush=True)
    finally:
        print('==============', flush=True)
        print(f'fin proceso archivo: {filepath}', flush=True)


# Builds the staging table definition from the CSV header and loads the data
def create_and_load_table(filepath, file_name, cursor):
    with open(filepath, 'r', encoding='utf-8-sig') as file:
        reader = csv.reader(file)
        columns = next(reader)

        table_name = "z_" + file_name.replace('.txt', '')
        column_definitions = ',\n'.join([f"{column_name} TEXT" for column_name in columns])

        create_table = f"CREATE TABLE IF NOT EXISTS {table_name} (\n{column_definitions}\n);"
        print(f'SQL> {create_table}', flush=True)
        cursor.execute(create_table)
        print('', flush=True)

        truncate_table = f"truncate table {table_name}; commit;"
        print(f'SQL> {truncate_table}', flush=True)
        cursor.execute(truncate_table)
        print('', flush=True)

        # Rewind and skip the header row before streaming the remaining rows with COPY
        file.seek(0)
        next(reader)

        insert_query = f"COPY {table_name} ({', '.join(columns)}) FROM STDIN WITH CSV"
        print(f'SQL> {insert_query}', flush=True)
        cursor.copy_expert(sql=insert_query, file=file)
        print('', flush=True)

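To make the staging SQL that create_and_load_table builds concrete, here is a minimal sketch that mirrors the string construction above. The three-column agency.txt header is a simplified assumption; a real GTFS feed has more fields.

    # Simplified, hypothetical agency.txt header row:
    columns = ["agency_id", "agency_name", "agency_url"]
    table_name = "z_" + "agency.txt".replace('.txt', '')   # -> "z_agency"
    column_definitions = ',\n'.join(f"{c} TEXT" for c in columns)
    print(f"CREATE TABLE IF NOT EXISTS {table_name} (\n{column_definitions}\n);")
    print(f"COPY {table_name} ({', '.join(columns)}) FROM STDIN WITH CSV")
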
def procesa_tablas_z(cursor):
    current_folder = os.path.dirname(os.path.abspath(__file__))

    with open(os.path.join(current_folder, 'actualiza_GTFS.sql'), 'r') as file:
        content = file.read()

    # The update script uses '-----' as a statement separator
    arr_sql = content.split('-----')
    for sql in arr_sql:
        sql = sql.strip()
        if sql:
            print(f'SQL> {sql}', flush=True)
            cursor.execute(sql)
            print('', flush=True)

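The update script above relies on '-----' as a purely textual statement separator, which is what procesa_tablas_z splits on. A minimal sketch of that splitting, using two statements taken from the script:

    # '-----' splitting as done by procesa_tablas_z:
    content = "delete from gtfs_calendar ;\n-----\ndelete from linea_paradero;"
    for sql in content.split('-----'):
        sql = sql.strip()
        if sql:
            print(f'SQL> {sql}')

Because the separator is textual, a '-----' line that sits inside a /* ... */ block is split as well, leaving unterminated comment fragments; that is worth keeping in mind when editing actualiza_GTFS.sql.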