# forked from TDTP/admin_transporte_backend
import csv
import logging
import os
import tempfile
import threading
import zipfile

import redis
from django.db import connection
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.decorators import action
@csrf_exempt
@action(detail=False, methods=['get', 'post'])
def upload_zip(request):
    """Upload endpoint for a GTFS zip file.

    GET  -> report whether a load process is currently running (tracked by
            the Redis key ``filezip``) and which file it is processing.
    POST -> start processing the uploaded ``filezip`` file in a background
            thread, unless a process is already running.
    """
    db_host = os.getenv('DB_REDIS_HOST')
    db_port = os.getenv('DB_REDIS_PORT')
    r = redis.Redis(host=db_host, port=db_port, decode_responses=True)

    if request.method == 'GET':
        filezip = r.get('filezip')
        return JsonResponse({
            'process': filezip is not None,
            'filename': filezip
        })

    if request.method == 'POST':
        # Fast path: reject immediately if a process is already running.
        if r.get('filezip') is not None:
            return HttpResponse('Ya existe un proceso en ejecución', status=400)

        # .get() avoids a KeyError (-> HTTP 500) when the field is missing.
        filezip = request.FILES.get('filezip')
        if filezip is not None and not filezip.closed and hasattr(filezip, 'temporary_file_path'):
            # Claim the "process running" flag atomically (nx=True) so two
            # simultaneous uploads cannot both pass the check above and
            # start a worker thread.
            if not r.set('filezip', filezip.name, nx=True):
                return HttpResponse('Ya existe un proceso en ejecución', status=400)

            # NOTE(review): the temporary upload file is handed to a thread
            # that outlives the request; Django may delete the temp file when
            # the request finishes — confirm the worker wins this race.
            hilo = threading.Thread(target=procesa_zip, args=(filezip,))
            hilo.start()
            return JsonResponse({
                'ok': True,
                'message': 'Procesando archivo zip'
            })

        return JsonResponse({
            'ok': False,
            'message': 'Error en archivo zip enviado'
        })
def procesa_zip(file_upload):
    """Background worker: extract an uploaded GTFS zip and load its files.

    Extracts the archive into the system temp directory, loads each known
    GTFS ``.txt`` file into a ``z_*`` staging table, runs the post-load SQL
    script, and — no matter what fails — clears the Redis ``filezip`` flag
    so a new upload can start.
    """
    carpeta_destino = tempfile.gettempdir()

    # Extract the zip. The ZipFile constructor is inside the try so a corrupt
    # archive cannot kill the thread before the finally-cleanup below; a
    # failed extraction is best-effort — the loop still loads whatever files
    # were extracted before the error.
    try:
        with zipfile.ZipFile(file_upload.temporary_file_path(), "r") as archivo_zip:
            # NOTE(review): extractall strips absolute paths and '..' members,
            # but the archive content is still untrusted input — verify.
            archivo_zip.extractall(path=carpeta_destino)
        print("Archivos extraídos con éxito", flush=True)
    except Exception as e:
        print("Error al extraer archivos:", e, flush=True)

    # GTFS files to load into the database
    gtfs_files = [
        "agency.txt",
        "calendar.txt",
        "feed_info.txt",
        "routes.txt",
        "shapes.txt",
        "stops.txt",
        "stop_times.txt",
        "trips.txt"
    ]

    try:
        # Create the staging tables and load the data; the context manager
        # guarantees the cursor is closed even if a load fails.
        with connection.cursor() as cur:
            for file in gtfs_files:
                filepath = os.path.join(carpeta_destino, file)
                if os.path.exists(filepath):
                    create_and_load_table(filepath, file, cur)
                    os.remove(filepath)

            procesa_tablas_z(cur)

        connection.commit()

    except Exception as e:
        print(f'ERROR: {e}', flush=True)
    finally:
        # Notify Redis that the process has finished.
        db_host = os.getenv('DB_REDIS_HOST')
        db_port = os.getenv('DB_REDIS_PORT')
        r = redis.Redis(host=db_host, port=db_port, decode_responses=True)
        r.delete('filezip')

    print('==============', flush=True)
    # Log the uploaded file's name (the original printed the closed ZipFile
    # object's repr here).
    print(f'fin proceso archivo: {file_upload.name}', flush=True)
# Helper: build the table definition as text and bulk-load the data.
def create_and_load_table(filepath, file_name, cursor):
    """Create (if needed), truncate and bulk-load a ``z_*`` staging table.

    The table is named ``z_<file_name minus .txt>`` and every column is TEXT,
    taken from the CSV header row. The table is truncated first so a re-run
    replaces the previous load.

    Raises ValueError if a header name is not a plain identifier — the names
    are interpolated into DDL/COPY statements, so this guards against SQL
    injection via a crafted header row.
    """
    with open(filepath, 'r', encoding='utf-8-sig') as file:
        reader = csv.reader(file)
        # Strip stray whitespace so e.g. a trailing space cannot break the DDL.
        columns = [c.strip() for c in next(reader)]

        # Header names go straight into SQL text below: refuse anything that
        # is not a simple identifier.
        for column_name in columns:
            if not column_name.isidentifier():
                raise ValueError(f'Invalid column name in {file_name}: {column_name!r}')

        table_name = "z_" + file_name.replace('.txt', '')
        column_definitions = ',\n'.join(f"{column_name} TEXT" for column_name in columns)

        create_table = f"CREATE TABLE IF NOT EXISTS {table_name} (\n{column_definitions}\n);"
        print(f'SQL> {create_table}', flush=True)
        cursor.execute(create_table)
        print('', flush=True)

        truncate_table = f"truncate table {table_name}; commit;"
        print(f'SQL> {truncate_table}', flush=True)
        cursor.execute(truncate_table)
        print('', flush=True)

        # Rewind and skip the header so COPY streams only the data rows.
        file.seek(0)
        next(reader)

        insert_query = f"COPY {table_name} ({', '.join(columns)}) FROM STDIN WITH CSV"
        print(f'SQL> {insert_query}', flush=True)
        # copy_expert is psycopg2-specific: streams the remaining file into
        # the table server-side.
        cursor.copy_expert(sql=insert_query, file=file)
        print('', flush=True)
def procesa_tablas_z(cursor):
    """Run the post-load SQL script against the z_* staging tables.

    Reads ``actualiza_GTFS.sql`` from this module's directory; the file holds
    multiple statements separated by lines of five dashes (``-----``). Each
    non-empty statement is executed in order on the given cursor.
    """
    current_folder = os.path.dirname(os.path.abspath(__file__))

    with open(os.path.join(current_folder, 'actualiza_GTFS.sql'), 'r') as file:
        content = file.read()

    for sql in content.split('-----'):
        sql = sql.strip()
        # Skip the empty fragments produced by leading/trailing separators.
        if sql:
            print(f'SQL> {sql}', flush=True)
            cursor.execute(sql)
            print('', flush=True)