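"""
Loads the GTFS-realtime feed for Concepcion (https://app.ilab.cl/gtfs-rt/api/concepcion)
and caches it in Redis: trip updates are grouped per stop and stored as JSON documents
under keys of the form 'stop_id:<stop_id>', and the name of the last processed feed file
is kept under 'fileproto' so unchanged feeds are skipped.

Required environment variables: DB_REDIS_HOST, DB_REDIS_PORT, TZ.
"""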
import json
from datetime import datetime
from os import getenv

import pytz
import redis
import requests

import utils.gtfs_realtime_pb2 as gtfs

# Redis connection and timezone are configured through environment variables.
db_host = getenv('DB_REDIS_HOST')
db_port = getenv('DB_REDIS_PORT')
tz = pytz.timezone(getenv('TZ'))
r = redis.Redis(host=db_host, port=db_port, decode_responses=True)


def main():
    print(datetime.now(tz))

    fileproto = download_file_proto()
    if fileproto is None:
        return False

    # If the feed file has not changed since the last run, there is nothing to do.
    namefile = r.get('fileproto')
    if namefile == fileproto['name']:
        print(namefile, flush=True)
        return

    # delete all the current data before loading the new feed
    r.flushdb()

    r.set('fileproto', fileproto['name'])
    print(fileproto['name'], flush=True)
    data = read_file_proto_x_stop(fileproto['content'])

    # save data to redis db, one JSON document per stop key
    for key in data:
        r.set(key, json.dumps(data[key]))

    # print(json.dumps(data['route:549-1'], indent=4))


def download_file_proto():
    response = requests.get('https://app.ilab.cl/gtfs-rt/api/concepcion')
    if response.status_code == 200:
        # The feed file name is taken from the Content-Disposition header.
        content_disposition = response.headers['Content-Disposition']
        nombre_archivo = content_disposition.split('filename=')[1]
        return { 'name': nombre_archivo, 'content': response.content }
    return None


"""
|
|
|
|
agrupa los trayectos a llegar al paradero
|
|
|
|
"""
|
|
|
|
def read_file_proto_x_stop(content):
|
2023-12-10 19:17:03 -03:00
|
|
|
feed = gtfs.FeedMessage()
|
|
|
|
feed.ParseFromString(content)
|
2023-12-12 22:49:30 -03:00
|
|
|
data_stop = {}
|
2023-12-10 19:17:03 -03:00
|
|
|
|
|
|
|
for entity in feed.entity:
|
|
|
|
if entity.HasField('trip_update'):
|
|
|
|
trip_data = {
|
|
|
|
"id": str(entity.id),
|
2023-12-14 16:13:13 -03:00
|
|
|
"stop_id": None,
|
|
|
|
"stop_sequence": None,
|
2023-12-10 19:17:03 -03:00
|
|
|
"trip_id": entity.trip_update.trip.trip_id,
|
|
|
|
"route_id": entity.trip_update.trip.route_id,
|
|
|
|
"direction_id": entity.trip_update.trip.direction_id,
|
|
|
|
"start_time": entity.trip_update.trip.start_time,
|
|
|
|
"start_date": entity.trip_update.trip.start_date,
|
2023-12-14 16:13:13 -03:00
|
|
|
"arrival_time": None,
|
|
|
|
"hora_llegada": None,
|
2023-12-10 19:17:03 -03:00
|
|
|
"schedule_relationship": entity.trip_update.trip.schedule_relationship,
|
|
|
|
"vehicle_license_plate": entity.trip_update.vehicle.license_plate,
|
|
|
|
"latitude": entity.vehicle.position.latitude,
|
|
|
|
"longitude": entity.vehicle.position.longitude,
|
|
|
|
"bearing": entity.vehicle.position.bearing,
|
|
|
|
"odometer": entity.vehicle.position.odometer,
|
|
|
|
"speed": entity.vehicle.position.speed
|
|
|
|
}
|
|
|
|
|
|
|
|
for update in entity.trip_update.stop_time_update:
|
2023-12-14 16:13:13 -03:00
|
|
|
|
|
|
|
trip_data["stop_id"] = update.stop_id
|
|
|
|
trip_data["stop_sequence"] = update.stop_sequence
|
|
|
|
trip_data["arrival_time"] = update.arrival.time
|
|
|
|
trip_data["hora_llegada"] = datetime.fromtimestamp(update.arrival.time).strftime('%H:%M:%S')
|
|
|
|
|
2024-03-02 18:10:51 -03:00
|
|
|
stop_id = '--'
|
|
|
|
if update.stop_id != None and update.stop_id > '':
|
|
|
|
stop_id = update.stop_id
|
|
|
|
|
|
|
|
key = f'stop_id:{stop_id}'
|
2023-12-15 15:50:23 -03:00
|
|
|
if key in data_stop:
|
2023-12-12 22:49:30 -03:00
|
|
|
data_stop[key].append(trip_data)
|
|
|
|
else:
|
|
|
|
data_stop[key] = [ trip_data ]
|
2024-03-02 18:10:51 -03:00
|
|
|
|
|
|
|
else:
|
|
|
|
key = f'stop_id:none'
|
|
|
|
|
|
|
|
trip_data = {
|
|
|
|
"id": str(entity.id),
|
|
|
|
"trip_id": entity.vehicle.trip.trip_id,
|
|
|
|
"route_id":entity.vehicle.trip.route_id,
|
|
|
|
"direction_id": entity.vehicle.trip.direction_id,
|
|
|
|
"start_time": entity.vehicle.trip.start_time,
|
|
|
|
"start_date": entity.vehicle.trip.start_date,
|
|
|
|
"schedule_relationship": entity.vehicle.trip.schedule_relationship,
|
|
|
|
"vehicle_license_plate": entity.vehicle.vehicle.license_plate,
|
|
|
|
"latitude": entity.vehicle.position.latitude,
|
|
|
|
"longitude": entity.vehicle.position.longitude,
|
|
|
|
"bearing": entity.vehicle.position.bearing,
|
|
|
|
"odometer": entity.vehicle.position.odometer,
|
|
|
|
"speed": entity.vehicle.position.speed
|
|
|
|
}
|
|
|
|
|
|
|
|
if key in data_stop:
|
|
|
|
data_stop[key].append(trip_data)
|
|
|
|
else:
|
|
|
|
data_stop[key] = [ trip_data ]
|
2023-12-14 16:13:13 -03:00
|
|
|
|
    return data_stop


"""
|
|
|
|
agrupa los trayectos hechos por linea
|
|
|
|
"""
|
|
|
|
def read_file_proto_x_route(content):
|
|
|
|
feed = gtfs.FeedMessage()
|
|
|
|
feed.ParseFromString(content)
|
|
|
|
data_route = {}
|
|
|
|
|
|
|
|
for entity in feed.entity:
|
|
|
|
if entity.HasField('trip_update'):
|
2023-12-10 19:17:03 -03:00
|
|
|
trip_data = {
|
|
|
|
"id": str(entity.id),
|
2023-12-12 22:49:30 -03:00
|
|
|
"trip_id": entity.trip_update.trip.trip_id,
|
|
|
|
"route_id": entity.trip_update.trip.route_id,
|
|
|
|
"direction_id": entity.trip_update.trip.direction_id,
|
|
|
|
"start_time": entity.trip_update.trip.start_time,
|
|
|
|
"start_date": entity.trip_update.trip.start_date,
|
|
|
|
"schedule_relationship": entity.trip_update.trip.schedule_relationship,
|
|
|
|
"vehicle_license_plate": entity.trip_update.vehicle.license_plate,
|
|
|
|
"latitude": entity.vehicle.position.latitude,
|
|
|
|
"longitude": entity.vehicle.position.longitude,
|
|
|
|
"bearing": entity.vehicle.position.bearing,
|
|
|
|
"odometer": entity.vehicle.position.odometer,
|
|
|
|
"speed": entity.vehicle.position.speed
|
2023-12-10 19:17:03 -03:00
|
|
|
}
|
2023-12-12 22:49:30 -03:00
|
|
|
|
|
|
|
for update in entity.trip_update.stop_time_update:
|
|
|
|
key = f'route:{trip_data["route_id"]}-{trip_data["direction_id"]}'
|
|
|
|
if key in data_route:
|
|
|
|
data_route[key].append(trip_data)
|
|
|
|
else:
|
|
|
|
data_route[key] = [ trip_data ]
|
|
|
|
|
|
|
|
return data_route;
|
2023-12-10 19:17:03 -03:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    main()