forked from TDTP/proto_backend
Update main.py
parent 839fb4a297
commit 9931189560
main.py (140 changed lines)
@@ -1,20 +1,18 @@
+# coding: utf-8
+from apscheduler.schedulers.background import BlockingScheduler
 from datetime import datetime
-from os import getenv
-import utils.gtfs_realtime_pb2 as gtfs
-import pytz
-import redis
+from main import read_file_proto_x_stop, r, tz
 import requests
+import logging
 import json
 
-db_host = getenv('DB_REDIS_HOST')
-db_port = getenv('DB_REDIS_PORT')
-tz = pytz.timezone(getenv('TZ'))
-r = redis.Redis(host=db_host, port=db_port, decode_responses=True)
+import os
 
-def main():
+def rutina_principal():
     print(datetime.now(tz))
 
-    fileproto = download_file_proto()
+    fileproto = download_file_backend()
     if fileproto == None:
         return False
 
@@ -34,127 +32,19 @@ def main():
     for key in data:
         r.set(key, json.dumps(data[key]))
 
+    # print(json.dumps(data['route:549-1'], indent=4))
 
 
-def download_file_proto():
-    response = requests.get('https://app.ilab.cl/gtfs-rt/api/concepcion')
+def download_file_backend():
+    response = requests.get(os.environ.get('BACKEND'))
     if response.status_code == 200:
         content_disposition = response.headers['Content-Disposition']
         nombre_archivo = content_disposition.split('filename=')[1]
         return { 'name': nombre_archivo, 'content': response.content }
     return None
 
+sched = BlockingScheduler()
+sched.add_job(rutina_principal, 'interval', seconds=int(os.environ.get('INTERVAL'))) #will do the print_t work for every 30 seconds
 
+if __name__ == '__main__':
+    sched.start()
-"""
-agrupa los trayectos a llegar al paradero
-"""
-def read_file_proto_x_stop(content):
-    feed = gtfs.FeedMessage()
-    feed.ParseFromString(content)
-    data_stop = {}
-
-    for entity in feed.entity:
-        if entity.HasField('trip_update'):
-            # agregar trayectos con paraderos
-            for update in entity.trip_update.stop_time_update:
-
-                trip_data = {
-                    "id": str(entity.id),
-                    "stop_id": None,
-                    "stop_sequence": None,
-                    "trip_id": entity.trip_update.trip.trip_id,
-                    "route_id": entity.trip_update.trip.route_id,
-                    "direction_id": entity.trip_update.trip.direction_id,
-                    "start_time": entity.trip_update.trip.start_time,
-                    "start_date": entity.trip_update.trip.start_date,
-                    "arrival_time": None,
-                    "hora_llegada": None,
-                    "schedule_relationship": entity.trip_update.trip.schedule_relationship,
-                    "vehicle_license_plate": entity.trip_update.vehicle.license_plate,
-                    "latitude": entity.vehicle.position.latitude,
-                    "longitude": entity.vehicle.position.longitude,
-                    "bearing": entity.vehicle.position.bearing,
-                    "odometer": entity.vehicle.position.odometer,
-                    "speed": entity.vehicle.position.speed
-                }
-
-                trip_data["stop_id"] = update.stop_id
-                trip_data["stop_sequence"] = update.stop_sequence
-                trip_data["arrival_time"] = update.arrival.time
-                trip_data["hora_llegada"] = datetime.fromtimestamp(update.arrival.time).strftime('%H:%M:%S')
-
-                stop_id = 'none'
-                if update.stop_id != None and update.stop_id > '':
-                    stop_id = update.stop_id
-
-                key = f'stop_id:{stop_id}'
-
-                if key in data_stop:
-                    data_stop[key].append(trip_data)
-                else:
-                    data_stop[key] = [ trip_data ]
-        else:
-            # agregar trayectos sin paradero
-            key = f'stop_id:none'
-
-            trip_data = {
-                "id": str(entity.id),
-                "trip_id": entity.vehicle.trip.trip_id,
-                "route_id":entity.vehicle.trip.route_id,
-                "direction_id": entity.vehicle.trip.direction_id,
-                "start_time": entity.vehicle.trip.start_time,
-                "start_date": entity.vehicle.trip.start_date,
-                "schedule_relationship": entity.vehicle.trip.schedule_relationship,
-                "vehicle_license_plate": entity.vehicle.vehicle.license_plate,
-                "latitude": entity.vehicle.position.latitude,
-                "longitude": entity.vehicle.position.longitude,
-                "bearing": entity.vehicle.position.bearing,
-                "odometer": entity.vehicle.position.odometer,
-                "speed": entity.vehicle.position.speed
-            }
-
-            if key in data_stop:
-                data_stop[key].append(trip_data)
-            else:
-                data_stop[key] = [ trip_data ]
-
-    return data_stop;
-
-
-"""
-agrupa los trayectos hechos por linea
-"""
-def read_file_proto_x_route(content):
-    feed = gtfs.FeedMessage()
-    feed.ParseFromString(content)
-    data_route = {}
-
-    for entity in feed.entity:
-        if entity.HasField('trip_update'):
-            trip_data = {
-                "id": str(entity.id),
-                "trip_id": entity.trip_update.trip.trip_id,
-                "route_id": entity.trip_update.trip.route_id,
-                "direction_id": entity.trip_update.trip.direction_id,
-                "start_time": entity.trip_update.trip.start_time,
-                "start_date": entity.trip_update.trip.start_date,
-                "schedule_relationship": entity.trip_update.trip.schedule_relationship,
-                "vehicle_license_plate": entity.trip_update.vehicle.license_plate,
-                "latitude": entity.vehicle.position.latitude,
-                "longitude": entity.vehicle.position.longitude,
-                "bearing": entity.vehicle.position.bearing,
-                "odometer": entity.vehicle.position.odometer,
-                "speed": entity.vehicle.position.speed
-            }
-
-            for update in entity.trip_update.stop_time_update:
-                key = f'route:{trip_data["route_id"]}-{trip_data["direction_id"]}'
-                if key in data_route:
-                    data_route[key].append(trip_data)
-                else:
-                    data_route[key] = [ trip_data ]
-
-    return data_route;
-
-
-main()
-
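
The updated main.py is configured entirely through environment variables: BACKEND and INTERVAL are read directly in this diff, while DB_REDIS_HOST, DB_REDIS_PORT and TZ are read by the module it now imports r and tz from. A minimal startup check along those lines (a sketch, not part of this commit; the variable list is taken from the code above):

    # Sketch: fail fast if the configuration the scheduler script relies on is missing.
    # Variable names come from the diff above; everything else is a placeholder.
    import os
    import sys

    REQUIRED = ['BACKEND', 'INTERVAL', 'DB_REDIS_HOST', 'DB_REDIS_PORT', 'TZ']

    missing = [name for name in REQUIRED if not os.getenv(name)]
    if missing:
        sys.exit('missing environment variables: ' + ', '.join(missing))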
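
The new download_file_backend() takes the filename from Content-Disposition with split('filename=')[1], which raises KeyError if the header is missing and keeps any quotes around the name. A slightly more defensive variant of the same shape (a sketch under the same assumption that BACKEND points at a GTFS-RT endpoint returning a Content-Disposition filename):

    # Sketch of a more defensive download step; same return shape as the diff above.
    import os
    import requests

    def download_file_backend():
        response = requests.get(os.environ['BACKEND'], timeout=30)
        if response.status_code != 200:
            return None
        content_disposition = response.headers.get('Content-Disposition', '')
        if 'filename=' not in content_disposition:
            return None
        # Strip optional quotes and trailing parameters around the filename token.
        nombre_archivo = content_disposition.split('filename=')[1].split(';')[0].strip('" ')
        return {'name': nombre_archivo, 'content': response.content}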
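
The data the scheduled job writes to Redis is keyed per stop (stop_id:<stop_id>, with stop_id:none for trips that carry no stop) and, in the removed read_file_proto_x_route(), per line (route:<route_id>-<direction_id>); each key holds a JSON list of trip dicts. A reader for that layout could look like this (a sketch; the connection parameters and the example key are placeholders, not values from this commit):

    # Sketch of a consumer for the keys written above; key names come from the diff,
    # connection details are placeholders.
    import json
    import redis

    r = redis.Redis(host='localhost', port=6379, decode_responses=True)

    raw = r.get('stop_id:none')              # trips with no stop_id attached
    if raw is not None:
        for trip in json.loads(raw):         # one dict per stop_time_update / vehicle
            print(trip['trip_id'], trip['route_id'], trip.get('hora_llegada'))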