forked from TDTP/proto_backend
Add fetch.py
parent 9584464df6
commit 80988c72cc
@@ -0,0 +1,50 @@
# coding: utf-8
import json
import logging
import os
from datetime import datetime

import requests
from apscheduler.schedulers.blocking import BlockingScheduler

from main import read_file_proto_x_stop, r, tz


def rutina_principal():
    print(datetime.now(tz))

    fileproto = download_file_backend()
    if fileproto is None:
        return False

    # nothing to do when the stored file name matches the downloaded one
    namefile = r.get('fileproto')
    if namefile == fileproto['name']:
        print(namefile, flush=True)
        return

    # delete all of the current information
    r.flushdb()

    r.set('fileproto', fileproto['name'])
    print(fileproto['name'], flush=True)
    data = read_file_proto_x_stop(fileproto['content'])

    # save data to redis db
    for key in data:
        r.set(key, json.dumps(data[key]))

    # print(json.dumps(data['route:549-1'], indent=4))


def download_file_backend():
    response = requests.get(os.environ.get('BACKEND'))
    if response.status_code == 200:
        # the served file name travels in the Content-Disposition header
        content_disposition = response.headers['Content-Disposition']
        nombre_archivo = content_disposition.split('filename=')[1]
        return {'name': nombre_archivo, 'content': response.content}
    return None
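

# Minimal sketch, hypothetical helper (not in the original file): the split above
# keeps any quotes or extra parameters the server may send, e.g.
# 'attachment; filename="proto.zip"; size=123'. This variant strips them.
def filename_from_disposition(content_disposition):
    # take the token after 'filename=', drop trailing parameters, then quotes
    raw = content_disposition.split('filename=')[1].split(';')[0].strip()
    return raw.strip('"\'')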


sched = BlockingScheduler()
# run rutina_principal on a fixed interval; INTERVAL is read from the environment
sched.add_job(rutina_principal, 'interval', seconds=int(os.environ.get('INTERVAL')))
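# Note (sketch, not in the original file): int(os.environ.get('INTERVAL')) raises
# TypeError when INTERVAL is unset; a hedged alternative with an assumed 30-second default:
#     sched.add_job(rutina_principal, 'interval', seconds=int(os.environ.get('INTERVAL', '30')))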

if __name__ == '__main__':
    sched.start()
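
# For context, a sketch of what fetch.py assumes main.py provides, inferred from
# the imports above; names and types here are assumptions, not code from this commit:
#
#     import redis
#     from zoneinfo import ZoneInfo
#     r = redis.Redis(decode_responses=True)    # shared Redis client (assumed config)
#     tz = ZoneInfo('America/Santiago')         # local timezone (assumed zone)
#     def read_file_proto_x_stop(content):
#         """Parse the downloaded proto payload into a dict of stop-keyed records."""
#         ...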