# api_service/seattle_flu_incidence_mapper/query_model.py
import hashlib
import json
import os
import time

from flask import current_app, request, send_file
from sqlalchemy.orm.exc import NoResultFound

# Project-local helpers used below (GenericModel, get_model_id,
# get_or_create_model_container, execute_model_query) are imported from
# modules elsewhere in this package; their exact paths are not shown in
# this excerpt.


def query():
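    """Answer a model query, caching results on disk.

    Looks up the newest model matching the query, derives a cache filename
    from (model id, created timestamp, output format), runs the model worker
    only on a cache miss, and streams the result back as JSON or CSV.
    """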
    query_json = request.json
    # Simple content negotiation: return CSV only when the Accept header asks for it.
    file_format = 'csv' if 'csv' in request.headers.get('accept', 'json').lower() else 'json'
    created = False
    container = None
    try:
        model_id = get_model_id(query_json)
        host_job_path = os.path.join(current_app.config['WORKER_JOB_HOST_PATH'], model_id)
        job_path = os.path.join(current_app.config['MODEL_JOB_PATH'], model_id)
        model: GenericModel = (GenericModel.query
                               .filter(GenericModel.id == model_id)
                               .order_by(GenericModel.created.desc())
                               .first())
        if model is None:
            raise NoResultFound(f"Could not find the model with the id {model_id} "
                                f"from query string: {json.dumps(query_json)}")
        # Define where we want our output written to.
        # Cache the user's query and model output to reduce calls to R. This could
        # change for future models that have more interactive stochastic outputs.
        # md5 here is a cache key, not a security measure.
        outfile = hashlib.md5(json.dumps(dict(id=model_id,
                                              created=str(model.created),
                                              file_format=file_format)).encode('ascii')).hexdigest()
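        # The digest is a stable hex filename: identical queries against the
        # same model version resolve to the same cached output file.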
        full_outpath = os.path.join(job_path, outfile)
        if not os.path.exists(full_outpath):
            if not os.path.exists(job_path):
                os.makedirs(job_path)
            # New request or an updated model.
            # We have our model; check whether we already have a worker container.
            container, socket, created = get_or_create_model_container(job_path, host_job_path, model_id)
            execute_model_query(socket, file_format, outfile)
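            # Poll for the worker's output for up to ~5 seconds (50 * 0.1 s);
            # the worker writes the result file into job_path when it finishes.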
            # A sidecar lock file could signal an in-progress write, but it is
            # not checked yet; we rely on polling alone.
            lock_path = full_outpath + ".lock"
            time.sleep(0.1)
            attempts = 0
            while not os.path.exists(full_outpath) and attempts < 50:
                time.sleep(0.1)
                attempts += 1
            if not os.path.exists(full_outpath):
                raise FileNotFoundError(full_outpath)
        return send_file(
            full_outpath,
            as_attachment=False,
            mimetype='application/json' if file_format == 'json' else 'text/csv'
        )
    # Re-raise so upstream error handlers can turn these into 404 responses.
    except (NoResultFound, FileNotFoundError) as e:
        current_app.logger.exception(e)
        raise
    except Exception as e:
        current_app.logger.exception(e)
        # Only stop the container if this request created it; a long-lived
        # worker that predates this request is left running.
        if created and container:
            try:
                container.stop()
            except Exception:
                pass
        # Propagate so the caller gets an error response instead of an
        # implicit None return.
        raise
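
# A minimal sketch of exercising this handler over HTTP, assuming the API
# wires it to a POST route; the URL, port, and query-body keys below are
# illustrative assumptions, not taken from this file:
#
#     import requests
#     resp = requests.post('http://localhost:5000/query',
#                          json={'model_type': 'inla_latent'},  # hypothetical body
#                          headers={'Accept': 'text/csv'})      # or omit for JSON
#     resp.raise_for_status()
#     print(resp.text)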