def run(context)

in pipeline/function_app.py [0:0]


def run(context):
    """Fan-out/fan-in orchestrator: process each input blob in parallel.

    Args:
        context: Durable orchestration context (SOURCE shows it provides
            ``get_input``, ``call_sub_orchestrator``, and ``task_all``).

    Yields:
        The combined task from ``task_all``; the framework resumes this
        generator with the list of sub-orchestration results.

    Returns:
        The list of results, one per input blob.
    """
    input_data = context.get_input()
    # Lazy %-style args instead of f-strings: avoids eager formatting on
    # every orchestrator replay (same rendered message text as before).
    logging.info("Context %s", input_data and context or context)
    logging.info("Input data: %s", input_data)

    # Fan out: schedule one sub-orchestration per blob.
    # Comprehension replaces the manual append loop (PERF401).
    sub_tasks = [
        context.call_sub_orchestrator("ProcessBlob", blob)
        for blob in input_data
    ]
    logging.info("Sub tasks: %s", sub_tasks)

    # Fan in: wait for all sub-orchestrations to complete in parallel.
    # NOTE(review): these logs re-emit on orchestration replay; gate on
    # context.is_replaying if the context type supports it — confirm.
    results = yield context.task_all(sub_tasks)
    logging.info("Results: %s", results)
    return results