def get()

in community/front-end/ofe/website/ghpcfe/views/jobs.py


    def get(self, request, pk):
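        """Submit this job to its cluster and redirect to the job detail page.

        The run request is sent over the c2 command channel; status updates
        arrive asynchronously via the nested `response` callback below.
        """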
        job = get_object_or_404(Job, pk=pk)
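        # Mark the job as being prepared (status code "p", presumably
        # "preparing") before dispatching it to the cluster.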
        job.status = "p"
        job.save()
        cluster_id = job.cluster.id

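        # Resolve the UID of the user's Google social account; it is forwarded
        # to the cluster as the `login_uid` for the submitted job. Superusers
        # without a linked account fall back to uid "0".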
        try:
            user_uid = job.user.socialaccount_set.first().uid
        except AttributeError:
            if job.user.is_superuser:
                user_uid = "0"
            else:
                # User doesn't have a Google SocialAccount.
                messages.error(
                    request,
                    "You are not signed in with a Google Account. This is "
                    "required for job submission.",
                )
                job.status = "n"
                return HttpResponseRedirect(
                    reverse("job-detail", kwargs={"pk": pk})
                )

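        # Callback invoked when the cluster answers the RUN_JOB command; it
        # sanity-checks the IDs echoed back and updates the Job record with the
        # reported status.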
        def response(message):
            if message.get("cluster_id") != cluster_id:
                logger.error(
                    "Cluster ID mismatch versus callback: expected %s, "
                    "received %s",
                    cluster_id,
                    message.get("cluster_id"),
                )
            if message.get("job_id") != pk:
                logger.error(
                    "Job ID mismatch versus callback:  expected %s, "
                    "received %s",
                    pk,
                    message.get("job_id"),
                )

            job = Job.objects.get(pk=pk)
            job.status = message["status"]
            logger.info(
                "Processing job message, id %d, status %s", pk, job.status
            )

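            # Record the Slurm job ID the first time the callback reports one.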
            if "slurm_job_id" in message and not job.slurm_jobid:
                job.slurm_jobid = message["slurm_job_id"]

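            # "c" and "e" look like terminal states (completed / errored):
            # capture the runtime and result fields and derive the job cost.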
            if job.status in ["c", "e"]:
                job.runtime = message.get("job_runtime", None)
                job.result_unit = message.get("result_unit", "")
                job.result_value = message.get("result_value", None)
                job.job_cost = (
                    job.number_of_nodes
                    * job.runtime
                    / Decimal(3600)
                    * job.node_price
                )
            job.save()

        # N.B. not base64-encoding the job script because the pubsub library
        # uses protobuf anyway.
        message_data = {
            "job_id": job.id,
            "login_uid": user_uid,
            "run_script": job.run_script,
            "num_nodes": job.number_of_nodes,
            "partition": job.partition.name,
        }
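        # Optional parameters are only attached when set on the job, its
        # application, or its partition.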
        if job.application.load_command:
            message_data["load_command"] = job.application.load_command
        if job.ranks_per_node:
            message_data["ranksPerNode"] = job.ranks_per_node
        if job.threads_per_rank:
            message_data["threadsPerRank"] = job.threads_per_rank
        if job.wall_clock_time_limit:
            message_data["wall_limit"] = job.wall_clock_time_limit
        if job.input_data:
            message_data["input_data"] = job.input_data
        if job.result_data:
            message_data["result_data"] = job.result_data
        if job.partition.GPU_per_node:
            message_data["gpus_per_node"] = job.partition.GPU_per_node

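        # Dispatch the RUN_JOB command to the cluster; `response` above handles
        # the asynchronous reply.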
        c2.send_command(
            cluster_id, "RUN_JOB", on_response=response, data=message_data
        )
        messages.success(request, "Job sent to Cluster")
        return HttpResponseRedirect(reverse("job-detail", kwargs={"pk": pk}))