FlinkService interface (excerpt starting at submitApplicationCluster)

in flink-kubernetes-operator/src/main/java/org/apache/flink/kubernetes/operator/service/FlinkService.java [54:147]


    /**
     * Submits an application cluster for the given job spec. When {@code requireHaMetadata} is
     * true, submission expects existing HA metadata so the job can restore from its latest state.
     */
    void submitApplicationCluster(JobSpec jobSpec, Configuration conf, boolean requireHaMetadata)
            throws Exception;

    /** Returns whether HA metadata is available for the cluster described by the config. */
    boolean isHaMetadataAvailable(Configuration conf);

    /** Returns whether the job has completed at least one checkpoint. */
    boolean atLeastOneCheckpoint(Configuration conf);

    void submitSessionCluster(Configuration conf) throws Exception;

    /** Submits a job to an existing session cluster, optionally restoring from a savepoint. */
    JobID submitJobToSessionCluster(
            ObjectMeta meta,
            FlinkSessionJobSpec spec,
            JobID jobID,
            Configuration conf,
            @Nullable String savepoint)
            throws Exception;

    boolean isJobManagerPortReady(Configuration config);

    Optional<JobStatusMessage> getJobStatus(Configuration conf, JobID jobId) throws Exception;

    JobResult requestJobResult(Configuration conf, JobID jobID) throws Exception;

    /** Cancels or suspends the deployment's job according to the given suspend mode. */
    CancelResult cancelJob(FlinkDeployment deployment, SuspendMode suspendMode, Configuration conf)
            throws Exception;

    /** Deletes the Flink cluster deployment, optionally also removing its HA data. */
    void deleteClusterDeployment(
            ObjectMeta meta,
            FlinkDeploymentStatus status,
            Configuration conf,
            boolean deleteHaData);

    /** Cancels or suspends the session job according to the given suspend mode. */
    CancelResult cancelSessionJob(
            FlinkSessionJob sessionJob, SuspendMode suspendMode, Configuration conf)
            throws Exception;

    /** Triggers a savepoint and returns the trigger id used to poll fetchSavepointInfo. */
    String triggerSavepoint(
            String jobId,
            org.apache.flink.core.execution.SavepointFormatType savepointFormatType,
            String savepointDirectory,
            Configuration conf)
            throws Exception;

    /** Triggers a checkpoint and returns the trigger id used to poll fetchCheckpointInfo. */
    String triggerCheckpoint(
            String jobId,
            org.apache.flink.core.execution.CheckpointType checkpointType,
            Configuration conf)
            throws Exception;

    /** Returns the latest completed checkpoint for the job, if any. */
    Optional<Savepoint> getLastCheckpoint(JobID jobId, Configuration conf);

    SavepointFetchResult fetchSavepointInfo(String triggerId, String jobId, Configuration conf);

    CheckpointFetchResult fetchCheckpointInfo(String triggerId, String jobId, Configuration conf);

    CheckpointStatsResult fetchCheckpointStats(String jobId, Long checkpointId, Configuration conf);

    Tuple2<
                    Optional<CheckpointHistoryWrapper.CompletedCheckpointInfo>,
                    Optional<CheckpointHistoryWrapper.PendingCheckpointInfo>>
            getCheckpointInfo(JobID jobId, Configuration conf) throws Exception;

    void disposeSavepoint(String savepointPath, Configuration conf) throws Exception;

    Map<String, String> getClusterInfo(Configuration conf, @Nullable String jobId) throws Exception;

    PodList getJmPodList(FlinkDeployment deployment, Configuration conf);

    boolean scale(FlinkResourceContext<?> resourceContext, Configuration deployConfig)
            throws Exception;

    Map<String, String> getMetrics(Configuration conf, String jobId, List<String> metricNames)
            throws Exception;

    RestClusterClient<String> getClusterClient(Configuration conf) throws Exception;

    /** Result of a cancel operation. */
    @AllArgsConstructor
    class CancelResult {
        @Getter boolean pending;
        String savepointPath;

        public static CancelResult completed(String path) {
            return new CancelResult(false, path);
        }

        public static CancelResult pending() {
            return new CancelResult(true, null);
        }

        public Optional<String> getSavepointPath() {
            return Optional.ofNullable(savepointPath);
        }
    }
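
For orientation, here is a minimal caller-side sketch of how two of the contracts above might be consumed, assuming a FlinkService instance is available. CancelResult, triggerSavepoint and fetchSavepointInfo are taken from the excerpt; the class name FlinkServiceUsageSketch, the helper method names and the choice of the CANONICAL savepoint format are illustrative assumptions, not operator code.

import java.util.Optional;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.execution.SavepointFormatType;
import org.apache.flink.kubernetes.operator.service.FlinkService;
import org.apache.flink.kubernetes.operator.service.FlinkService.CancelResult;

/** Illustrative caller-side sketch; not part of the operator codebase. */
public final class FlinkServiceUsageSketch {

    /**
     * Triggers a savepoint and returns the trigger id. The caller is expected to keep the id
     * and pass it to fetchSavepointInfo(triggerId, jobId, conf) on later reconciliations to
     * check for completion.
     */
    public static String requestSavepoint(
            FlinkService flinkService, String jobId, String savepointDir, Configuration conf)
            throws Exception {
        return flinkService.triggerSavepoint(
                jobId, SavepointFormatType.CANONICAL, savepointDir, conf);
    }

    /**
     * Interprets a CancelResult: a pending result means the stop operation was only triggered
     * and completion must be observed later, while a completed result may carry the savepoint
     * the job was stopped with.
     */
    public static Optional<String> extractSavepointPath(CancelResult result) {
        if (result.isPending()) {
            // Stop (e.g. stop-with-savepoint) is still in progress; check again later.
            return Optional.empty();
        }
        // A path is present only if the job was suspended with a savepoint, hence the Optional.
        return result.getSavepointPath();
    }
}

The Optional returned by getSavepointPath mirrors the fact that only savepoint-based suspend modes produce a path; a plain cancel completes without one.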