throw GenerativeAIException()

in pkgs/google_generative_ai/lib/src/api.dart [83:218]


              throw GenerativeAIException('Response was blocked'
                  '${blockReason != null ? ' due to $blockReason' : ''}'
                  '${blockReasonMessage != null ? ': $blockReasonMessage' : ''}'),
            _ => null,
          },
        [final candidate, ...] => candidate.text,
      };

  /// The function call parts of the first candidate in [candidates], if any.
  ///
  /// Returns an empty list if there are no candidates, or if the first
  /// candidate has no [FunctionCall] parts. Unlike [text], no error is thrown
  /// if the prompt or the response was blocked.
  Iterable<FunctionCall> get functionCalls =>
      candidates.firstOrNull?.content.parts.whereType<FunctionCall>() ??
      const [];
}
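
// Usage sketch (illustrative, not part of api.dart): reading the text and the
// function calls of a response. Assumes a configured [GenerativeModel] named
// `model` from this package; the prompt content is made up.
Future<void> printResponseParts(GenerativeModel model) async {
  final response = await model.generateContent([Content.text('Hello')]);
  // `text` throws a [GenerativeAIException] if the prompt was blocked.
  print(response.text ?? '<no text>');
  // `functionCalls` never throws; it is empty when there are no candidates.
  for (final call in response.functionCalls) {
    print('function call: ${call.name}(${call.args})');
  }
}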

/// The response to a content embedding request.
final class EmbedContentResponse {
  /// The embedding generated from the input content.
  final ContentEmbedding embedding;

  EmbedContentResponse(this.embedding);
}
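
// Usage sketch (illustrative, not part of api.dart): requesting a single
// embedding. Assumes `model` is a [GenerativeModel] configured with an
// embedding model; how it was constructed is not shown here.
Future<void> printEmbedding(GenerativeModel model) async {
  final response = await model.embedContent(Content.text('Hello world'));
  print('embedding has ${response.embedding.values.length} values');
}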

/// The response to a batch content embedding request.
final class BatchEmbedContentsResponse {
  /// The embeddings generated from the input content for each request, in the
  /// same order as provided in the batch request.
  final List<ContentEmbedding> embeddings;

  BatchEmbedContentsResponse(this.embeddings);
}
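
// Usage sketch (illustrative, not part of api.dart): embedding several pieces
// of content in one call; the results come back in the same order as the
// requests.
Future<void> printBatchEmbeddings(GenerativeModel model) async {
  final response = await model.batchEmbedContents([
    EmbedContentRequest(Content.text('first document')),
    EmbedContentRequest(Content.text('second document')),
  ]);
  for (final embedding in response.embeddings) {
    print('embedding has ${embedding.values.length} values');
  }
}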

/// A request to embed a single [Content].
final class EmbedContentRequest {
  /// The content to embed.
  final Content content;

  /// The task type for which the embedding will be used, if known.
  final TaskType? taskType;

  /// An optional title for the content.
  final String? title;

  /// The model to use for this request, overriding any default model.
  final String? model;

  /// The requested number of dimensions for the output embedding, if the
  /// model's default should not be used.
  final int? outputDimensionality;

  EmbedContentRequest(this.content,
      {this.taskType, this.title, this.model, this.outputDimensionality});

  Object toJson({String? defaultModel}) => {
        'content': content.toJson(),
        if (taskType case final taskType?) 'taskType': taskType.toJson(),
        if (title != null) 'title': title,
        if (model ?? defaultModel case final model?) 'model': model,
        if (outputDimensionality != null)
          'outputDimensionality': outputDimensionality,
      };
}
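
// Serialization sketch (illustrative, not part of api.dart): `toJson` emits
// only the fields that were set and falls back to `defaultModel` when no
// per-request model was given. The model name below is just an example value.
void debugPrintRequestJson() {
  final request = EmbedContentRequest(
    Content.text('some text to embed'),
    outputDimensionality: 256,
  );
  print(request.toJson(defaultModel: 'models/text-embedding-004'));
}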

/// An embedding, as defined by a list of values.
final class ContentEmbedding {
  /// The embedding values.
  final List<double> values;

  ContentEmbedding(this.values);
}
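
// Illustrative sketch (not part of api.dart): an embedding is a plain vector,
// so two embeddings can be compared directly, for example with cosine
// similarity. Requires `import 'dart:math';` for [sqrt].
double cosineSimilarity(ContentEmbedding a, ContentEmbedding b) {
  assert(a.values.length == b.values.length);
  var dot = 0.0, normA = 0.0, normB = 0.0;
  for (var i = 0; i < a.values.length; i++) {
    dot += a.values[i] * b.values[i];
    normA += a.values[i] * a.values[i];
    normB += b.values[i] * b.values[i];
  }
  return dot / (sqrt(normA) * sqrt(normB));
}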

/// Feedback metadata of a prompt specified in a [GenerativeModel] request.
final class PromptFeedback {
  /// If set, the prompt was blocked and no candidates are returned.
  ///
  /// Rephrase your prompt.
  final BlockReason? blockReason;

  /// A message describing why the prompt was blocked, if one was provided.
  final String? blockReasonMessage;

  /// Ratings for safety of the prompt.
  ///
  /// There is at most one rating per category.
  final List<SafetyRating> safetyRatings;

  PromptFeedback(this.blockReason, this.blockReasonMessage, this.safetyRatings);
}
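
// Usage sketch (illustrative, not part of api.dart): checking prompt feedback
// before reading `text`, instead of catching the [GenerativeAIException] that
// a blocked prompt would cause. Assumes [GenerateContentResponse] exposes a
// nullable `promptFeedback` field, as in this package.
void logBlockReason(GenerateContentResponse response) {
  final feedback = response.promptFeedback;
  if (feedback == null || feedback.blockReason == null) return;
  print('Prompt blocked due to ${feedback.blockReason}'
      '${feedback.blockReasonMessage != null ? ': ${feedback.blockReasonMessage}' : ''}');
}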

/// Metadata on the generation request's token usage.
final class UsageMetadata {
  /// Number of tokens in the prompt.
  final int? promptTokenCount;

  /// Total number of tokens across the generated candidates.
  final int? candidatesTokenCount;

  /// Total token count for the generation request (prompt + candidates).
  final int? totalTokenCount;

  UsageMetadata({
    this.promptTokenCount,
    this.candidatesTokenCount,
    this.totalTokenCount,
  });
}
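
// Usage sketch (illustrative, not part of api.dart): logging token usage.
// Assumes [GenerateContentResponse] exposes a nullable `usageMetadata` field,
// as in recent versions of this package.
void logTokenUsage(GenerateContentResponse response) {
  final usage = response.usageMetadata;
  if (usage == null) return;
  print('prompt: ${usage.promptTokenCount}, '
      'candidates: ${usage.candidatesTokenCount}, '
      'total: ${usage.totalTokenCount}');
}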

/// Response candidate generated from a [GenerativeModel].
final class Candidate {
  /// Generated content returned from the model.
  final Content content;

  /// List of ratings for the safety of a response candidate.
  ///
  /// There is at most one rating per category.
  final List<SafetyRating>? safetyRatings;

  /// Citation information for a model-generated candidate.
  ///
  /// This field may be populated with recitation information for any text
  /// included in the [content]. These are passages that are "recited" from
  /// copyrighted material in the foundational LLM's training data.
  final CitationMetadata? citationMetadata;

  /// The reason why the model stopped generating tokens.
  ///
  /// If `null`, the model has not stopped generating tokens.
  final FinishReason? finishReason;

  /// A message giving further detail on [finishReason], if one was provided.
  final String? finishMessage;

  // TODO: token count?
  Candidate(this.content, this.safetyRatings, this.citationMetadata,
      this.finishReason, this.finishMessage);

  /// The concatenation of the text parts of [content], if any.
  ///
  /// If this candidate was finished for a reason of [FinishReason.recitation]
  /// or [FinishReason.safety], accessing this text will throw a
  /// [GenerativeAIException].
  ///
  /// If [content] contains any text parts, this value is the concatenation of
  /// the text.
  ///
  /// If [content] does not contain any text parts, this value is `null`.
  String? get text {
    if (finishReason case FinishReason.recitation || FinishReason.safety) {