Add rustdoc comments to all public items

Document all public structs, enums, traits, functions, fields, and
variants across the library. Adds crate-level documentation with a
usage example, module-level docs, and API reference links where
applicable. Also fixes four bare URL warnings in existing doc comments.
This commit is contained in:
2026-01-31 06:59:08 +00:00
parent f8a4323117
commit c4acf465ba
12 changed files with 260 additions and 9 deletions

View File

@@ -5,13 +5,21 @@ use serde_json::Value;
use crate::types::FunctionResponse;
/// A conversation message containing one or more [`Part`]s.
///
/// See <https://ai.google.dev/api/caching#Content>.
#[derive(Clone, Default, Debug, Serialize, Deserialize)]
pub struct Content {
    /// The role of the message author (`user` or `model`).
    pub role: Option<Role>,
    /// The ordered parts that make up this message.
    ///
    /// `None` is treated as "no parts" by [`Content::get_text`].
    pub parts: Option<Vec<Part>>,
}
impl Content {
/// Concatenates all [`PartData::Text`] parts into a single string.
///
/// Returns `None` if there are no parts.
pub fn get_text(&self) -> Option<String> {
self.parts.as_ref().map(|parts| {
parts
@@ -24,25 +32,30 @@ impl Content {
})
}
/// Creates a [`Content`] containing a single text part, suitable for use as a system instruction.
pub fn system_prompt<S: Into<String>>(system_prompt: S) -> Self {
Self::builder().add_text_part(system_prompt).build()
}
/// Returns a new [`ContentBuilder`].
pub fn builder() -> ContentBuilder {
ContentBuilder::default()
}
}
/// Builder for constructing [`Content`] values incrementally.
///
/// Obtain one via [`Content::builder`]; finalize with [`ContentBuilder::build`].
#[derive(Clone, Debug, Default)]
pub struct ContentBuilder {
    // The Content being assembled; returned as-is by `build`.
    content: Content,
}
impl ContentBuilder {
/// Appends a text part to this content.
pub fn add_text_part<T: Into<String>>(self, text: T) -> Self {
    let part = Part::from_text(text.into());
    self.add_part(part)
}
/// Appends an arbitrary [`Part`] to this content.
pub fn add_part(mut self, part: Part) -> Self {
match &mut self.content.parts {
Some(parts) => parts.push(part),
@@ -51,16 +64,19 @@ impl ContentBuilder {
self
}
/// Sets the [`Role`] for this content.
pub fn role(self, role: Role) -> Self {
    let mut next = self;
    next.content.role = Some(role);
    next
}
/// Consumes the builder and returns the constructed [`Content`].
pub fn build(self) -> Content {
self.content
}
}
/// The role of a message author in a conversation.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Role {
@@ -90,7 +106,9 @@ impl FromStr for Role {
}
}
/// A single unit of content within a [`Content`] message.
///
/// See <https://ai.google.dev/api/caching#Part>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Part {
@@ -108,29 +126,42 @@ pub struct Part {
pub data: PartData, // Create enum for data.
}
/// The payload of a [`Part`], representing different content types.
///
/// Variant names serialize in camelCase (`inlineData`, `functionCall`, ...).
///
/// See <https://ai.google.dev/api/caching#Part>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum PartData {
    /// Plain text content.
    Text(String),
    /// Binary data encoded inline. See <https://ai.google.dev/api/caching#Blob>.
    InlineData {
        /// The IANA MIME type of the data (e.g. `"image/png"`).
        mime_type: String,
        /// Base64-encoded binary data.
        data: String,
    },
    /// A function call requested by the model. See <https://ai.google.dev/api/caching#FunctionCall>.
    FunctionCall {
        /// Optional unique identifier for the function call.
        id: Option<String>,
        /// The name of the function to call.
        name: String,
        /// The arguments to pass, as a JSON object.
        args: Option<Value>,
    },
    /// A response to a function call. See <https://ai.google.dev/api/caching#FunctionResponse>.
    FunctionResponse(FunctionResponse),
    /// A reference to a file stored in the API.
    ///
    /// Kept as raw JSON (`Value`); no typed wrapper is defined here.
    FileData(Value),
    /// Code to be executed by the model. Kept as raw JSON (`Value`).
    ExecutableCode(Value),
    /// The result of executing code. Kept as raw JSON (`Value`).
    CodeExecutionResult(Value),
}
impl Part {
/// Creates a [`Part`] containing only text.
pub fn from_text<S: Into<String>>(text: S) -> Self {
Self {
thought: None,

View File

@@ -4,23 +4,32 @@ use crate::error::{Error, Result};
use super::Content;
/// Request body for the `countTokens` endpoint.
///
/// Use [`CountTokensRequest::builder`] for ergonomic construction.
///
/// See <https://ai.google.dev/api/tokens#method:-models.counttokens>.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CountTokensRequest {
    /// The content whose tokens should be counted.
    pub contents: Content,
}
impl CountTokensRequest {
/// Returns a new [`CountTokensRequestBuilder`].
pub fn builder() -> CountTokensRequestBuilder {
CountTokensRequestBuilder::default()
}
}
/// Builder for [`CountTokensRequest`].
///
/// Obtain one via [`CountTokensRequest::builder`] or [`CountTokensRequestBuilder::from_prompt`].
#[derive(Debug, Default)]
pub struct CountTokensRequestBuilder {
    // Accumulated content; moved into the request by `build`.
    contents: Content,
}
impl CountTokensRequestBuilder {
/// Creates a builder pre-populated with a single text prompt.
pub fn from_prompt(prompt: &str) -> Self {
CountTokensRequestBuilder {
contents: Content {
@@ -30,6 +39,7 @@ impl CountTokensRequestBuilder {
}
}
/// Consumes the builder and returns the constructed [`CountTokensRequest`].
pub fn build(self) -> CountTokensRequest {
CountTokensRequest {
contents: self.contents,
@@ -37,6 +47,9 @@ impl CountTokensRequestBuilder {
}
}
/// The raw response from the `countTokens` endpoint, which may be a success or an error.
///
/// Use [`into_result`](CountTokensResponse::into_result) to convert into a standard `Result`.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CountTokensResponse {
@@ -45,6 +58,7 @@ pub enum CountTokensResponse {
}
impl CountTokensResponse {
/// Converts this response into a `Result`, mapping the error variant to [`crate::error::Error`].
pub fn into_result(self) -> Result<CountTokensResponseResult> {
match self {
CountTokensResponse::Ok(result) => Ok(result),
@@ -53,9 +67,12 @@ impl CountTokensResponse {
}
}
/// A successful response from the `countTokens` endpoint.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CountTokensResponseResult {
    /// The total number of tokens in the input.
    // NOTE(review): `i32` here vs `u32` below — asymmetry presumably mirrors the API schema; confirm.
    pub total_tokens: i32,
    /// The total number of billable characters in the input.
    pub total_billable_characters: u32,
}

View File

@@ -2,11 +2,16 @@ use std::fmt::Formatter;
use serde::{Deserialize, Serialize};
/// A structured error returned by the Vertex AI / Gemini API.
///
/// Implements `Display` and `std::error::Error` (impls elsewhere in this module).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct VertexApiError {
    /// The HTTP status code.
    pub code: i32,
    /// A human-readable error message.
    pub message: String,
    /// The gRPC status string (e.g. `"INVALID_ARGUMENT"`).
    pub status: String,
    /// Optional additional error details, kept as raw JSON values.
    pub details: Option<Vec<serde_json::Value>>,
}
@@ -19,8 +24,12 @@ impl core::fmt::Display for VertexApiError {
impl std::error::Error for VertexApiError {}
/// A wrapper around [`VertexApiError`] matching the Gemini API error response format.
///
/// The Gemini API nests the error details inside an `error` field; deserialize an
/// error body into this type, then inspect [`GeminiApiError::error`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GeminiApiError {
    /// The inner error details.
    pub error: VertexApiError,
}

View File

@@ -4,6 +4,11 @@ use serde_json::Value;
use super::{Content, VertexApiError};
use crate::error::Result;
/// Request body for the `generateContent` and `streamGenerateContent` endpoints.
///
/// Use [`GenerateContentRequest::builder`] for ergonomic construction.
///
/// See <https://ai.google.dev/api/generate-content#request-body>.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GenerateContentRequest {
@@ -19,11 +24,13 @@ pub struct GenerateContentRequest {
}
impl GenerateContentRequest {
    /// Returns a new [`GenerateContentRequestBuilder`].
    ///
    /// Equivalent to [`GenerateContentRequestBuilder::new`].
    pub fn builder() -> GenerateContentRequestBuilder {
        GenerateContentRequestBuilder::new()
    }
}
/// Builder for [`GenerateContentRequest`].
#[derive(Debug)]
pub struct GenerateContentRequestBuilder {
request: GenerateContentRequest,
@@ -36,36 +43,45 @@ impl GenerateContentRequestBuilder {
}
}
/// Sets the conversation contents.
pub fn contents(self, contents: Vec<Content>) -> Self {
    let mut next = self;
    next.request.contents = contents;
    next
}
/// Sets the generation configuration.
pub fn generation_config(self, generation_config: GenerationConfig) -> Self {
    let mut next = self;
    next.request.generation_config = Some(generation_config);
    next
}
/// Sets the tools available to the model (e.g. function calling, Google Search).
pub fn tools(self, tools: Vec<Tools>) -> Self {
    let mut next = self;
    next.request.tools = Some(tools);
    next
}
/// Sets the safety filter settings.
pub fn safety_settings(self, safety_settings: Vec<SafetySetting>) -> Self {
    let mut next = self;
    next.request.safety_settings = Some(safety_settings);
    next
}
/// Sets a system instruction to guide the model's behavior.
pub fn system_instruction(self, system_instruction: Content) -> Self {
    let mut next = self;
    next.request.system_instruction = Some(system_instruction);
    next
}
/// Consumes the builder and returns the constructed [`GenerateContentRequest`].
///
/// No validation is performed; fields are returned exactly as set.
pub fn build(self) -> GenerateContentRequest {
    self.request
}
}
/// A set of tool declarations the model may use during generation.
///
/// See <https://ai.google.dev/api/caching#Tool>.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct Tools {
#[serde(skip_serializing_if = "Option::is_none")]
@@ -79,13 +95,17 @@ pub struct Tools {
pub google_search: Option<GoogleSearch>,
}
/// Enables the Google Search grounding tool.
///
/// The tool takes no configuration; its mere presence in [`Tools`] enables it.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct GoogleSearch {}
/// Configuration for dynamic retrieval in Google Search grounding.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DynamicRetrievalConfig {
    /// The retrieval mode (e.g. `"MODE_DYNAMIC"`).
    // NOTE(review): stringly-typed; an enum would be safer — confirm the full set of modes.
    pub mode: String,
    /// The threshold for triggering retrieval. Defaults to `0.7`.
    ///
    /// Omitted from the serialized request when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_threshold: Option<f32>,
}
@@ -99,12 +119,19 @@ impl Default for DynamicRetrievalConfig {
}
}
/// Google Search retrieval tool with dynamic retrieval configuration.
///
/// The derived `Default` delegates to [`DynamicRetrievalConfig`]'s `Default`.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GoogleSearchRetrieval {
    /// Configuration controlling when retrieval is triggered.
    pub dynamic_retrieval_config: DynamicRetrievalConfig,
}
/// Parameters that control how the model generates content.
///
/// Use [`GenerationConfig::builder`] for ergonomic construction.
///
/// See <https://ai.google.dev/api/generate-content#generationconfig>.
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct GenerationConfig {
@@ -129,11 +156,13 @@ pub struct GenerationConfig {
}
impl GenerationConfig {
    /// Returns a new [`GenerationConfigBuilder`].
    ///
    /// Equivalent to [`GenerationConfigBuilder::new`].
    pub fn builder() -> GenerationConfigBuilder {
        GenerationConfigBuilder::new()
    }
}
/// Builder for [`GenerationConfig`].
#[derive(Debug)]
pub struct GenerationConfigBuilder {
generation_config: GenerationConfig,
@@ -191,11 +220,13 @@ impl GenerationConfigBuilder {
self
}
/// Consumes the builder and returns the constructed [`GenerationConfig`].
///
/// No validation is performed; fields are returned exactly as set.
pub fn build(self) -> GenerationConfig {
    self.generation_config
}
}
/// Configuration for the model's "thinking" (chain-of-thought) behavior.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ThinkingConfig {
@@ -206,6 +237,7 @@ pub struct ThinkingConfig {
pub thinking_level: Option<ThinkingLevel>,
}
/// The level of thinking effort the model should use.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum ThinkingLevel {
@@ -214,6 +246,9 @@ pub enum ThinkingLevel {
High,
}
/// A safety filter configuration that controls blocking thresholds for harmful content.
///
/// See <https://ai.google.dev/api/generate-content#safetysetting>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SafetySetting {
@@ -223,6 +258,9 @@ pub struct SafetySetting {
pub method: Option<HarmBlockMethod>,
}
/// Categories of potentially harmful content.
///
/// See <https://ai.google.dev/api/generate-content#harmcategory>.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum HarmCategory {
#[serde(rename = "HARM_CATEGORY_UNSPECIFIED")]
@@ -237,6 +275,7 @@ pub enum HarmCategory {
SexuallyExplicit,
}
/// The threshold at which harmful content is blocked.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum HarmBlockThreshold {
#[serde(rename = "HARM_BLOCK_THRESHOLD_UNSPECIFIED")]
@@ -251,6 +290,7 @@ pub enum HarmBlockThreshold {
BlockNone,
}
/// The method used to evaluate harm (severity-based or probability-based).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum HarmBlockMethod {
#[serde(rename = "HARM_BLOCK_METHOD_UNSPECIFIED")]
@@ -261,6 +301,9 @@ pub enum HarmBlockMethod {
Probability, // PROBABILITY
}
/// A single candidate response generated by the model.
///
/// See <https://ai.google.dev/api/generate-content#candidate>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Candidate {
@@ -276,6 +319,7 @@ pub struct Candidate {
}
impl Candidate {
/// Returns the concatenated text from this candidate's content, if any.
pub fn get_text(&self) -> Option<String> {
match &self.content {
Some(content) => content.get_text(),
@@ -284,6 +328,7 @@ impl Candidate {
}
}
/// A citation to a source used by the model in its response.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Citation {
@@ -292,12 +337,14 @@ pub struct Citation {
pub uri: Option<String>,
}
/// Metadata containing citations for a candidate's content.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CitationMetadata {
    /// The citation sources.
    ///
    /// Accepts both `citations` and `citationSources` on deserialization
    /// (the latter via `#[serde(alias)]`).
    #[serde(alias = "citationSources")]
    pub citations: Vec<Citation>,
}
/// A safety rating for a piece of content across a specific harm category.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SafetyRating {
@@ -308,6 +355,7 @@ pub struct SafetyRating {
pub severity_score: Option<f32>,
}
/// Token usage statistics for a generate content request/response.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UsageMetadata {
@@ -316,6 +364,9 @@ pub struct UsageMetadata {
pub total_token_count: Option<u32>,
}
/// A declaration of a function the model may call.
///
/// See <https://ai.google.dev/api/caching#FunctionDeclaration>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FunctionDeclaration {
@@ -332,7 +383,7 @@ pub struct FunctionDeclaration {
pub response_json_schema: Option<Value>,
}
/// See <https://ai.google.dev/api/caching#FunctionResponse>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FunctionResponse {
@@ -345,14 +396,14 @@ pub struct FunctionResponse {
pub scheduling: Option<Scheduling>,
}
/// A single part of a function-response payload.
///
/// See <https://ai.google.dev/api/caching#FunctionResponsePart>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum FunctionResponsePart {
    /// Binary data returned by the function, encoded inline.
    InlineData(FunctionResponseBlob),
}
/// See <https://ai.google.dev/api/caching#FunctionResponseBlob>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FunctionResponseBlob {
@@ -360,7 +411,7 @@ pub struct FunctionResponseBlob {
pub data: String,
}
/// See <https://ai.google.dev/api/caching#Scheduling>.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum Scheduling {
@@ -370,6 +421,7 @@ pub enum Scheduling {
Interrupt,
}
/// A single property within a function's parameter schema.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FunctionParametersProperty {
@@ -377,6 +429,10 @@ pub struct FunctionParametersProperty {
pub description: String,
}
/// The raw response from the `generateContent` endpoint, which may be a success or an error.
///
/// Use [`into_result`](GenerateContentResponse::into_result) to convert into a standard
/// `Result<GenerateContentResponseResult>`.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GenerateContentResponse {
@@ -393,6 +449,7 @@ impl From<GenerateContentResponse> for Result<GenerateContentResponseResult> {
}
}
/// A successful response from the `generateContent` endpoint.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GenerateContentResponseResult {
@@ -400,12 +457,14 @@ pub struct GenerateContentResponseResult {
pub usage_metadata: Option<UsageMetadata>,
}
/// An error response from the `generateContent` endpoint.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GenerateContentResponseError {
    /// The inner API error details.
    pub error: VertexApiError,
}
impl GenerateContentResponse {
/// Converts this response into a `Result`, mapping the error variant to [`crate::error::Error`].
pub fn into_result(self) -> Result<GenerateContentResponseResult> {
match self {
GenerateContentResponse::Ok(result) => Ok(result),

View File

@@ -1,3 +1,5 @@
//! Request and response types for the Gemini API.
mod common;
mod count_tokens;
mod error;

View File

@@ -2,12 +2,14 @@ use serde::{Deserialize, Serialize};
use serde_with::base64::Base64;
use serde_with::serde_as;
/// Request body for the Imagen image generation `predict` endpoint.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PredictImageRequest {
    /// The text prompt instances to generate images for.
    pub instances: Vec<PredictImageRequestPrompt>,
    /// Generation parameters applied to the request.
    pub parameters: PredictImageRequestParameters,
}
/// A text prompt instance for image generation.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PredictImageRequestPrompt {
/// The text prompt for the image.
@@ -20,6 +22,7 @@ pub struct PredictImageRequestPrompt {
pub prompt: String,
}
/// Parameters controlling image generation behavior.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PredictImageRequestParameters {
@@ -139,6 +142,7 @@ pub struct PredictImageRequestParameters {
pub storage_uri: Option<String>,
}
/// Output format options for generated images.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PredictImageRequestParametersOutputOptions {
@@ -155,11 +159,13 @@ pub struct PredictImageRequestParametersOutputOptions {
pub compression_quality: Option<i32>,
}
/// A successful response from the Imagen `predict` endpoint.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PredictImageResponse {
    /// The generated images, one prediction per generated image.
    pub predictions: Vec<PredictImageResponsePrediction>,
}
/// A single generated image from the prediction response.
#[serde_as]
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -169,6 +175,7 @@ pub struct PredictImageResponsePrediction {
pub mime_type: String,
}
/// Controls whether generated images may include people.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PersonGeneration {
@@ -177,6 +184,7 @@ pub enum PersonGeneration {
AllowAll,
}
/// Safety filter level for image generation.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PredictImageSafetySetting {

View File

@@ -3,18 +3,27 @@ use serde::{Deserialize, Serialize};
use crate::error::{Error, Result};
use crate::prelude::VertexApiError;
/// Request body for the text embeddings `predict` endpoint.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TextEmbeddingRequest {
    /// The list of text instances to embed; one prediction is returned per instance.
    pub instances: Vec<TextEmbeddingRequestInstance>,
}
/// A single text instance to embed.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TextEmbeddingRequestInstance {
    /// The text content to generate an embedding for.
    pub content: String,
    /// The task type for the embedding (e.g. `"RETRIEVAL_DOCUMENT"`, `"RETRIEVAL_QUERY"`).
    pub task_type: String,
    /// An optional title for the content (used with retrieval task types).
    // NOTE(review): serializes as `null` when `None` — consider `skip_serializing_if`; confirm API tolerance.
    pub title: Option<String>,
}
/// The raw response from the text embeddings endpoint, which may be a success or an error.
///
/// Use [`into_result`](TextEmbeddingResponse::into_result) to convert into a standard `Result`.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum TextEmbeddingResponse {
@@ -23,13 +32,16 @@ pub enum TextEmbeddingResponse {
}
impl TextEmbeddingResponse {
    /// Converts this response into a `Result`, mapping the error variant to [`crate::error::Error`].
    ///
    /// Delegates to the `From<TextEmbeddingResponse>` impl for `Result`.
    pub fn into_result(self) -> Result<TextEmbeddingResponseOk> {
        self.into()
    }
}
/// A successful response from the text embeddings endpoint.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TextEmbeddingResponseOk {
    /// The embedding predictions, one per input instance.
    pub predictions: Vec<TextEmbeddingPrediction>,
}
@@ -42,19 +54,27 @@ impl From<TextEmbeddingResponse> for Result<TextEmbeddingResponseOk> {
}
}
/// A single embedding prediction, corresponding to one request instance.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TextEmbeddingPrediction {
    /// The embedding result containing the vector and statistics.
    pub embeddings: TextEmbeddingResult,
}
/// The embedding vector and associated statistics.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TextEmbeddingResult {
    /// Statistics about the embedding computation.
    pub statistics: TextEmbeddingStatistics,
    /// The embedding vector, one `f64` component per dimension.
    pub values: Vec<f64>,
}
/// Statistics about a text embedding computation.
// NOTE(review): no `rename_all` here, so fields serialize as snake_case
// (`token_count`) — confirm that matches the API's response schema.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TextEmbeddingStatistics {
    /// Whether the input was truncated to fit the model's context window.
    pub truncated: bool,
    /// The number of tokens in the input.
    pub token_count: u32,
}