// gcloud_bigquery/lib.rs
// Crate-wide clippy relaxations.
// NOTE(review): presumably the generated BigQuery API models have large enum
// variants and large error types, which would trip these lints everywhere —
// confirm before removing either allow.
#![allow(clippy::large_enum_variant)]
#![allow(clippy::result_large_err)]
3//! # google-cloud-bigquery
4//!
5//! Google Cloud Platform BigQuery Client library.
6//!
7//! ## Quick Start
8//!
9//! ### CreateClient
10//!
//! The function `ClientConfig::new_with_auth()` will try to read the credentials from a file specified in the environment variable `GOOGLE_APPLICATION_CREDENTIALS`, `GOOGLE_APPLICATION_CREDENTIALS_JSON` or
//! from a metadata server.
13//!
14//! This is also described in [google-cloud-auth](https://github.com/yoshidan/google-cloud-rust/blob/main/foundation/auth/README.md)
15//!
16//! ```rust
17//! use google_cloud_bigquery::client::{ClientConfig, Client};
18//!
19//! async fn run() {
20//! let (config, project_id) = ClientConfig::new_with_auth().await.unwrap();
21//! let client = Client::new(config).await.unwrap();
22//! }
23//! ```
24//!
//! When you can't use the `gcloud` authentication but you have a different way to get your credentials (e.g. a different environment variable),
//! you can parse your own version of the 'credentials-file' and use it like this:
27//!
28//! ```rust
29//! use google_cloud_auth::credentials::CredentialsFile;
30//! // or google_cloud_bigquery::client::google_cloud_auth::credentials::CredentialsFile
31//! use google_cloud_bigquery::client::{ClientConfig, Client};
32//!
33//! async fn run(cred: CredentialsFile) {
34//! let (config, project_id) = ClientConfig::new_with_credentials(cred).await.unwrap();
35//! let client = Client::new(config).await.unwrap();
36//! }
37//! ```
38//!
39//! ### Read Data
40//!
41//! #### Query
42//! ```rust
43//! use google_cloud_bigquery::http::job::query::QueryRequest;
44//! use google_cloud_bigquery::query::row::Row;
45//! use google_cloud_bigquery::client::Client;
46//!
47//! async fn run(client: &Client, project_id: &str) {
48//! let request = QueryRequest {
49//! query: "SELECT * FROM dataset.table".to_string(),
50//! ..Default::default()
51//! };
52//! let mut iter = client.query::<Row>(project_id, request).await.unwrap();
53//! while let Some(row) = iter.next().await.unwrap() {
54//! let col1 = row.column::<String>(0);
55//! let col2 = row.column::<Option<String>>(1);
56//! }
57//! }
58//! ```
59//!
60//! #### Read Table
61//! ```rust
62//! use google_cloud_bigquery::storage::row::Row;
63//! use google_cloud_bigquery::client::Client;
64//! use google_cloud_bigquery::http::table::TableReference;
65//!
66//! async fn run(client: &Client, project_id: &str) {
67//! let table = TableReference {
68//! project_id: project_id.to_string(),
69//! dataset_id: "dataset".to_string(),
70//! table_id: "table".to_string(),
71//! };
72//! let mut iter = client.read_table::<Row>(&table, None).await.unwrap();
73//! while let Some(row) = iter.next().await.unwrap() {
74//! let col1 = row.column::<String>(0);
75//! let col2 = row.column::<Option<String>>(1);
76//! }
77//! }
78//! ```
79//!
80//! #### Values
81//! Default supported types to decode by `row.column::<T>()` are
82//! * String (for STRING)
83//! * bool (for BOOL)
84//! * i64 (for INT64)
85//! * f64 (for FLOAT)
86//! * bigdecimal::BigDecimal (for NUMERIC, BIGNUMERIC)
87//! * Vec<u8> (for BINARY)
88//! * time::OffsetDateTime (for TIMESTAMP)
89//! * time::Date (for DATE)
90//! * time::Time (for TIME)
91//! * T: StructDecodable (for STRUCT)
92//! - [Example](https://github.com/yoshidan/google-cloud-rust/blob/082f4553e65ffe54d80a81f316a3eee6ddb10093/bigquery/src/http/bigquery_client.rs#L156)
93//! * Option (for all NULLABLE)
94//! * Vec (for ARRAY)
95//!
96//! ### Insert Data
97//!
98//! #### Table data API
99//! ```rust
100//! use google_cloud_bigquery::http::tabledata::insert_all::{InsertAllRequest, Row};
101//! use google_cloud_bigquery::client::Client;
102//!
103//! #[derive(serde::Serialize)]
104//! pub struct TestData {
105//! pub col1: String,
106//! #[serde(with = "time::serde::rfc3339::option")]
107//! pub col_timestamp: Option<time::OffsetDateTime>,
108//! // Must serialize as base64 string to insert binary data
109//! // #[serde(default, with = "Base64Standard")]
110//! pub col_binary: Vec<u8>
111//! }
112//!
113//! async fn run(client: &Client, project_id: &str, data: TestData) {
114//! let data1 = Row {
115//! insert_id: None,
116//! json: data,
117//! };
118//! let request = InsertAllRequest {
119//! rows: vec![data1],
120//! ..Default::default()
121//! };
122//! let result = client.tabledata().insert(project_id, "dataset", "table", &request).await.unwrap();
123//! let error = result.insert_errors;
124//! }
125//! ```
126//! ### Run loading job
//! Example: loading CSV data from GCS
128//! ```rust
129//! use google_cloud_bigquery::client::Client;
130//! use google_cloud_bigquery::http::bigquery_job_client::BigqueryJobClient;
131//! use google_cloud_bigquery::http::job::cancel::CancelJobRequest;
132//! use google_cloud_bigquery::http::job::get::GetJobRequest;
133//! use google_cloud_bigquery::http::job::get_query_results::GetQueryResultsRequest;
134//! use google_cloud_bigquery::http::job::query::QueryRequest;
135//! use google_cloud_bigquery::http::job::{Job, JobConfiguration, JobConfigurationLoad, JobReference, JobState, JobType, OperationType, TrainingType, WriteDisposition};
136//! use google_cloud_bigquery::http::table::{SourceFormat, TableReference};
137//!
138//! async fn run(client: &Client, project_id: &str, data_path: &str) {
139//! let job = Job {
140//! job_reference: JobReference {
141//! project_id: project_id.to_string(),
142//! job_id: "job_id".to_string(),
143//! location: Some("asia-northeast1".to_string())
144//! },
145//! // CSV configuration
146//! configuration: JobConfiguration {
147//! job: JobType::Load(JobConfigurationLoad {
148//! source_uris: vec![format!("gs://{}.csv",data_path)],
149//! source_format: Some(SourceFormat::Csv),
150//! field_delimiter: Some("|".to_string()),
151//! encoding: Some("UTF-8".to_string()),
152//! skip_leading_rows: Some(0),
153//! autodetect: Some(true),
154//! write_disposition: Some(WriteDisposition::WriteTruncate),
155//! destination_table: TableReference {
156//! project_id: project_id.to_string(),
157//! dataset_id: "dataset".to_string(),
158//! table_id: "table".to_string(),
159//! },
160//! ..Default::default()
161//! }),
162//! ..Default::default()
163//! },
164//! ..Default::default()
165//! };
166//!
167//! // Run job
168//! let created = client.job().create(&job).await.unwrap();
169//!
170//! // Check status
171//! assert!(created.status.errors.is_none());
172//! assert!(created.status.error_result.is_none());
173//! assert!(created.status.state == JobState::Running || created.status.state == JobState::Done);
174//! }
175//! ```
176//!
177//! ## Features
178//! ### HTTP API
179//! * [x] [job](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs)
180//! * [x] [tabledata](https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata)
181//! * [x] [dataset](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets)
182//! * [x] [table](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables)
183//! * [x] [model](https://cloud.google.com/bigquery/docs/reference/rest/v2/models)
184//! * [x] [routine](https://cloud.google.com/bigquery/docs/reference/rest/v2/routines)
185//! * [x] [rowAccessPolicy](https://cloud.google.com/bigquery/docs/reference/rest/v2/rowAccessPolicies)
186//! ### Streaming
187//! * [x] [Storage Read API](https://cloud.google.com/bigquery/docs/reference/storage)
188//! * [ ] [Storage Write API](https://cloud.google.com/bigquery/docs/write-api)
189
// Public module tree of the crate.
pub mod client; // `Client` / `ClientConfig` entry points used in the examples above.
pub mod grpc; // NOTE(review): presumably gRPC bindings backing the Storage APIs — confirm.
pub mod http; // REST API request/response types (job, tabledata, dataset, table, model, routine).
pub mod query; // Row decoding for query results (`query::row::Row`).
pub mod storage; // Storage Read API rows (`storage::row::Row`, used by `read_table`).
pub mod storage_write; // Storage Write API support (marked unimplemented in the feature list above).