// google_cloud_bigquery/lib.rs
1//! # google-cloud-bigquery
2//!
3//! Google Cloud Platform BigQuery Client library.
4//!
5//! ## Quick Start
6//!
7//! ### CreateClient
8//!
9//! The function `create()` will try to read the credentials from a file specified in the environment variables `GOOGLE_APPLICATION_CREDENTIALS` or `GOOGLE_APPLICATION_CREDENTIALS_JSON`, or
10//! from a metadata server.
11//!
12//! This is also described in [google-cloud-auth](https://github.com/yoshidan/google-cloud-rust/blob/main/foundation/auth/README.md)
13//!
14//! ```rust
15//! use google_cloud_bigquery::client::{ClientConfig, Client};
16//!
17//! async fn run() {
18//! let (config, project_id) = ClientConfig::new_with_auth().await.unwrap();
19//! let client = Client::new(config).await.unwrap();
20//! }
21//! ```
22//!
23//! When you can't use `gcloud` authentication but you have a different way to get your credentials (e.g. a different environment variable),
24//! you can parse your own version of the 'credentials-file' and use it like this:
25//!
26//! ```rust
27//! use google_cloud_auth::credentials::CredentialsFile;
28//! // or google_cloud_bigquery::client::google_cloud_auth::credentials::CredentialsFile
29//! use google_cloud_bigquery::client::{ClientConfig, Client};
30//!
31//! async fn run(cred: CredentialsFile) {
32//! let (config, project_id) = ClientConfig::new_with_credentials(cred).await.unwrap();
33//! let client = Client::new(config).await.unwrap();
34//! }
35//! ```
36//!
37//! ### Read Data
38//!
39//! #### Query
40//! ```rust
41//! use google_cloud_bigquery::http::job::query::QueryRequest;
42//! use google_cloud_bigquery::query::row::Row;
43//! use google_cloud_bigquery::client::Client;
44//!
45//! async fn run(client: &Client, project_id: &str) {
46//! let request = QueryRequest {
47//! query: "SELECT * FROM dataset.table".to_string(),
48//! ..Default::default()
49//! };
50//! let mut iter = client.query::<Row>(project_id, request).await.unwrap();
51//! while let Some(row) = iter.next().await.unwrap() {
52//! let col1 = row.column::<String>(0);
53//! let col2 = row.column::<Option<String>>(1);
54//! }
55//! }
56//! ```
57//!
58//! #### Read Table
59//! ```rust
60//! use google_cloud_bigquery::storage::row::Row;
61//! use google_cloud_bigquery::client::Client;
62//! use google_cloud_bigquery::http::table::TableReference;
63//!
64//! async fn run(client: &Client, project_id: &str) {
65//! let table = TableReference {
66//! project_id: project_id.to_string(),
67//! dataset_id: "dataset".to_string(),
68//! table_id: "table".to_string(),
69//! };
70//! let mut iter = client.read_table::<Row>(&table, None).await.unwrap();
71//! while let Some(row) = iter.next().await.unwrap() {
72//! let col1 = row.column::<String>(0);
73//! let col2 = row.column::<Option<String>>(1);
74//! }
75//! }
76//! ```
77//!
78//! #### Values
79//! Default supported types to decode by `row.column::<T>()` are
80//! * String (for STRING)
81//! * bool (for BOOL)
82//! * i64 (for INT64)
83//! * f64 (for FLOAT)
84//! * bigdecimal::BigDecimal (for NUMERIC, BIGNUMERIC)
85//! * Vec<u8> (for BINARY)
86//! * time::OffsetDateTime (for TIMESTAMP)
87//! * time::Date (for DATE)
88//! * time::Time (for TIME)
89//! * T: StructDecodable (for STRUCT)
90//! - [Example](https://github.com/yoshidan/google-cloud-rust/blob/082f4553e65ffe54d80a81f316a3eee6ddb10093/bigquery/src/http/bigquery_client.rs#L156)
91//! * Option (for all NULLABLE)
92//! * Vec (for ARRAY)
93//!
94//! ### Insert Data
95//!
96//! #### Table data API
97//! ```rust
98//! use google_cloud_bigquery::http::tabledata::insert_all::{InsertAllRequest, Row};
99//! use google_cloud_bigquery::client::Client;
100//!
101//! #[derive(serde::Serialize)]
102//! pub struct TestData {
103//! pub col1: String,
104//! #[serde(with = "time::serde::rfc3339::option")]
105//! pub col_timestamp: Option<time::OffsetDateTime>,
106//! // Must serialize as base64 string to insert binary data
107//! // #[serde(default, with = "Base64Standard")]
108//! pub col_binary: Vec<u8>
109//! }
110//!
111//! async fn run(client: &Client, project_id: &str, data: TestData) {
112//! let data1 = Row {
113//! insert_id: None,
114//! json: data,
115//! };
116//! let request = InsertAllRequest {
117//! rows: vec![data1],
118//! ..Default::default()
119//! };
120//! let result = client.tabledata().insert(project_id, "dataset", "table", &request).await.unwrap();
121//! let error = result.insert_errors;
122//! }
123//! ```
124//! ### Run loading job
125//! e.g. loading CSV data from GCS
126//! ```rust
127//! use google_cloud_bigquery::client::Client;
128//! use google_cloud_bigquery::http::bigquery_job_client::BigqueryJobClient;
129//! use google_cloud_bigquery::http::job::cancel::CancelJobRequest;
130//! use google_cloud_bigquery::http::job::get::GetJobRequest;
131//! use google_cloud_bigquery::http::job::get_query_results::GetQueryResultsRequest;
132//! use google_cloud_bigquery::http::job::query::QueryRequest;
133//! use google_cloud_bigquery::http::job::{Job, JobConfiguration, JobConfigurationLoad, JobReference, JobState, JobType, OperationType, TrainingType, WriteDisposition};
134//! use google_cloud_bigquery::http::table::{SourceFormat, TableReference};
135//!
136//! async fn run(client: &Client, project_id: &str, data_path: &str) {
137//! let job = Job {
138//! job_reference: JobReference {
139//! project_id: project_id.to_string(),
140//! job_id: "job_id".to_string(),
141//! location: Some("asia-northeast1".to_string())
142//! },
143//! // CSV configuration
144//! configuration: JobConfiguration {
145//! job: JobType::Load(JobConfigurationLoad {
146//! source_uris: vec![format!("gs://{}.csv",data_path)],
147//! source_format: Some(SourceFormat::Csv),
148//! field_delimiter: Some("|".to_string()),
149//! encoding: Some("UTF-8".to_string()),
150//! skip_leading_rows: Some(0),
151//! autodetect: Some(true),
152//! write_disposition: Some(WriteDisposition::WriteTruncate),
153//! destination_table: TableReference {
154//! project_id: project_id.to_string(),
155//! dataset_id: "dataset".to_string(),
156//! table_id: "table".to_string(),
157//! },
158//! ..Default::default()
159//! }),
160//! ..Default::default()
161//! },
162//! ..Default::default()
163//! };
164//!
165//! // Run job
166//! let created = client.job().create(&job).await.unwrap();
167//!
168//! // Check status
169//! assert!(created.status.errors.is_none());
170//! assert!(created.status.error_result.is_none());
171//! assert!(created.status.state == JobState::Running || created.status.state == JobState::Done);
172//! }
173//! ```
174//!
175//! ## Features
176//! ### HTTP API
177//! * [x] [job](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs)
178//! * [x] [tabledata](https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata)
179//! * [x] [dataset](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets)
180//! * [x] [table](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables)
181//! * [x] [model](https://cloud.google.com/bigquery/docs/reference/rest/v2/models)
182//! * [x] [routine](https://cloud.google.com/bigquery/docs/reference/rest/v2/routines)
183//! * [x] [rowAccessPolicy](https://cloud.google.com/bigquery/docs/reference/rest/v2/rowAccessPolicies)
184//! ### Streaming
185//! * [x] [Storage Read API](https://cloud.google.com/bigquery/docs/reference/storage)
186//! * [ ] [Storage Write API](https://cloud.google.com/bigquery/docs/write-api)
187
// Public module layout of the crate.

/// `Client` / `ClientConfig` — the main entry point (see "CreateClient" above).
188pub mod client;
/// gRPC transport layer — presumably generated bindings for the Storage APIs; confirm in module.
189pub mod grpc;
/// REST (HTTP) API clients: job, tabledata, dataset, table, model, routine, rowAccessPolicy.
190pub mod http;
/// Row decoding for query results (`query::row::Row`, see "Query" above).
191pub mod query;
/// Storage Read API support, including `storage::row::Row` (see "Read Table" above).
192pub mod storage;
/// Storage Write API support — marked incomplete in the feature list above; verify status.
193pub mod storage_write;