//! # google-cloud-bigquery
//!
//! Google Cloud Platform BigQuery Client library.
//!
//! ## Quick Start
//!
//! ### CreateClient
//!
//! The function `create()` will try to read the credentials from a file specified in the environment variable `GOOGLE_APPLICATION_CREDENTIALS`, from `GOOGLE_APPLICATION_CREDENTIALS_JSON`, or
//! from the metadata server.
//!
//! This is also described in [google-cloud-auth](https://github.com/yoshidan/google-cloud-rust/blob/main/foundation/auth/README.md)
//!
//! ```rust
//! use google_cloud_bigquery::client::{ClientConfig, Client};
//!
//! async fn run() {
//!     let (config, project_id) = ClientConfig::new_with_auth().await.unwrap();
//!     let client = Client::new(config).await.unwrap();
//! }
//! ```
//!
//! When you can't use the `gcloud` authentication but you have a different way to get your credentials (e.g. a different environment variable),
//! you can parse your own version of the credentials file and use it like this:
//!
//! ```rust
//! use google_cloud_auth::credentials::CredentialsFile;
//! // or google_cloud_bigquery::client::google_cloud_auth::credentials::CredentialsFile
//! use google_cloud_bigquery::client::{ClientConfig, Client};
//!
//! async fn run(cred: CredentialsFile) {
//!     let (config, project_id) = ClientConfig::new_with_credentials(cred).await.unwrap();
//!     let client = Client::new(config).await.unwrap();
//! }
//! ```
//!
//! ### Read Data
//!
//! #### Query
//! ```rust
//! use google_cloud_bigquery::http::job::query::QueryRequest;
//! use google_cloud_bigquery::query::row::Row;
//! use google_cloud_bigquery::client::Client;
//!
//! async fn run(client: &Client, project_id: &str) {
//!     let request = QueryRequest {
//!         query: "SELECT * FROM dataset.table".to_string(),
//!         ..Default::default()
//!     };
//!     let mut iter = client.query::<Row>(project_id, request).await.unwrap();
//!     while let Some(row) = iter.next().await.unwrap() {
//!         let col1 = row.column::<String>(0);
//!         let col2 = row.column::<Option<String>>(1);
//!     }
//! }
//! ```
//!
//! #### Read Table
//! ```rust
//! use google_cloud_bigquery::storage::row::Row;
//! use google_cloud_bigquery::client::Client;
//! use google_cloud_bigquery::http::table::TableReference;
//!
//! async fn run(client: &Client, project_id: &str) {
//!     let table = TableReference {
//!         project_id: project_id.to_string(),
//!         dataset_id: "dataset".to_string(),
//!         table_id: "table".to_string(),
//!     };
//!     let mut iter = client.read_table::<Row>(&table, None).await.unwrap();
//!     while let Some(row) = iter.next().await.unwrap() {
//!         let col1 = row.column::<String>(0);
//!         let col2 = row.column::<Option<String>>(1);
//!     }
//! }
//! ```
//!
//! #### Values
//! Default supported types to decode by `row.column::<T>()` are
//! * String (for STRING)
//! * bool (for BOOL)
//! * i64 (for INT64)
//! * f64 (for FLOAT)
//! * bigdecimal::BigDecimal (for NUMERIC, BIGNUMERIC)
//! * Vec<u8> (for BINARY)
//! * time::OffsetDateTime (for TIMESTAMP)
//! * time::Date (for DATE)
//! * time::Time (for TIME)
//! * T: StructDecodable (for STRUCT)
//! - [Example](https://github.com/yoshidan/google-cloud-rust/blob/082f4553e65ffe54d80a81f316a3eee6ddb10093/bigquery/src/http/bigquery_client.rs#L156)
//! * Option (for all NULLABLE)
//! * Vec (for ARRAY)
//!
//! ### Insert Data
//!
//! #### Table data API
//! ```rust
//! use google_cloud_bigquery::http::tabledata::insert_all::{InsertAllRequest, Row};
//! use google_cloud_bigquery::client::Client;
//!
//! #[derive(serde::Serialize)]
//! pub struct TestData {
//!     pub col1: String,
//!     #[serde(with = "time::serde::rfc3339::option")]
//!     pub col_timestamp: Option<time::OffsetDateTime>,
//!     // Must be serialized as a base64 string to insert binary data
//!     // #[serde(default, with = "Base64Standard")]
//!     pub col_binary: Vec<u8>
//! }
//!
//! async fn run(client: &Client, project_id: &str, data: TestData) {
//!     let data1 = Row {
//!         insert_id: None,
//!         json: data,
//!     };
//!     let request = InsertAllRequest {
//!         rows: vec![data1],
//!         ..Default::default()
//!     };
//!     let result = client.tabledata().insert(project_id, "dataset", "table", &request).await.unwrap();
//!     let error = result.insert_errors;
//! }
//! ```
//! ### Run loading job
//! e.g. loading CSV data from GCS:
//! ```rust
//! use google_cloud_bigquery::client::Client;
//! use google_cloud_bigquery::http::bigquery_job_client::BigqueryJobClient;
//! use google_cloud_bigquery::http::job::cancel::CancelJobRequest;
//! use google_cloud_bigquery::http::job::get::GetJobRequest;
//! use google_cloud_bigquery::http::job::get_query_results::GetQueryResultsRequest;
//! use google_cloud_bigquery::http::job::query::QueryRequest;
//! use google_cloud_bigquery::http::job::{Job, JobConfiguration, JobConfigurationLoad, JobReference, JobState, JobType, OperationType, TrainingType, WriteDisposition};
//! use google_cloud_bigquery::http::table::{SourceFormat, TableReference};
//!
//! async fn run(client: &Client, project_id: &str, data_path: &str) {
//!     let job = Job {
//!         job_reference: JobReference {
//!             project_id: project_id.to_string(),
//!             job_id: "job_id".to_string(),
//!             location: Some("asia-northeast1".to_string())
//!         },
//!         // CSV configuration
//!         configuration: JobConfiguration {
//!             job: JobType::Load(JobConfigurationLoad {
//!                 source_uris: vec![format!("gs://{}.csv", data_path)],
//!                 source_format: Some(SourceFormat::Csv),
//!                 field_delimiter: Some("|".to_string()),
//!                 encoding: Some("UTF-8".to_string()),
//!                 skip_leading_rows: Some(0),
//!                 autodetect: Some(true),
//!                 write_disposition: Some(WriteDisposition::WriteTruncate),
//!                 destination_table: TableReference {
//!                     project_id: project_id.to_string(),
//!                     dataset_id: "dataset".to_string(),
//!                     table_id: "table".to_string(),
//!                 },
//!                 ..Default::default()
//!             }),
//!             ..Default::default()
//!         },
//!         ..Default::default()
//!     };
//!
//!     // Run the job
//!     let created = client.job().create(&job).await.unwrap();
//!
//!     // Check the status
//!     assert!(created.status.errors.is_none());
//!     assert!(created.status.error_result.is_none());
//!     assert!(created.status.state == JobState::Running || created.status.state == JobState::Done);
//! }
//! ```
//!
//! ## Features
//! ### HTTP API
//! * [x] [job](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs)
//! * [x] [tabledata](https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata)
//! * [x] [dataset](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets)
//! * [x] [table](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables)
//! * [x] [model](https://cloud.google.com/bigquery/docs/reference/rest/v2/models)
//! * [x] [routine](https://cloud.google.com/bigquery/docs/reference/rest/v2/routines)
//! * [x] [rowAccessPolicy](https://cloud.google.com/bigquery/docs/reference/rest/v2/rowAccessPolicies)
//! ### Streaming
//! * [x] [Storage Read API](https://cloud.google.com/bigquery/docs/reference/storage)
//! * [ ] [Storage Write API](https://cloud.google.com/bigquery/docs/write-api)
/// BigQuery client entry points: `Client` and `ClientConfig` (see the Quick Start above).
pub mod client;
/// gRPC transport layer — presumably backing the Storage Read API; TODO confirm module contents.
pub mod grpc;
/// Typed bindings for the BigQuery REST (HTTP) v2 API: job, tabledata, dataset, table, model, routine, rowAccessPolicy.
pub mod http;
/// Row decoding for query results returned by `Client::query` (e.g. `query::row::Row`).
pub mod query;
/// Row decoding for tables read via the Storage Read API with `Client::read_table` (e.g. `storage::row::Row`).
pub mod storage;