// gcp_bigquery_client/model/dataset.rs

1use crate::error::BQError;
2use crate::model::dataset_reference::DatasetReference;
3use crate::model::table::Table;
4use crate::Client;
5use std::collections::HashMap;
6
7#[derive(Debug, Default, Clone, Serialize, Deserialize)]
8#[serde(rename_all = "camelCase")]
9pub struct Dataset {
10    /// [Required] A reference that identifies the dataset.
11    pub dataset_reference: DatasetReference,
12    /// A descriptive name for the dataset, if one exists.
13    #[serde(skip_serializing_if = "Option::is_none")]
14    pub friendly_name: Option<String>,
15    /// The fully-qualified, unique, opaque ID of the dataset.
16    #[serde(skip_serializing_if = "Option::is_none")]
17    pub id: Option<String>,
18    /// The resource type. This property always returns the value \"bigquery#dataset\".
19    #[serde(skip_serializing_if = "Option::is_none")]
20    pub kind: Option<String>,
21    /// The labels associated with this dataset. You can use these to organize and group your datasets.
22    #[serde(skip_serializing_if = "Option::is_none")]
23    pub labels: Option<std::collections::HashMap<String, String>>,
24    /// The geographic location where the data resides.
25    #[serde(skip_serializing_if = "Option::is_none")]
26    pub location: Option<String>,
27}
28
29impl Dataset {
30    pub fn new(project_id: &str, dataset_id: &str) -> Self {
31        Self {
32            dataset_reference: DatasetReference {
33                dataset_id: dataset_id.into(),
34                project_id: project_id.into(),
35            },
36            friendly_name: None,
37            id: None,
38            kind: None,
39            labels: None,
40            location: None,
41        }
42    }
43
44    /// Returns the project id of this dataset.
45    pub fn project_id(&self) -> &String {
46        &self.dataset_reference.project_id
47    }
48
49    /// Returns the dataset id of this dataset.
50    pub fn dataset_id(&self) -> &String {
51        &self.dataset_reference.dataset_id
52    }
53
54    /// Sets the location of this dataset.
55    /// # Arguments
56    /// * `location` - The location of this dataset
57    pub fn location(mut self, location: &str) -> Self {
58        self.location = Some(location.into());
59        self
60    }
61
62    /// Sets the friendly name of this dataset.
63    /// # Arguments
64    /// * `friendly_name` - The friendly name of this dataset
65    pub fn friendly_name(mut self, friendly_name: &str) -> Self {
66        self.friendly_name = Some(friendly_name.into());
67        self
68    }
69
70    /// Adds a label to this dataset.
71    /// # Arguments
72    /// * `key` - The label name.
73    /// * `value` - The label value.
74    pub fn label(mut self, key: &str, value: &str) -> Self {
75        if let Some(labels) = self.labels.as_mut() {
76            labels.insert(key.into(), value.into());
77        } else {
78            let mut labels = HashMap::default();
79            labels.insert(key.into(), value.into());
80            self.labels = Some(labels);
81        }
82        self
83    }
84
85    /// Creates a new table.
86    /// # Arguments
87    /// * `client` - The client API.
88    /// * `table` - The table definition.
89    pub async fn create_table(&self, client: &Client, table: Table) -> Result<Table, BQError> {
90        client.table().create(table).await
91    }
92
93    /// Deletes an existing table.
94    /// # Arguments
95    /// * `client` - The client API.
96    /// * `delete_contents` - A flag defining if a dataset must be deleted even if it contains some tables, views, ...
97    pub async fn delete(self, client: &Client, delete_contents: bool) -> Result<(), BQError> {
98        client
99            .dataset()
100            .delete(self.project_id(), self.dataset_id(), delete_contents)
101            .await
102    }
103}