//! posthog_cli/api/symbol_sets.rs — bulk upload of symbol sets (source maps / debug files) to PostHog.

1use anyhow::{anyhow, Context, Result};
2use rayon::iter::{IntoParallelIterator, ParallelIterator};
3use reqwest::blocking::multipart::{Form, Part};
4use serde::{Deserialize, Serialize};
5use std::collections::HashMap;
6use tracing::{info, warn};
7
8use crate::{
9    invocation_context::context,
10    utils::{files::content_hash, raise_for_err},
11};
12
13const MAX_FILE_SIZE: usize = 100 * 1024 * 1024; // 100 MB
14
/// A single symbol set (e.g. a source map or debug file) queued for upload.
#[derive(Debug, Clone)]
pub struct SymbolSetUpload {
    // Identifier linking this symbol set to the chunk it symbolicates.
    pub chunk_id: String,
    // Release this symbol set belongs to, if any.
    pub release_id: Option<String>,

    // Raw file contents. May be intentionally empty — see `cheap_clone`.
    pub data: Vec<u8>,
}
22
/// Per-chunk payload returned by the bulk start-upload endpoint: where to
/// PUT/POST the data, and the server-side id to report back on finish.
#[derive(Debug, Serialize, Deserialize, Clone)]
struct StartUploadResponseData {
    presigned_url: PresignedUrl,
    symbol_set_id: String,
}
28
/// A presigned S3 POST target: the URL plus the form fields that must
/// accompany the file part in the multipart upload.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct PresignedUrl {
    pub url: String,
    pub fields: HashMap<String, String>,
}
34
/// Request body for `bulk_start_upload`: the metadata (ids + content hashes)
/// of every symbol set in the batch.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct BulkUploadStartRequest {
    symbol_sets: Vec<CreateSymbolSetRequest>,
}
39
/// Response body for `bulk_start_upload`. Keyed by chunk id; chunks the
/// server already has are omitted, so this can be smaller than the request.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct BulkUploadStartResponse {
    id_map: HashMap<String, StartUploadResponseData>,
}
44
/// Request body for `bulk_finish_upload`: symbol_set_id -> content hash of
/// each successfully uploaded file.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct BulkUploadFinishRequest {
    content_hashes: HashMap<String, String>,
}
49
50pub fn upload(input_sets: &[SymbolSetUpload], batch_size: usize) -> Result<()> {
51    let upload_requests: Vec<_> = input_sets
52        .iter()
53        .filter(|s| {
54            if s.data.len() > MAX_FILE_SIZE {
55                warn!(
56                    "Skipping symbol set with id: {}, file too large",
57                    s.chunk_id
58                );
59            }
60            s.data.len() <= MAX_FILE_SIZE
61        })
62        .collect();
63
64    for (i, batch) in upload_requests.chunks(batch_size).enumerate() {
65        info!("Starting upload of batch {i}, {} symbol sets", batch.len());
66        let start_response = start_upload(batch)?;
67
68        let id_map: HashMap<_, _> = batch.iter().map(|u| (u.chunk_id.as_str(), u)).collect();
69
70        info!(
71            "Server returned {} upload keys ({} skipped as already present)",
72            start_response.id_map.len(),
73            batch.len() - start_response.id_map.len()
74        );
75
76        let res: Result<HashMap<String, String>> = start_response
77            .id_map
78            .into_par_iter()
79            .map(|(chunk_id, data)| {
80                info!("Uploading chunk {}", chunk_id);
81                let upload = id_map.get(chunk_id.as_str()).ok_or(anyhow!(
82                    "Got a chunk ID back from posthog that we didn't expect!"
83                ))?;
84
85                let content_hash = content_hash([&upload.data]);
86                upload_to_s3(data.presigned_url.clone(), &upload.data)?;
87                Ok((data.symbol_set_id, content_hash))
88            })
89            .collect();
90
91        let content_hashes = res?;
92
93        finish_upload(content_hashes)?;
94    }
95
96    Ok(())
97}
98
99fn start_upload(symbol_sets: &[&SymbolSetUpload]) -> Result<BulkUploadStartResponse> {
100    let client = &context().client;
101
102    let request = BulkUploadStartRequest {
103        symbol_sets: symbol_sets
104            .iter()
105            .map(|s| CreateSymbolSetRequest::new(s))
106            .collect(),
107    };
108
109    let res = client
110        .send_post("error_tracking/symbol_sets/bulk_start_upload", |req| {
111            req.json(&request)
112        })
113        .context("Failed to start upload")?;
114
115    Ok(res.json()?)
116}
117
118fn upload_to_s3(presigned_url: PresignedUrl, data: &[u8]) -> Result<()> {
119    let client = &context().build_http_client()?;
120    let mut last_err = None;
121    let mut delay = std::time::Duration::from_millis(500);
122    for attempt in 1..=3 {
123        let mut form = Form::new();
124        for (key, value) in &presigned_url.fields {
125            form = form.text(key.clone(), value.clone());
126        }
127        let part = Part::bytes(data.to_vec());
128        form = form.part("file", part);
129
130        let res = client.post(&presigned_url.url).multipart(form).send();
131
132        match res {
133            Result::Ok(resp) => {
134                last_err = raise_for_err(resp).err();
135                if last_err.is_none() {
136                    return Ok(());
137                }
138            }
139            Result::Err(e) => {
140                last_err = Some(anyhow!("Failed to upload chunk: {e:?}"));
141            }
142        }
143        if attempt < 3 {
144            warn!("Upload attempt {attempt} failed, retrying in {delay:?}...",);
145            std::thread::sleep(delay);
146            delay *= 2;
147        }
148    }
149    Err(last_err.unwrap_or_else(|| anyhow!("Unknown error during upload")))
150}
151
152fn finish_upload(content_hashes: HashMap<String, String>) -> Result<()> {
153    let client = &context().client;
154    let request = BulkUploadFinishRequest { content_hashes };
155
156    client
157        .send_post("error_tracking/symbol_sets/bulk_finish_upload", |req| {
158            req.json(&request)
159        })
160        .context("Failed to finish upload")?;
161
162    Ok(())
163}
164
165impl SymbolSetUpload {
166    pub fn cheap_clone(&self) -> Self {
167        Self {
168            chunk_id: self.chunk_id.clone(),
169            release_id: self.release_id.clone(),
170            data: vec![],
171        }
172    }
173}
174
/// Wire representation of one symbol set in the bulk start-upload request:
/// the ids plus a content hash so the server can skip chunks it already has.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct CreateSymbolSetRequest {
    chunk_id: String,
    release_id: Option<String>,
    content_hash: String,
}
181
182impl CreateSymbolSetRequest {
183    pub fn new(inner: &SymbolSetUpload) -> Self {
184        Self {
185            chunk_id: inner.chunk_id.clone(),
186            release_id: inner.release_id.clone(),
187            content_hash: content_hash([&inner.data]),
188        }
189    }
190}