use std::collections::HashSet;
use night_fury_core::BrowserSession;
use tail_fin_common::page::{ensure_on_domain, page_fetch_with_body};
use tail_fin_common::TailFinError;
use crate::auth::{build_headers, extract_ct0};
use crate::graphql::{default_features, resolve_query_id};
use crate::parsing::parse_search_response;
use crate::types::Tweet;
/// Runs the GraphQL `SearchTimeline` query on X/Twitter and returns up to
/// `count` unique tweets matching `query` (using the "Latest" product).
///
/// Duplicate tweet ids are dropped (first occurrence wins) before the
/// result is capped at `count`.
///
/// # Errors
/// Propagates a `TailFinError` if the session is not on an
/// x.com/twitter.com page, the ct0 token cannot be extracted, the query id
/// cannot be resolved, or the fetch/parse round-trip fails.
pub async fn search_tweets(
    session: &BrowserSession,
    query: &str,
    count: usize,
) -> Result<Vec<Tweet>, TailFinError> {
    // The internal API is only reachable from X/Twitter's own domains.
    ensure_on_domain(session, &["x.com", "twitter.com"]).await?;

    // ct0 token (presumably the CSRF cookie — needed by build_headers).
    let ct0 = extract_ct0(session).await?;
    let headers = build_headers(&ct0);

    // Resolve the current query id for SearchTimeline; the literal serves
    // as the fallback id passed to the resolver.
    let qid = resolve_query_id(session, "SearchTimeline", "lZ0GCEojmtQfiUQa5oJSEw").await?;
    let endpoint = format!("/i/api/graphql/{}/SearchTimeline", qid);

    // Single request payload: variables, feature flags, and field toggles.
    let payload = serde_json::json!({
        "variables": {
            "rawQuery": query,
            "count": count,
            "querySource": "typed_query",
            "product": "Latest"
        },
        "features": default_features(),
        "fieldToggles": {
            "withArticleRichContentState": true,
            "withArticlePlainText": false,
            "withGrokAnalyze": false,
            "withDisallowedReplyControls": false
        },
    });

    let raw = page_fetch_with_body(session, &endpoint, "POST", &headers, Some(&payload)).await?;

    // Parse, then keep only the first occurrence of each tweet id and cap
    // the result length at the caller's requested count.
    let mut results = parse_search_response(&raw);
    let mut seen_ids = HashSet::new();
    results.retain(|tweet| seen_ids.insert(tweet.id.clone()));
    results.truncate(count);
    Ok(results)
}