#![allow(
clippy::expect_used,
clippy::unwrap_used,
clippy::panic,
clippy::missing_panics_doc
)]
use std::path::PathBuf;
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::Duration;
use stygian_browser::{BrowserConfig, BrowserInstance, WaitUntil};
/// Returns a fresh, process-unique path under the system temp directory,
/// suitable as a throwaway Chrome user-data dir. The directory is not
/// created here — only the path is produced.
fn unique_user_data_dir() -> PathBuf {
    // Monotonic per-process counter; combined with the PID it keeps paths
    // unique both within one test run and across concurrent test processes.
    static COUNTER: AtomicU64 = AtomicU64::new(0);
    let n = COUNTER.fetch_add(1, Ordering::Relaxed);
    let pid = std::process::id();
    let mut dir = std::env::temp_dir();
    dir.push(format!("stygian-crawllab-{pid}-{n}"));
    dir
}
fn test_config() -> BrowserConfig {
let mut cfg = BrowserConfig::builder().headless(true).build();
cfg.launch_timeout = Duration::from_secs(30);
cfg.cdp_timeout = Duration::from_secs(15);
cfg.user_data_dir = Some(unique_user_data_dir());
if let Ok(p) = std::env::var("STYGIAN_CHROME_PATH") {
cfg.chrome_path = Some(PathBuf::from(p));
}
cfg
}
/// End-to-end check that a page rendering its content via inline JS
/// produces a non-trivial HTML document once network-idle is reached.
#[tokio::test]
#[ignore = "requires real Chrome binary and network access to crawllab.dev"]
async fn js_inline_renders_content() -> Result<(), Box<dyn std::error::Error>> {
    // Keep a handle to the profile dir so it can be deleted after shutdown;
    // otherwise every run leaks a directory under the system temp dir.
    let config = test_config();
    let user_data_dir = config.user_data_dir.clone();
    let instance = BrowserInstance::launch(config).await?;
    let mut page = instance.new_page().await?;
    page.navigate(
        "https://crawllab.dev/js/inline",
        WaitUntil::NetworkIdle,
        Duration::from_secs(20),
    )
    .await?;
    let html = page.content().await?;
    assert!(
        html.len() > 200,
        "JS-rendered page should have ≥ 200 chars of content, got {} bytes",
        html.len()
    );
    assert!(
        html.contains("<body"),
        "rendered page must include a <body> element"
    );
    page.close().await?;
    instance.shutdown().await?;
    // Best-effort cleanup: Chrome has exited, so the dir should be removable.
    if let Some(dir) = user_data_dir {
        let _ = std::fs::remove_dir_all(dir);
    }
    Ok(())
}
/// End-to-end check that a page whose content is rendered by an external
/// JS file produces a non-trivial HTML document once network-idle is reached.
#[tokio::test]
#[ignore = "requires real Chrome binary and network access to crawllab.dev"]
async fn js_external_renders_content() -> Result<(), Box<dyn std::error::Error>> {
    // Keep a handle to the profile dir so it can be deleted after shutdown;
    // otherwise every run leaks a directory under the system temp dir.
    let config = test_config();
    let user_data_dir = config.user_data_dir.clone();
    let instance = BrowserInstance::launch(config).await?;
    let mut page = instance.new_page().await?;
    page.navigate(
        "https://crawllab.dev/js/external",
        WaitUntil::NetworkIdle,
        Duration::from_secs(20),
    )
    .await?;
    let html = page.content().await?;
    assert!(
        html.len() > 200,
        "externally JS-rendered page should have ≥ 200 chars, got {} bytes",
        html.len()
    );
    assert!(
        html.contains("<body"),
        "rendered page must include a <body> element"
    );
    page.close().await?;
    instance.shutdown().await?;
    // Best-effort cleanup: Chrome has exited, so the dir should be removable.
    if let Some(dir) = user_data_dir {
        let _ = std::fs::remove_dir_all(dir);
    }
    Ok(())
}
/// Smoke test: the browser can navigate to a plain 200 endpoint and the
/// retrieved content is an HTML document.
#[tokio::test]
#[ignore = "requires real Chrome binary and network access to crawllab.dev"]
async fn browser_navigates_status_200() -> Result<(), Box<dyn std::error::Error>> {
    // Keep a handle to the profile dir so it can be deleted after shutdown;
    // otherwise every run leaks a directory under the system temp dir.
    let config = test_config();
    let user_data_dir = config.user_data_dir.clone();
    let instance = BrowserInstance::launch(config).await?;
    let mut page = instance.new_page().await?;
    page.navigate(
        "https://crawllab.dev/200",
        WaitUntil::DomContentLoaded,
        Duration::from_secs(15),
    )
    .await?;
    let html = page.content().await?;
    assert!(
        html.contains("<html") || html.contains("<HTML"),
        "response should be an HTML document, got: {}",
        // `get` returns None if the 200-byte cut lands mid-UTF-8; fall back
        // to an empty preview rather than panicking inside the message.
        html.get(..200.min(html.len())).unwrap_or_default()
    );
    page.close().await?;
    instance.shutdown().await?;
    // Best-effort cleanup: Chrome has exited, so the dir should be removable.
    if let Some(dir) = user_data_dir {
        let _ = std::fs::remove_dir_all(dir);
    }
    Ok(())
}
/// Verifies JS evaluation works on a live page and that the stealth
/// injection hides `navigator.webdriver` from page scripts.
#[tokio::test]
#[ignore = "requires real Chrome binary and network access to crawllab.dev"]
async fn eval_works_on_crawllab_page() -> Result<(), Box<dyn std::error::Error>> {
    // Keep a handle to the profile dir so it can be deleted after shutdown;
    // otherwise every run leaks a directory under the system temp dir.
    let config = test_config();
    let user_data_dir = config.user_data_dir.clone();
    let instance = BrowserInstance::launch(config).await?;
    let mut page = instance.new_page().await?;
    page.navigate(
        "https://crawllab.dev/js/inline",
        WaitUntil::DomContentLoaded,
        Duration::from_secs(15),
    )
    .await?;
    // Sanity check that eval round-trips a numeric result at all.
    let result: f64 = page.eval("1 + 1").await?;
    assert!(
        (result - 2.0).abs() < f64::EPSILON,
        "JS eval sanity check failed: expected 2, got {result}"
    );
    let webdriver_hidden: bool = page
        .eval("typeof navigator.webdriver === 'undefined' || navigator.webdriver === false")
        .await?;
    assert!(
        webdriver_hidden,
        "navigator.webdriver should be hidden by stealth injection"
    );
    page.close().await?;
    instance.shutdown().await?;
    // Best-effort cleanup: Chrome has exited, so the dir should be removable.
    if let Some(dir) = user_data_dir {
        let _ = std::fs::remove_dir_all(dir);
    }
    Ok(())
}