use anyhow::Result;
use clap::Parser;
use futures::StreamExt;
use serde::Deserialize;
use std::{
env, fs,
path::{Path, PathBuf},
process::Command,
};
// Command-line flags, parsed by clap.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on
// clap-derive items would become `--help` text and change CLI behavior.
#[derive(Parser, Debug)]
#[command(author, version, about)]
struct CliFlags {
// Directory the repositories are cloned/fetched into (created if missing).
#[arg(short, long)]
path: String,
// GitHub organization whose repositories are mirrored.
#[arg(short, long)]
org: String,
// Clone over HTTPS instead of the default SSH.
#[arg(long)]
http: bool,
// Use jj (git-backed, colocated) instead of plain git.
#[arg(short, long)]
jj: bool,
// Suppress progress output; clap rejects combining it with --verbose.
#[arg(short, long, default_value_t = false, conflicts_with = "verbose")]
quiet: bool,
// Print extra diagnostics (e.g. token-lookup failures).
#[arg(short, long, default_value_t = false, conflicts_with = "quiet")]
verbose: bool,
// Only clone repositories missing on disk; never fetch existing ones.
#[arg(long, default_value_t = false)]
nofetch: bool,
}
/// Which clone URL from the GitHub API is used when cloning.
#[derive(Debug)]
enum Transport {
/// HTTPS transport (`clone_url`); selected by `--http`.
Http,
/// SSH transport (`ssh_url`); the default.
Ssh,
}
/// Version-control tool used for clone/fetch operations.
#[derive(Debug)]
enum Vcs {
/// Plain `git`; the default.
Git,
/// `jj` with a colocated git store; selected by `--jj`.
JJ,
}
/// Output level selected by the mutually exclusive `--quiet`/`--verbose` flags.
#[derive(Debug)]
enum Verbosity {
Quiet, Normal, Verbose, }
/// The subset of a GitHub REST API repository object this tool needs.
/// Field names match the API's JSON keys so serde can deserialize directly.
#[derive(Debug, Deserialize, Clone)]
struct GHRepo {
/// Repository name; also used as the checkout directory name.
name: String,
/// SSH clone URL.
ssh_url: String,
/// HTTPS clone URL.
clone_url: String,
}
/// Runtime configuration derived from `CliFlags` by `Config::new_from_flags`.
#[derive(Debug)]
struct Config {
/// Trimmed GitHub organization name.
org: String,
/// HTTPS vs SSH clone URLs.
transport: Transport,
/// How much progress output to print.
verbosity: Verbosity,
/// Which VCS tool to shell out to.
vcs: Vcs,
/// When true, only clone missing repositories; never fetch existing ones.
nofetch: bool,
/// Trimmed target directory for checkouts.
path: String,
}
impl Config {
    /// Build a `Config` from parsed CLI flags, collapsing the boolean
    /// flags into their corresponding enums and trimming string inputs.
    pub fn new_from_flags(flags: &CliFlags) -> Config {
        // --http opts into HTTPS clone URLs; SSH is the default.
        let transport = if flags.http {
            Transport::Http
        } else {
            Transport::Ssh
        };
        // clap's `conflicts_with` guarantees quiet/verbose never both hold.
        let verbosity = if flags.quiet {
            Verbosity::Quiet
        } else if flags.verbose {
            Verbosity::Verbose
        } else {
            Verbosity::Normal
        };
        let vcs = if flags.jj { Vcs::JJ } else { Vcs::Git };
        Config {
            org: flags.org.trim().into(),
            transport,
            verbosity,
            vcs,
            nofetch: flags.nofetch,
            // `trim()` borrows, so the previous intermediate `.clone()`
            // was a dead allocation; `.into()` makes the only owned copy.
            path: flags.path.trim().into(),
        }
    }
}
fn main() -> Result<()> {
    let cli_flags = CliFlags::parse();
    // A missing token is not fatal: public orgs can be listed anonymously,
    // but warn so the user knows why private repositories may be absent.
    let token = get_github_token(&cli_flags);
    if token.is_none() {
        println!("Unable to get a GitHub token from the environment");
    }
    // Leak the config and base path to obtain &'static references that the
    // spawned futures can share freely; the process exits right after, so
    // the "leak" lasts only for the program's lifetime.
    let config: &'static Config = Box::leak(Box::new(Config::new_from_flags(&cli_flags)));
    if !matches!(config.verbosity, Verbosity::Quiet) {
        use std::io::Write as _;
        print!("Getting org repository list... ");
        // print! does not flush; flush explicitly so the progress message
        // is visible before the (potentially slow) network round-trips.
        let _ = std::io::stdout().flush();
    }
    let repos = match get_org_repositories(config, token) {
        Ok(r) => r,
        Err(e) => {
            eprintln!();
            eprintln!("Failed to get org repositories: {}\n", e);
            return Err(e);
        }
    };
    if !matches!(config.verbosity, Verbosity::Quiet) {
        println!("Complete!");
    }
    fs::create_dir_all(&config.path)?;
    let base_path: &'static PathBuf = Box::leak(Box::new(fs::canonicalize(&config.path)?));
    // A current-thread runtime suffices: the work is process-spawning and
    // network I/O, multiplexed via buffer_unordered rather than threads.
    tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .expect("failed to build tokio runtime")
        .block_on(async {
            if config.nofetch {
                // --nofetch: clone only repositories missing on disk.
                futures::stream::iter(repos)
                    .filter(|repo| no_existing_repo(base_path, repo.name.clone()))
                    .map(|repo| clone_one_repo(config, repo))
                    .buffer_unordered(100)
                    .for_each(|result| async {
                        if let Err(e) = result {
                            eprintln!("{}", e);
                        }
                    })
                    .await;
            } else {
                // Default: clone missing repositories, fetch existing ones.
                futures::stream::iter(repos)
                    .map(|repo| clone_or_fetch_wrapper(config, base_path, repo))
                    .buffer_unordered(100)
                    .for_each(|result| async {
                        if let Err(e) = result {
                            eprintln!("{}", e);
                        }
                    })
                    .await;
            }
        });
    Ok(())
}
/// Returns `true` when no entry named `name` exists under `base_path`.
/// Errors from the existence probe are treated as "not present", so a
/// clone will be attempted rather than silently skipped.
async fn no_existing_repo(base_path: &Path, name: String) -> bool {
    !matches!(fs::exists(base_path.join(name)), Ok(true))
}
/// Fetch every repository in `config.org` from the GitHub REST API,
/// following `Link`-header pagination until no `rel="next"` page remains.
///
/// # Errors
/// Returns an error if any HTTP request fails or a response body cannot
/// be deserialized into `GHRepo` records.
fn get_org_repositories(config: &Config, token: Option<String>) -> Result<Vec<GHRepo>> {
    let mut url = format!(
        "https://api.github.com/orgs/{}/repos?per_page=100",
        config.org
    );
    let token = token.unwrap_or_default();
    let auth_header = format!("Bearer {}", token);
    let mut repositories = Vec::new();
    loop {
        let mut request = ureq::get(&url)
            .header("User-Agent", "gorc")
            .header("Accept", "application/vnd.github+json");
        // Only attach Authorization when a token exists: sending
        // "Bearer " with an empty credential makes GitHub reject the
        // request outright, whereas omitting the header allows anonymous
        // access to public organizations.
        if !token.is_empty() {
            request = request.header("Authorization", &auth_header);
        }
        let mut resp = request.call()?;
        repositories.append(&mut resp.body_mut().read_json()?);
        // GitHub signals further pages via the `Link` response header.
        let next = match resp.headers().get("link") {
            Some(link) => check_pagination(link.to_str()?),
            None => None,
        };
        match next {
            Some(next_url) => url = next_url,
            None => break,
        }
    }
    Ok(repositories)
}
/// Extract the `rel="next"` URL from a GitHub `Link` response header,
/// stripping the surrounding angle brackets. Returns `None` when the
/// header has no next page (i.e. this was the last page).
fn check_pagination(link_header: &str) -> Option<String> {
    const NEXT_MARKER: &str = "rel=\"next\"";
    // Each comma-separated segment looks like: <https://…>; rel="kind"
    link_header
        .split(',')
        .map(str::trim)
        .find(|segment| segment.contains(NEXT_MARKER))
        .and_then(|segment| segment.split_once(';'))
        .map(|(url_part, _rel)| {
            url_part
                .trim()
                .trim_start_matches('<')
                .trim_end_matches('>')
                .to_owned()
        })
}
/// Locate a GitHub token, trying `gh auth token` first, then the
/// `GITHUB_TOKEN` and `GITHUB_PAT` environment variables, in that order.
///
/// Returns `None` when no non-empty token can be found. Individual
/// lookup failures are only reported when `--verbose` was passed.
fn get_github_token(cli_flags: &CliFlags) -> Option<String> {
    match Command::new("gh").args(["auth", "token"]).output() {
        // Only trust stdout when gh actually succeeded; a failing gh can
        // still produce output we must not mistake for a token.
        Ok(output) if output.status.success() => match String::from_utf8(output.stdout) {
            Ok(token) => {
                // Trim BEFORE the emptiness check: gh terminates its
                // output with a newline, so an "empty" token would
                // otherwise slip through as Some("").
                let token = token.trim();
                if !token.is_empty() {
                    return Some(token.into());
                }
            }
            Err(e) => {
                if cli_flags.verbose {
                    eprintln!("Error parsing gh auth token output: {e}");
                }
            }
        },
        Ok(_) => {
            if cli_flags.verbose {
                eprintln!("gh auth token exited with a non-zero status");
            }
        }
        Err(e) => {
            if cli_flags.verbose {
                eprintln!("Error executing gh auth token: {e}");
            }
        }
    }
    // Fall back to well-known environment variables, in priority order.
    for var in ["GITHUB_TOKEN", "GITHUB_PAT"] {
        match env::var(var) {
            Ok(token) => {
                let token = token.trim();
                if !token.is_empty() {
                    return Some(token.into());
                }
            }
            Err(e) => {
                if cli_flags.verbose {
                    eprintln!("Error reading {var} env var: {e}")
                }
            }
        }
    }
    None
}
/// Fetch the repository when a checkout already exists under `base_path`,
/// otherwise clone it fresh. Errors from the existence probe propagate
/// as `io::Error`.
async fn clone_or_fetch_wrapper(
    config: &Config,
    base_path: &Path,
    repo: GHRepo,
) -> Result<std::process::ExitStatus, std::io::Error> {
    if fs::exists(base_path.join(&repo.name))? {
        fetch_one_repo_sync(config, repo).await
    } else {
        clone_one_repo(config, repo).await
    }
}
/// Clone a single repository into `config.path` with the configured VCS
/// and transport, spawning the external tool and awaiting its exit status.
///
/// # Errors
/// Returns an `io::Error` if the target directory cannot be resolved or
/// the child process fails to spawn.
async fn clone_one_repo(
    config: &Config,
    repo: GHRepo,
) -> Result<std::process::ExitStatus, std::io::Error> {
    let url = match config.transport {
        Transport::Http => &repo.clone_url,
        Transport::Ssh => &repo.ssh_url,
    };
    // Propagate canonicalization failures instead of panicking: the
    // return type already carries io::Error, so `?` fits naturally.
    let path = fs::canonicalize(&config.path)?;
    if !matches!(config.verbosity, Verbosity::Quiet) {
        println!("Cloning: {}", &repo.name);
    }
    let result = match config.vcs {
        Vcs::Git => {
            tokio::process::Command::new("git")
                .current_dir(path)
                .arg("clone")
                .arg(url)
                .stdout(std::process::Stdio::null())
                .stderr(std::process::Stdio::null())
                .spawn()?
                .wait()
                .await
        }
        Vcs::JJ => {
            // jj clones via its git backend; --colocate keeps a usable
            // .git alongside the .jj store.
            tokio::process::Command::new("jj")
                .current_dir(path)
                .arg("git")
                .arg("clone")
                .arg("--colocate")
                .arg(url)
                .stdout(std::process::Stdio::null())
                .stderr(std::process::Stdio::null())
                .spawn()?
                .wait()
                .await
        }
    };
    if !matches!(config.verbosity, Verbosity::Quiet) {
        println!("Complete: {}", &repo.name);
    }
    result
}
/// Fetch the latest refs for an already-cloned repository using the
/// configured VCS, awaiting the external tool's exit status.
///
/// # Errors
/// Returns an `io::Error` if the repository path cannot be resolved or
/// the child process fails to spawn.
async fn fetch_one_repo_sync(
    config: &Config,
    repo: GHRepo,
) -> Result<std::process::ExitStatus, std::io::Error> {
    // Propagate canonicalization failures instead of panicking: the
    // return type already carries io::Error, so `?` fits naturally.
    let path = fs::canonicalize(&config.path)?.join(&repo.name);
    if !matches!(config.verbosity, Verbosity::Quiet) {
        println!("Fetching: {}", &repo.name);
    }
    let result = match config.vcs {
        Vcs::Git => {
            tokio::process::Command::new("git")
                .current_dir(path)
                .arg("fetch")
                .stdout(std::process::Stdio::null())
                .stderr(std::process::Stdio::null())
                .spawn()?
                .wait()
                .await
        }
        Vcs::JJ => {
            tokio::process::Command::new("jj")
                .current_dir(path)
                .arg("git")
                .arg("fetch")
                .stdout(std::process::Stdio::null())
                .stderr(std::process::Stdio::null())
                .spawn()?
                .wait()
                .await
        }
    };
    if !matches!(config.verbosity, Verbosity::Quiet) {
        println!("Complete: {}", &repo.name);
    }
    result
}
#[cfg(test)]
mod tests {
use super::*;
// A Link header containing rel="next" should yield the next-page URL
// with the surrounding angle brackets stripped.
#[test]
fn parse_found_link_header() {
let sample = r#"<https://api.github.com/repositories/1300192/issues?page=2>; rel="prev", <https://api.github.com/repositories/1300192/issues?page=4>; rel="next", <https://api.github.com/repositories/1300192/issues?page=515>; rel="last", <https://api.github.com/repositories/1300192/issues?page=1>; rel="first""#;
let next_url = check_pagination(sample);
assert_eq!(
next_url,
Some("https://api.github.com/repositories/1300192/issues?page=4".to_owned())
)
}
// A Link header without any rel="next" entry (last page) yields None.
#[test]
fn parse_no_found_link_header() {
let sample = r#"<https://api.github.com/repositories/1300192/issues?page=2>; rel="prev", <https://api.github.com/repositories/1300192/issues?page=4>; rel="last", <https://api.github.com/repositories/1300192/issues?page=1>; rel="first""#;
let next_url = check_pagination(sample);
assert_eq!(next_url, None,)
}
}