[O] Cleanup codebase

This commit is contained in:
2026-05-07 18:08:33 +00:00
parent 12b75ae6d3
commit 496a57f48d
15 changed files with 2245 additions and 2225 deletions
+13 -477
View File
@@ -344,9 +344,8 @@ impl GitMirror {
}
fn remote_url(&self, name: &str) -> Result<Option<String>> {
let output = Command::new("git")
.arg("--git-dir")
.arg(&self.path)
let output = self
.command()
.args(["remote", "get-url", name])
.output()
.with_context(|| "failed to run git remote get-url")?;
@@ -434,9 +433,8 @@ impl GitMirror {
}
fn is_ancestor(&self, ancestor: &str, descendant: &str) -> Result<bool> {
let status = Command::new("git")
.arg("--git-dir")
.arg(&self.path)
let status = self
.command()
.args(["merge-base", "--is-ancestor", ancestor, descendant])
.status()
.with_context(|| "failed to run git merge-base")?;
@@ -463,9 +461,8 @@ impl GitMirror {
if self.dry_run {
return Ok(String::new());
}
let output = Command::new("git")
.arg("--git-dir")
.arg(&self.path)
let output = self
.command()
.args(args)
.output()
.with_context(|| "failed to run git")?;
@@ -481,6 +478,12 @@ impl GitMirror {
.into())
}
}
// Build the base `git` invocation for this mirror: every subcommand runs
// against the mirror's bare repository via `--git-dir`, so callers only
// append subcommand-specific arguments.
fn command(&self) -> Command {
let mut command = Command::new("git");
command.arg("--git-dir").arg(&self.path);
command
}
}
pub fn ls_remote_refs(remote: &RemoteSpec, redactor: &Redactor) -> Result<RemoteRefSnapshot> {
@@ -674,471 +677,4 @@ pub fn safe_remote_name(value: &str) -> String {
}
#[cfg(test)]
mod tests {
use super::*;
use std::io::Write;
use tempfile::TempDir;
#[test]
fn remote_names_are_git_friendly() {
assert_eq!(
safe_remote_name("github:alice/project"),
"github_alice_project"
);
}
#[test]
fn redacts_all_secrets() {
let redactor = Redactor::new(vec!["secret".to_string()]);
assert_eq!(
redactor.redact("https://secret@example.test"),
"https://<redacted>@example.test"
);
}
#[test]
fn detects_provider_disabled_repository_errors() {
let error: anyhow::Error = GitCommandError::new(
"git",
"",
"remote: Access to this repository has been disabled by GitHub staff.\nfatal: unable to access 'https://github.com/alice/repo.git/': The requested URL returned error: 403",
)
.into();
assert!(is_disabled_repository_error(&error));
let generic_forbidden: anyhow::Error = GitCommandError::new(
"git",
"",
"fatal: unable to access 'https://github.com/alice/repo.git/': The requested URL returned error: 403",
)
.into();
assert!(!is_disabled_repository_error(&generic_forbidden));
}
#[test]
fn ls_remote_snapshot_changes_when_remote_refs_change() {
let fixture = GitFixture::new();
fixture.commit("base", "base", 1_700_000_000);
fixture.tag("v1");
fixture.push_head(&fixture.remote_a, "main");
fixture.push_tag(&fixture.remote_a, "v1");
let remote = fixture.remotes().remove(0);
let redactor = Redactor::new(Vec::new());
let first = ls_remote_refs(&remote, &redactor).unwrap();
let unchanged = ls_remote_refs(&remote, &redactor).unwrap();
assert_eq!(first, unchanged);
assert_eq!(first.refs, 2);
fixture.commit("feature", "feature", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "feature");
let changed = ls_remote_refs(&remote, &redactor).unwrap();
assert_ne!(first.hash, changed.hash);
assert_eq!(changed.refs, 3);
}
#[test]
fn branch_decisions_choose_fast_forward_tip() {
let fixture = GitFixture::new();
let base = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let newer = fixture.commit("newer", "newer", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
assert!(conflicts.is_empty());
let main = find_branch(&decisions, "main");
assert_eq!(main.sha, newer);
assert_eq!(main.source_remotes, vec!["a".to_string()]);
assert_eq!(main.target_remotes, vec!["b".to_string()]);
assert_ne!(main.sha, base);
}
#[test]
fn branch_decisions_do_not_target_remotes_that_already_match() {
let fixture = GitFixture::new();
fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
assert!(conflicts.is_empty());
let main = find_branch(&decisions, "main");
assert_eq!(main.source_remotes, vec!["a".to_string(), "b".to_string()]);
assert!(main.target_remotes.is_empty());
}
#[test]
fn cached_remote_refs_match_ls_remote_snapshot_after_fetch() {
let fixture = GitFixture::new();
fixture.commit("base", "base", 1_700_000_000);
fixture.tag("v1");
fixture.push_head(&fixture.remote_a, "main");
fixture.push_tag(&fixture.remote_a, "v1");
let mirror = fixture.mirror();
let remote = fixture.remotes().remove(0);
assert!(
!mirror
.cached_remote_refs_match(
&remote,
&ls_remote_refs(&remote, &Redactor::new(Vec::new())).unwrap(),
)
.unwrap()
);
mirror.fetch_remote(&remote).unwrap();
let snapshot = ls_remote_refs(&remote, &Redactor::new(Vec::new())).unwrap();
assert!(mirror.cached_remote_refs_match(&remote, &snapshot).unwrap());
fixture.commit("newer", "newer", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
let changed = ls_remote_refs(&remote, &Redactor::new(Vec::new())).unwrap();
assert!(!mirror.cached_remote_refs_match(&remote, &changed).unwrap());
}
#[test]
fn branch_decisions_report_divergent_tips_without_force() {
let fixture = GitFixture::new();
let base = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let a_tip = fixture.commit("a", "a", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
fixture.reset_hard(&base);
let b_tip = fixture.commit("b", "b", 1_700_000_200);
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
assert!(decisions.is_empty());
assert_eq!(conflicts.len(), 1);
assert_eq!(conflicts[0].branch, "main");
assert!(conflicts[0].tips.iter().any(|(_, sha)| sha == &a_tip));
assert!(conflicts[0].tips.iter().any(|(_, sha)| sha == &b_tip));
}
#[test]
fn branch_decisions_force_selects_newest_divergent_tip() {
let fixture = GitFixture::new();
let base = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let older = fixture.commit("older", "older", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
fixture.reset_hard(&base);
let newer = fixture.commit("newer", "newer", 1_700_000_200);
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), true).unwrap();
assert!(conflicts.is_empty());
let main = find_branch(&decisions, "main");
assert_eq!(main.sha, newer);
assert_ne!(main.sha, older);
assert_eq!(main.source_remotes, vec!["b".to_string()]);
assert_eq!(main.target_remotes, vec!["a".to_string()]);
}
#[test]
fn push_branches_creates_missing_branch_on_other_remotes() {
let fixture = GitFixture::new();
let expected = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
assert!(conflicts.is_empty());
mirror
.push_branches(&fixture.remotes(), &decisions, false)
.unwrap();
assert_eq!(
fixture.remote_ref(&fixture.remote_b, "refs/heads/main"),
expected
);
}
#[test]
fn delete_branches_removes_branch_from_target_remotes() {
let fixture = GitFixture::new();
fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
mirror
.delete_branches(
&fixture.remotes(),
&[BranchDeletion {
branch: "main".to_string(),
deleted_remotes: vec!["a".to_string()],
target_remotes: vec!["b".to_string()],
}],
)
.unwrap();
assert!(fixture.remote_ref_exists(&fixture.remote_a, "refs/heads/main"));
assert!(!fixture.remote_ref_exists(&fixture.remote_b, "refs/heads/main"));
}
#[test]
fn tag_decisions_mirror_matching_or_missing_tags_and_skip_divergent_tags() {
let fixture = GitFixture::new();
let base = fixture.commit("base", "base", 1_700_000_000);
fixture.tag("v1");
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
fixture.push_tag(&fixture.remote_a, "v1");
fixture.push_tag(&fixture.remote_b, "v1");
let a_tip = fixture.commit("a", "a", 1_700_000_100);
fixture.tag("release");
fixture.push_head(&fixture.remote_a, "main");
fixture.push_tag(&fixture.remote_a, "release");
fixture.delete_tag("release");
fixture.reset_hard(&base);
let b_tip = fixture.commit("b", "b", 1_700_000_200);
fixture.tag("release");
fixture.push_head(&fixture.remote_b, "main");
fixture.push_tag(&fixture.remote_b, "release");
fixture.delete_tag("missing-on-b");
fixture.reset_hard(&a_tip);
fixture.tag("missing-on-b");
fixture.push_tag(&fixture.remote_a, "missing-on-b");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (tags, conflicts) = mirror.tag_decisions(&fixture.remotes()).unwrap();
assert_eq!(find_tag(&tags, "v1").sha, base);
assert!(find_tag(&tags, "v1").target_remotes.is_empty());
assert_eq!(find_tag(&tags, "missing-on-b").sha, a_tip);
assert_eq!(
find_tag(&tags, "missing-on-b").target_remotes,
vec!["b".to_string()]
);
assert_eq!(conflicts.len(), 1);
assert_eq!(conflicts[0].tag, "release");
assert!(conflicts[0].tips.iter().any(|(_, sha)| sha == &a_tip));
assert!(conflicts[0].tips.iter().any(|(_, sha)| sha == &b_tip));
mirror.push_tags(&fixture.remotes(), &tags).unwrap();
assert_eq!(
fixture.remote_ref(&fixture.remote_b, "refs/tags/missing-on-b"),
a_tip
);
}
fn find_branch<'a>(decisions: &'a [BranchDecision], name: &str) -> &'a BranchDecision {
decisions
.iter()
.find(|decision| decision.branch == name)
.unwrap_or_else(|| panic!("missing branch decision for {name}"))
}
fn find_tag<'a>(decisions: &'a [TagDecision], name: &str) -> &'a TagDecision {
decisions
.iter()
.find(|decision| decision.tag == name)
.unwrap_or_else(|| panic!("missing tag decision for {name}"))
}
struct GitFixture {
_temp: TempDir,
work: PathBuf,
mirror_path: PathBuf,
remote_a: PathBuf,
remote_b: PathBuf,
}
impl GitFixture {
fn new() -> Self {
let temp = TempDir::new().unwrap();
let work = temp.path().join("work");
let mirror_path = temp.path().join("mirror.git");
let remote_a = temp.path().join("a.git");
let remote_b = temp.path().join("b.git");
git(None, ["init", "--bare", remote_a.to_str().unwrap()]);
git(None, ["init", "--bare", remote_b.to_str().unwrap()]);
fs::create_dir_all(&work).unwrap();
git(Some(&work), ["init"]);
git(Some(&work), ["config", "user.email", "test@example.test"]);
git(Some(&work), ["config", "user.name", "Test User"]);
git(Some(&work), ["checkout", "-b", "main"]);
Self {
_temp: temp,
work,
mirror_path,
remote_a,
remote_b,
}
}
fn mirror(&self) -> GitMirror {
let mirror =
GitMirror::open(self.mirror_path.clone(), Redactor::new(Vec::new()), false)
.unwrap();
mirror.configure_remotes(&self.remotes()).unwrap();
mirror
}
fn remotes(&self) -> Vec<RemoteSpec> {
vec![
RemoteSpec {
name: "a".to_string(),
url: self.remote_a.to_string_lossy().to_string(),
display: "remote a".to_string(),
},
RemoteSpec {
name: "b".to_string(),
url: self.remote_b.to_string_lossy().to_string(),
display: "remote b".to_string(),
},
]
}
fn fetch_all(&self, mirror: &GitMirror) {
for remote in self.remotes() {
mirror.fetch_remote(&remote).unwrap();
}
}
fn commit(&self, message: &str, contents: &str, timestamp: i64) -> String {
let path = self.work.join("file.txt");
let mut file = fs::OpenOptions::new()
.create(true)
.append(true)
.open(path)
.unwrap();
writeln!(file, "{contents}").unwrap();
git(Some(&self.work), ["add", "file.txt"]);
let date = format!("@{timestamp} +0000");
let output = Command::new("git")
.current_dir(&self.work)
.env("GIT_AUTHOR_DATE", &date)
.env("GIT_COMMITTER_DATE", &date)
.args(["commit", "-m", message])
.output()
.unwrap();
assert_success(&output, "git commit");
self.head()
}
fn head(&self) -> String {
git_output(Some(&self.work), ["rev-parse", "HEAD"])
}
fn reset_hard(&self, sha: &str) {
git(Some(&self.work), ["reset", "--hard", sha]);
}
fn push_head(&self, remote: &Path, branch: &str) {
let refspec = format!("HEAD:refs/heads/{branch}");
git(
Some(&self.work),
["push", remote.to_str().unwrap(), &refspec],
);
}
fn tag(&self, name: &str) {
git(Some(&self.work), ["tag", name]);
}
fn delete_tag(&self, name: &str) {
let _ = Command::new("git")
.current_dir(&self.work)
.args(["tag", "-d", name])
.output()
.unwrap();
}
fn push_tag(&self, remote: &Path, tag: &str) {
let refspec = format!("refs/tags/{tag}:refs/tags/{tag}");
git(
Some(&self.work),
["push", remote.to_str().unwrap(), &refspec],
);
}
fn remote_ref(&self, remote: &Path, reference: &str) -> String {
git_output(
None,
[
"--git-dir",
remote.to_str().unwrap(),
"rev-parse",
reference,
],
)
}
fn remote_ref_exists(&self, remote: &Path, reference: &str) -> bool {
git_command(
None,
[
"--git-dir",
remote.to_str().unwrap(),
"rev-parse",
"--verify",
reference,
],
)
.output()
.unwrap()
.status
.success()
}
}
fn git<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) {
let output = git_command(current_dir, args).output().unwrap();
assert_success(&output, "git");
}
fn git_output<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) -> String {
let output = git_command(current_dir, args).output().unwrap();
assert_success(&output, "git output");
String::from_utf8_lossy(&output.stdout).trim().to_string()
}
fn git_command<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) -> Command {
let mut command = Command::new("git");
command.args(args);
if let Some(current_dir) = current_dir {
command.current_dir(current_dir);
}
command
}
fn assert_success(output: &std::process::Output, label: &str) {
assert!(
output.status.success(),
"{label} failed\nstdout: {}\nstderr: {}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
);
}
}
mod tests;
+465
View File
@@ -0,0 +1,465 @@
use super::*;
use std::io::Write;
use tempfile::TempDir;
#[test]
fn remote_names_are_git_friendly() {
    // Provider-qualified identifiers must be flattened into names git
    // accepts as remote names (no ':' or '/').
    let sanitized = safe_remote_name("github:alice/project");
    assert_eq!(sanitized, "github_alice_project");
}
// A secret registered with the redactor is masked wherever it appears
// in a URL (here: the userinfo portion).
#[test]
fn redacts_all_secrets() {
let redactor = Redactor::new(vec!["secret".to_string()]);
assert_eq!(
redactor.redact("https://secret@example.test"),
"https://<redacted>@example.test"
);
}
// Only errors carrying the provider's "disabled by staff" message count as
// disabled-repository errors; a plain HTTP 403 must not.
#[test]
fn detects_provider_disabled_repository_errors() {
let error: anyhow::Error = GitCommandError::new(
"git",
"",
"remote: Access to this repository has been disabled by GitHub staff.\nfatal: unable to access 'https://github.com/alice/repo.git/': The requested URL returned error: 403",
)
.into();
assert!(is_disabled_repository_error(&error));
// Same 403 without the staff-disabled banner: generic forbidden, not disabled.
let generic_forbidden: anyhow::Error = GitCommandError::new(
"git",
"",
"fatal: unable to access 'https://github.com/alice/repo.git/': The requested URL returned error: 403",
)
.into();
assert!(!is_disabled_repository_error(&generic_forbidden));
}
// The ls-remote snapshot is stable across identical remote states and its
// hash/ref-count change once a new ref is pushed.
#[test]
fn ls_remote_snapshot_changes_when_remote_refs_change() {
let fixture = GitFixture::new();
fixture.commit("base", "base", 1_700_000_000);
fixture.tag("v1");
fixture.push_head(&fixture.remote_a, "main");
fixture.push_tag(&fixture.remote_a, "v1");
let remote = fixture.remotes().remove(0);
let redactor = Redactor::new(Vec::new());
let first = ls_remote_refs(&remote, &redactor).unwrap();
let unchanged = ls_remote_refs(&remote, &redactor).unwrap();
// Two snapshots of the same remote state compare equal.
assert_eq!(first, unchanged);
// refs/heads/main + refs/tags/v1.
assert_eq!(first.refs, 2);
fixture.commit("feature", "feature", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "feature");
let changed = ls_remote_refs(&remote, &redactor).unwrap();
assert_ne!(first.hash, changed.hash);
assert_eq!(changed.refs, 3);
}
// When remote "a" is strictly ahead of "b" on main, the decision picks a's
// tip, sources from "a", and targets only the stale remote "b".
#[test]
fn branch_decisions_choose_fast_forward_tip() {
let fixture = GitFixture::new();
let base = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let newer = fixture.commit("newer", "newer", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
assert!(conflicts.is_empty());
let main = find_branch(&decisions, "main");
assert_eq!(main.sha, newer);
assert_eq!(main.source_remotes, vec!["a".to_string()]);
assert_eq!(main.target_remotes, vec!["b".to_string()]);
assert_ne!(main.sha, base);
}
// When both remotes already agree on main, both count as sources and
// nothing needs to be pushed.
#[test]
fn branch_decisions_do_not_target_remotes_that_already_match() {
let fixture = GitFixture::new();
fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
assert!(conflicts.is_empty());
let main = find_branch(&decisions, "main");
assert_eq!(main.source_remotes, vec!["a".to_string(), "b".to_string()]);
assert!(main.target_remotes.is_empty());
}
// The mirror's cached refs only match a live ls-remote snapshot after a
// fetch, and stop matching once the remote moves again.
#[test]
fn cached_remote_refs_match_ls_remote_snapshot_after_fetch() {
let fixture = GitFixture::new();
fixture.commit("base", "base", 1_700_000_000);
fixture.tag("v1");
fixture.push_head(&fixture.remote_a, "main");
fixture.push_tag(&fixture.remote_a, "v1");
let mirror = fixture.mirror();
let remote = fixture.remotes().remove(0);
// Before any fetch the cache cannot match the remote.
assert!(
!mirror
.cached_remote_refs_match(
&remote,
&ls_remote_refs(&remote, &Redactor::new(Vec::new())).unwrap(),
)
.unwrap()
);
mirror.fetch_remote(&remote).unwrap();
let snapshot = ls_remote_refs(&remote, &Redactor::new(Vec::new())).unwrap();
assert!(mirror.cached_remote_refs_match(&remote, &snapshot).unwrap());
// A new push on the remote invalidates the cached match.
fixture.commit("newer", "newer", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
let changed = ls_remote_refs(&remote, &Redactor::new(Vec::new())).unwrap();
assert!(!mirror.cached_remote_refs_match(&remote, &changed).unwrap());
}
// Divergent tips (a and b branched from the same base) produce no decision
// without force; instead both tips are reported as a conflict.
#[test]
fn branch_decisions_report_divergent_tips_without_force() {
let fixture = GitFixture::new();
let base = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let a_tip = fixture.commit("a", "a", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
// Rewind and create a divergent history for remote b.
fixture.reset_hard(&base);
let b_tip = fixture.commit("b", "b", 1_700_000_200);
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
assert!(decisions.is_empty());
assert_eq!(conflicts.len(), 1);
assert_eq!(conflicts[0].branch, "main");
assert!(conflicts[0].tips.iter().any(|(_, sha)| sha == &a_tip));
assert!(conflicts[0].tips.iter().any(|(_, sha)| sha == &b_tip));
}
// With force enabled, divergent tips are resolved by committer timestamp:
// the newest tip wins and the other remote becomes the push target.
#[test]
fn branch_decisions_force_selects_newest_divergent_tip() {
let fixture = GitFixture::new();
let base = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let older = fixture.commit("older", "older", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
fixture.reset_hard(&base);
// b's tip is committed later (larger timestamp) so it should win.
let newer = fixture.commit("newer", "newer", 1_700_000_200);
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), true).unwrap();
assert!(conflicts.is_empty());
let main = find_branch(&decisions, "main");
assert_eq!(main.sha, newer);
assert_ne!(main.sha, older);
assert_eq!(main.source_remotes, vec!["b".to_string()]);
assert_eq!(main.target_remotes, vec!["a".to_string()]);
}
// A branch that exists only on remote "a" is created on remote "b" by
// push_branches, pointing at the same commit.
#[test]
fn push_branches_creates_missing_branch_on_other_remotes() {
let fixture = GitFixture::new();
let expected = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (decisions, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
assert!(conflicts.is_empty());
mirror
.push_branches(&fixture.remotes(), &decisions, false)
.unwrap();
assert_eq!(
fixture.remote_ref(&fixture.remote_b, "refs/heads/main"),
expected
);
}
// delete_branches removes the branch only on the listed target remotes
// ("b"), leaving the remote recorded as already-deleted ("a") untouched.
#[test]
fn delete_branches_removes_branch_from_target_remotes() {
let fixture = GitFixture::new();
fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
mirror
.delete_branches(
&fixture.remotes(),
&[BranchDeletion {
branch: "main".to_string(),
deleted_remotes: vec!["a".to_string()],
target_remotes: vec!["b".to_string()],
}],
)
.unwrap();
assert!(fixture.remote_ref_exists(&fixture.remote_a, "refs/heads/main"));
assert!(!fixture.remote_ref_exists(&fixture.remote_b, "refs/heads/main"));
}
// Tags agreeing on both remotes ("v1") need no targets, a tag missing on one
// remote ("missing-on-b") is mirrored there, and a tag pointing at different
// commits per remote ("release") is reported as a conflict and skipped.
#[test]
fn tag_decisions_mirror_matching_or_missing_tags_and_skip_divergent_tags() {
let fixture = GitFixture::new();
let base = fixture.commit("base", "base", 1_700_000_000);
fixture.tag("v1");
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
fixture.push_tag(&fixture.remote_a, "v1");
fixture.push_tag(&fixture.remote_b, "v1");
// "release" on remote a points at a_tip ...
let a_tip = fixture.commit("a", "a", 1_700_000_100);
fixture.tag("release");
fixture.push_head(&fixture.remote_a, "main");
fixture.push_tag(&fixture.remote_a, "release");
fixture.delete_tag("release");
fixture.reset_hard(&base);
// ... while "release" on remote b points at a divergent b_tip.
let b_tip = fixture.commit("b", "b", 1_700_000_200);
fixture.tag("release");
fixture.push_head(&fixture.remote_b, "main");
fixture.push_tag(&fixture.remote_b, "release");
// "missing-on-b" exists only on remote a (delete first in case of leftovers).
fixture.delete_tag("missing-on-b");
fixture.reset_hard(&a_tip);
fixture.tag("missing-on-b");
fixture.push_tag(&fixture.remote_a, "missing-on-b");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let (tags, conflicts) = mirror.tag_decisions(&fixture.remotes()).unwrap();
assert_eq!(find_tag(&tags, "v1").sha, base);
assert!(find_tag(&tags, "v1").target_remotes.is_empty());
assert_eq!(find_tag(&tags, "missing-on-b").sha, a_tip);
assert_eq!(
find_tag(&tags, "missing-on-b").target_remotes,
vec!["b".to_string()]
);
assert_eq!(conflicts.len(), 1);
assert_eq!(conflicts[0].tag, "release");
assert!(conflicts[0].tips.iter().any(|(_, sha)| sha == &a_tip));
assert!(conflicts[0].tips.iter().any(|(_, sha)| sha == &b_tip));
// Pushing the decisions materialises the missing tag on remote b.
mirror.push_tags(&fixture.remotes(), &tags).unwrap();
assert_eq!(
fixture.remote_ref(&fixture.remote_b, "refs/tags/missing-on-b"),
a_tip
);
}
// Return the decision for `name`, panicking with context when the branch
// has no decision (test helper).
fn find_branch<'a>(decisions: &'a [BranchDecision], name: &str) -> &'a BranchDecision {
    for decision in decisions {
        if decision.branch == name {
            return decision;
        }
    }
    panic!("missing branch decision for {name}")
}
// Return the decision for tag `name`, panicking with context when absent
// (test helper).
fn find_tag<'a>(decisions: &'a [TagDecision], name: &str) -> &'a TagDecision {
    for decision in decisions {
        if decision.tag == name {
            return decision;
        }
    }
    panic!("missing tag decision for {name}")
}
// Test harness: one TempDir holding a working repository (where commits and
// tags are authored), two bare repositories acting as remotes "a" and "b",
// and a path reserved for the mirror under test.
struct GitFixture {
// Held only to keep the temporary directory alive for the fixture's lifetime.
_temp: TempDir,
// Working repository used to author commits/tags.
work: PathBuf,
// Location of the bare mirror repository (opened by `mirror()`).
mirror_path: PathBuf,
// Bare repository serving as remote "a".
remote_a: PathBuf,
// Bare repository serving as remote "b".
remote_b: PathBuf,
}
impl GitFixture {
// Create the fixture: two bare remotes, plus an initialised working repo
// on branch "main" with a deterministic test identity.
fn new() -> Self {
let temp = TempDir::new().unwrap();
let work = temp.path().join("work");
let mirror_path = temp.path().join("mirror.git");
let remote_a = temp.path().join("a.git");
let remote_b = temp.path().join("b.git");
git(None, ["init", "--bare", remote_a.to_str().unwrap()]);
git(None, ["init", "--bare", remote_b.to_str().unwrap()]);
fs::create_dir_all(&work).unwrap();
git(Some(&work), ["init"]);
git(Some(&work), ["config", "user.email", "test@example.test"]);
git(Some(&work), ["config", "user.name", "Test User"]);
// Explicit branch name so the test is independent of init.defaultBranch.
git(Some(&work), ["checkout", "-b", "main"]);
Self {
_temp: temp,
work,
mirror_path,
remote_a,
remote_b,
}
}
// Open the mirror under test (dry_run = false) with remotes a/b configured.
fn mirror(&self) -> GitMirror {
let mirror =
GitMirror::open(self.mirror_path.clone(), Redactor::new(Vec::new()), false).unwrap();
mirror.configure_remotes(&self.remotes()).unwrap();
mirror
}
// Remote specs for the two bare repositories, using their paths as URLs.
fn remotes(&self) -> Vec<RemoteSpec> {
vec![
RemoteSpec {
name: "a".to_string(),
url: self.remote_a.to_string_lossy().to_string(),
display: "remote a".to_string(),
},
RemoteSpec {
name: "b".to_string(),
url: self.remote_b.to_string_lossy().to_string(),
display: "remote b".to_string(),
},
]
}
// Fetch both remotes into the mirror, panicking on any failure.
fn fetch_all(&self, mirror: &GitMirror) {
for remote in self.remotes() {
mirror.fetch_remote(&remote).unwrap();
}
}
// Append `contents` to file.txt and commit with fixed author/committer
// timestamps so commit ordering (and force-resolution) is deterministic.
// Returns the new HEAD sha.
fn commit(&self, message: &str, contents: &str, timestamp: i64) -> String {
let path = self.work.join("file.txt");
let mut file = fs::OpenOptions::new()
.create(true)
.append(true)
.open(path)
.unwrap();
writeln!(file, "{contents}").unwrap();
git(Some(&self.work), ["add", "file.txt"]);
// "@<epoch> <tz>" is git's raw date format.
let date = format!("@{timestamp} +0000");
let output = Command::new("git")
.current_dir(&self.work)
.env("GIT_AUTHOR_DATE", &date)
.env("GIT_COMMITTER_DATE", &date)
.args(["commit", "-m", message])
.output()
.unwrap();
assert_success(&output, "git commit");
self.head()
}
// Current HEAD sha of the working repository.
fn head(&self) -> String {
git_output(Some(&self.work), ["rev-parse", "HEAD"])
}
// Move the working branch back to `sha`, discarding later commits.
fn reset_hard(&self, sha: &str) {
git(Some(&self.work), ["reset", "--hard", sha]);
}
// Push the working HEAD to `branch` on the given bare remote.
fn push_head(&self, remote: &Path, branch: &str) {
let refspec = format!("HEAD:refs/heads/{branch}");
git(
Some(&self.work),
["push", remote.to_str().unwrap(), &refspec],
);
}
// Create a lightweight tag at HEAD.
fn tag(&self, name: &str) {
git(Some(&self.work), ["tag", name]);
}
// Delete a local tag, ignoring failure (the tag may not exist yet).
fn delete_tag(&self, name: &str) {
let _ = Command::new("git")
.current_dir(&self.work)
.args(["tag", "-d", name])
.output()
.unwrap();
}
// Push a single tag to the given bare remote.
fn push_tag(&self, remote: &Path, tag: &str) {
let refspec = format!("refs/tags/{tag}:refs/tags/{tag}");
git(
Some(&self.work),
["push", remote.to_str().unwrap(), &refspec],
);
}
// Resolve `reference` in the bare remote to a sha, panicking if missing.
fn remote_ref(&self, remote: &Path, reference: &str) -> String {
git_output(
None,
[
"--git-dir",
remote.to_str().unwrap(),
"rev-parse",
reference,
],
)
}
// Whether `reference` resolves in the bare remote (rev-parse --verify).
fn remote_ref_exists(&self, remote: &Path, reference: &str) -> bool {
git_command(
None,
[
"--git-dir",
remote.to_str().unwrap(),
"rev-parse",
"--verify",
reference,
],
)
.output()
.unwrap()
.status
.success()
}
}
// Run git with `args`, panicking (with captured output) on failure.
fn git<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) {
    let result = git_command(current_dir, args).output().unwrap();
    assert_success(&result, "git");
}
// Run git with `args`, assert success, and return trimmed stdout.
fn git_output<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) -> String {
    let captured = git_command(current_dir, args).output().unwrap();
    assert_success(&captured, "git output");
    let stdout = String::from_utf8_lossy(&captured.stdout);
    stdout.trim().to_string()
}
// Build a git Command carrying `args`, optionally scoped to `current_dir`.
fn git_command<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) -> Command {
    let mut cmd = Command::new("git");
    cmd.args(args);
    if let Some(dir) = current_dir {
        cmd.current_dir(dir);
    }
    cmd
}
// Panic with both captured output streams when the child process exited
// with a nonzero status; `label` identifies which invocation failed.
fn assert_success(output: &std::process::Output, label: &str) {
    if !output.status.success() {
        panic!(
            "{label} failed\nstdout: {}\nstderr: {}",
            String::from_utf8_lossy(&output.stdout),
            String::from_utf8_lossy(&output.stderr)
        );
    }
}
+3 -771
View File
@@ -10,11 +10,6 @@ use dialoguer::{Confirm, Input, Password, Select, theme::ColorfulTheme};
use reqwest::blocking::Client;
use url::Url;
#[cfg(test)]
use anyhow::bail;
#[cfg(test)]
use std::io::{BufRead, Write};
use crate::config::{
Config, EndpointConfig, MirrorConfig, NamespaceKind, ProviderKind, SiteConfig, TokenConfig,
Visibility, WebhookConfig,
@@ -569,382 +564,9 @@ fn pat_instruction_lines(provider: &ProviderKind, base_url: &str) -> Vec<String>
}
// Test-only wizard entry point driven by injected reader/writer streams
// instead of interactive terminal prompts.
#[cfg(test)]
pub fn run_config_wizard_with_io<R, W>(
mut config: Config,
reader: &mut R,
writer: &mut W,
) -> Result<Config>
where
R: BufRead,
W: Write,
{
writeln!(writer, "git-sync configuration wizard")?;
// An empty config jumps straight into creating the first sync group.
if config.mirrors.is_empty() {
add_sync_group(reader, writer, &mut config)?;
write_sync_groups(&config, writer)?;
} else {
write_sync_groups(&config, writer)?;
}
// Menu loop: add/delete groups until the user chooses Done.
loop {
match prompt_wizard_action(reader, writer)? {
WizardAction::AddSyncGroup => {
add_sync_group(reader, writer, &mut config)?;
write_sync_groups(&config, writer)?;
}
WizardAction::DeleteSyncGroup => {
// Re-list groups only when a deletion actually happened.
if delete_sync_group(reader, writer, &mut config)? {
write_sync_groups(&config, writer)?;
}
}
WizardAction::Done => break,
}
}
Ok(config)
}
mod test_io;
// Test-only: prompt for two (optionally three) endpoints, ensure each has
// credentials, and register the resulting mirror plus optional webhook.
#[cfg(test)]
fn add_sync_group<R, W>(reader: &mut R, writer: &mut W, config: &mut Config) -> Result<()>
where
R: BufRead,
W: Write,
{
let mut endpoints = Vec::new();
let first = prompt_target(reader, writer, "Profile/org URL")?;
endpoints.push(ensure_credentials(config, first, reader, writer)?);
let second = prompt_target(reader, writer, "Profile/org URL to sync with")?;
endpoints.push(ensure_credentials(config, second, reader, writer)?);
// Keep offering additional endpoints until the user declines.
while prompt_bool(
reader,
writer,
"Add a third endpoint for 3-way sync?",
false,
)? {
let next = prompt_target(reader, writer, "Additional profile/org URL")?;
endpoints.push(ensure_credentials(config, next, reader, writer)?);
}
// New groups default to private visibility with creation enabled and
// force-pushes disabled.
config.upsert_mirror(MirrorConfig {
name: next_mirror_name(config),
endpoints,
create_missing: true,
visibility: Visibility::Private,
allow_force: false,
});
prompt_webhook_setup(reader, writer, config)?;
Ok(())
}
// Test-only: offer webhook installation; no-op when webhooks are already
// enabled or the user declines. Uses a fixed secret for tests.
#[cfg(test)]
fn prompt_webhook_setup<R, W>(reader: &mut R, writer: &mut W, config: &mut Config) -> Result<()>
where
R: BufRead,
W: Write,
{
if config
.webhook
.as_ref()
.is_some_and(|webhook| webhook.install)
{
writeln!(writer, "Webhooks already enabled.")?;
return Ok(());
}
writeln!(
writer,
"Install webhooks? Strongly recommended because immediate sync greatly reduces conflicts."
)?;
if !prompt_bool(reader, writer, "Install webhook?", true)? {
return Ok(());
}
let url = prompt_required(reader, writer, "Webhook URL reachable by providers")?;
// Reject malformed URLs before persisting anything.
if let Err(error) = validate_url(&url) {
bail!(error);
}
// Optional periodic full sync; interval is read only when enabled.
let full_sync_interval_minutes = if prompt_bool(
reader,
writer,
"Run periodic full sync while serve is running?",
true,
)? {
Some(
prompt_with_default(reader, writer, "Full sync interval in minutes", "60")?
.parse::<u64>()
.context("full sync interval must be a number")?,
)
} else {
None
};
config.webhook = Some(WebhookConfig {
install: true,
url,
// Fixed secret: this path is compiled for tests only.
secret: TokenConfig::Value("test-webhook-secret".to_string()),
full_sync_interval_minutes,
reachability_check_interval_minutes: Some(15),
});
Ok(())
}
// Test-only: show the main menu and keep re-prompting until the input maps
// to one of the three actions (numeric or textual answers accepted).
#[cfg(test)]
fn prompt_wizard_action<R, W>(reader: &mut R, writer: &mut W) -> Result<WizardAction>
where
R: BufRead,
W: Write,
{
loop {
writeln!(writer, "What would you like to do?")?;
writeln!(writer, " 1. Add another sync group")?;
writeln!(writer, " 2. Delete an existing group")?;
writeln!(writer, " 3. Done")?;
write!(writer, "Choose an option: ")?;
writer.flush()?;
// Input matching is case-insensitive.
let value = read_line(reader)?.trim().to_ascii_lowercase();
match value.as_str() {
"1" | "add" | "add another sync group" => return Ok(WizardAction::AddSyncGroup),
"2" | "delete" | "delete an existing group" => {
return Ok(WizardAction::DeleteSyncGroup);
}
"3" | "done" | "finish" => return Ok(WizardAction::Done),
_ => writeln!(writer, "Enter 1, 2, or 3.")?,
}
}
}
// Test-only: let the user pick a sync group to delete (1-based menu).
// Returns Ok(true) when a group was removed, Ok(false) when the user backed
// out or there was nothing to delete.
#[cfg(test)]
fn delete_sync_group<R, W>(reader: &mut R, writer: &mut W, config: &mut Config) -> Result<bool>
where
R: BufRead,
W: Write,
{
if config.mirrors.is_empty() {
writeln!(writer, "No sync groups to delete.")?;
return Ok(false);
}
loop {
writeln!(writer, "Delete sync group")?;
for (index, option) in sync_group_summaries(config).iter().enumerate() {
writeln!(writer, " {}. {}", index + 1, option)?;
}
// The last menu entry is always "Back".
writeln!(writer, " {}. Back", config.mirrors.len() + 1)?;
write!(writer, "Choose a sync group: ")?;
writer.flush()?;
let value = read_line(reader)?.trim().to_ascii_lowercase();
if value == "b" || value == "back" {
return Ok(false);
}
match value.parse::<usize>() {
Ok(index) if (1..=config.mirrors.len()).contains(&index) => {
// Resolve the name first; remove_mirror looks up by name.
let name = config.mirrors[index - 1].name.clone();
config.remove_mirror(&name)?;
writeln!(writer, "deleted sync group {index}")?;
return Ok(true);
}
Ok(index) if index == config.mirrors.len() + 1 => return Ok(false),
_ => writeln!(writer, "Enter a sync group number, or choose Back.")?,
}
}
}
// Test-only: read and parse a profile/org URL; when the host does not map
// to a known provider, fall back to asking the user which provider it is.
#[cfg(test)]
fn prompt_target<R, W>(reader: &mut R, writer: &mut W, prompt: &str) -> Result<ProfileTarget>
where
R: BufRead,
W: Write,
{
let url = prompt_required(reader, writer, prompt)?;
let parsed = parse_profile_url(&url)?;
// NOTE(review): the fallback uses expect(), so a failed provider prompt
// panics rather than propagating as Err — acceptable in test-only code.
let provider = known_provider_from_host(&parsed.host).unwrap_or_else(|| {
prompt_provider(reader, writer, &parsed.base_url).expect("provider prompt failed")
});
Ok(ProfileTarget {
base_url: parsed.base_url,
provider,
// kind is resolved later (see ensure_credentials).
namespace: parsed.namespace,
kind: None,
})
}
#[cfg(test)]
fn ensure_credentials<R, W>(
config: &mut Config,
target: ProfileTarget,
reader: &mut R,
writer: &mut W,
) -> Result<EndpointConfig>
where
R: BufRead,
W: Write,
{
if let Some(site) = config.sites.iter().find(|site| {
site.provider == target.provider
&& trim_url_end(&site.base_url) == trim_url_end(&target.base_url)
}) {
let kind = target.kind.clone().unwrap_or_else(|| {
prompt_namespace_kind(reader, writer, &target.namespace).expect("kind prompt failed")
});
let endpoint = target_endpoint(&target, kind, site.name.clone());
writeln!(
writer,
"Using existing credentials for {}",
target_display(&target)
)?;
return Ok(endpoint);
}
for line in pat_instruction_lines(&target.provider, &target.base_url) {
writeln!(writer, "{line}")?;
}
let token = prompt_required(reader, writer, "PAT token")?;
let site = SiteConfig {
name: default_site_name(config, &target.base_url, &target.provider),
provider: target.provider.clone(),
base_url: target.base_url.clone(),
api_url: None,
token: TokenConfig::Value(token),
git_username: None,
};
let site_name = site.name.clone();
config.upsert_site(site);
let kind = target.kind.clone().unwrap_or_else(|| {
prompt_namespace_kind(reader, writer, &target.namespace).expect("kind prompt failed")
});
Ok(target_endpoint(&target, kind, site_name))
}
#[cfg(test)]
fn prompt_provider<R, W>(reader: &mut R, writer: &mut W, base_url: &str) -> Result<ProviderKind>
where
R: BufRead,
W: Write,
{
loop {
let value = prompt_required(reader, writer, &format!("Provider for {base_url}"))?;
match value.to_ascii_lowercase().as_str() {
"github" => return Ok(ProviderKind::Github),
"gitlab" => return Ok(ProviderKind::Gitlab),
"gitea" => return Ok(ProviderKind::Gitea),
"forgejo" => return Ok(ProviderKind::Forgejo),
_ => writeln!(
writer,
"Provider must be github, gitlab, gitea, or forgejo."
)?,
}
}
}
#[cfg(test)]
fn prompt_namespace_kind<R, W>(
reader: &mut R,
writer: &mut W,
namespace: &str,
) -> Result<NamespaceKind>
where
R: BufRead,
W: Write,
{
loop {
let value = prompt_with_default(reader, writer, &format!("What is {namespace}?"), "user")?;
match value.to_ascii_lowercase().as_str() {
"user" => return Ok(NamespaceKind::User),
"org" | "organization" => return Ok(NamespaceKind::Org),
"group" => return Ok(NamespaceKind::Group),
_ => writeln!(writer, "Namespace kind must be user, org, or group.")?,
}
}
}
#[cfg(test)]
fn write_sync_groups<W>(config: &Config, writer: &mut W) -> Result<()>
where
W: Write,
{
writeln!(writer, "Sync groups")?;
if config.mirrors.is_empty() {
writeln!(writer, "No sync groups configured.")?;
return Ok(());
}
for (index, mirror) in config.mirrors.iter().enumerate() {
writeln!(
writer,
"{}. {}",
index + 1,
sync_group_summary(config, mirror)
)?;
}
Ok(())
}
#[cfg(test)]
fn prompt_bool<R, W>(reader: &mut R, writer: &mut W, label: &str, default: bool) -> Result<bool>
where
R: BufRead,
W: Write,
{
let default_label = if default { "Y/n" } else { "y/N" };
loop {
write!(writer, "{label} [{default_label}]: ")?;
writer.flush()?;
let value = read_line(reader)?.trim().to_ascii_lowercase();
match value.as_str() {
"" => return Ok(default),
"y" | "yes" | "true" => return Ok(true),
"n" | "no" | "false" => return Ok(false),
_ => writeln!(writer, "Enter yes or no.")?,
}
}
}
#[cfg(test)]
fn prompt_required<R, W>(reader: &mut R, writer: &mut W, label: &str) -> Result<String>
where
R: BufRead,
W: Write,
{
loop {
write!(writer, "{label}: ")?;
writer.flush()?;
let value = read_line(reader)?.trim().to_string();
if !value.is_empty() {
return Ok(value);
}
writeln!(writer, "A value is required.")?;
}
}
#[cfg(test)]
fn prompt_with_default<R, W>(
reader: &mut R,
writer: &mut W,
label: &str,
default: &str,
) -> Result<String>
where
R: BufRead,
W: Write,
{
write!(writer, "{label} [{default}]: ")?;
writer.flush()?;
let value = read_line(reader)?.trim().to_string();
if value.is_empty() {
Ok(default.to_string())
} else {
Ok(value)
}
}
#[cfg(test)]
fn read_line<R>(reader: &mut R) -> Result<String>
where
R: BufRead,
{
let mut value = String::new();
let bytes = reader.read_line(&mut value)?;
if bytes == 0 {
bail!("unexpected end of input while reading interactive configuration");
}
Ok(value)
}
use test_io::*;
fn parse_profile_url(value: &str) -> Result<ParsedProfileUrl> {
let normalized = ensure_url_scheme(value);
@@ -1291,394 +913,4 @@ fn generate_webhook_secret() -> String {
}
#[cfg(test)]
mod tests {
use super::*;
use std::io::Cursor;
#[test]
fn wizard_builds_sync_group_from_profile_urls() {
let input = [
"https://github.com/hykilpikonna",
"gh-token",
"",
"https://gitea.example.test/azalea",
"gt-token",
"",
"n",
"n",
"3",
]
.join("\n")
+ "\n";
let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new();
let config =
run_config_wizard_with_io(Config::default(), &mut reader, &mut output).unwrap();
assert_eq!(config.sites.len(), 2);
assert_eq!(config.sites[0].name, "github");
assert_eq!(config.sites[0].provider, ProviderKind::Github);
assert_eq!(config.sites[0].base_url, "https://github.com");
assert_eq!(
config.sites[0].token,
TokenConfig::Value("gh-token".to_string())
);
assert_eq!(config.sites[1].name, "gitea-example-test");
assert_eq!(config.sites[1].provider, ProviderKind::Gitea);
assert_eq!(config.sites[1].base_url, "https://gitea.example.test");
assert_eq!(config.mirrors.len(), 1);
assert_eq!(config.mirrors[0].name, "sync-1");
assert_eq!(config.mirrors[0].endpoints.len(), 2);
assert_eq!(config.mirrors[0].endpoints[0].site, "github");
assert_eq!(config.mirrors[0].endpoints[0].kind, NamespaceKind::User);
assert_eq!(config.mirrors[0].endpoints[0].namespace, "hykilpikonna");
assert_eq!(config.mirrors[0].endpoints[1].site, "gitea-example-test");
assert_eq!(config.mirrors[0].endpoints[1].namespace, "azalea");
assert!(config.mirrors[0].create_missing);
assert_eq!(config.mirrors[0].visibility, Visibility::Private);
assert!(!config.mirrors[0].allow_force);
let output = String::from_utf8(output).unwrap();
assert!(output.contains("1. github.com/hykilpikonna <-> gitea.example.test/azalea"));
assert!(output.contains("Add another sync group"));
assert!(output.contains("Delete an existing group"));
assert!(output.contains("Done"));
}
#[test]
fn wizard_can_build_three_way_sync() {
let input = [
"https://github.com/alice",
"gh-token",
"",
"https://gitlab.com/alice",
"gl-token",
"",
"y",
"https://gitea.example.test/alice",
"gt-token",
"",
"n",
"n",
"3",
]
.join("\n")
+ "\n";
let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new();
let config =
run_config_wizard_with_io(Config::default(), &mut reader, &mut output).unwrap();
assert_eq!(config.mirrors.len(), 1);
assert_eq!(config.mirrors[0].endpoints.len(), 3);
assert_eq!(config.sites.len(), 3);
}
#[test]
fn wizard_can_enable_webhooks() {
let input = [
"https://github.com/alice",
"gh-token",
"",
"https://gitea.example.test/alice",
"gt-token",
"",
"n",
"y",
"https://mirror.example.test/webhook",
"y",
"30",
"3",
]
.join("\n")
+ "\n";
let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new();
let config =
run_config_wizard_with_io(Config::default(), &mut reader, &mut output).unwrap();
let webhook = config.webhook.unwrap();
assert!(webhook.install);
assert_eq!(webhook.url, "https://mirror.example.test/webhook");
assert_eq!(webhook.full_sync_interval_minutes, Some(30));
assert_eq!(webhook.reachability_check_interval_minutes, Some(15));
assert_eq!(
webhook.secret,
TokenConfig::Value("test-webhook-secret".to_string())
);
}
#[test]
fn wizard_reuses_existing_credentials_for_same_instance() {
let config = Config {
sites: vec![SiteConfig {
name: "github".to_string(),
provider: ProviderKind::Github,
base_url: "https://github.com".to_string(),
api_url: None,
token: TokenConfig::Value("existing".to_string()),
git_username: None,
}],
mirrors: Vec::new(),
webhook: None,
};
let input = [
"https://github.com/alice",
"",
"https://github.com/bob",
"",
"n",
"n",
"3",
]
.join("\n")
+ "\n";
let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new();
let updated = run_config_wizard_with_io(config, &mut reader, &mut output).unwrap();
assert_eq!(updated.sites.len(), 1);
assert_eq!(updated.mirrors[0].endpoints[0].site, "github");
assert_eq!(updated.mirrors[0].endpoints[1].site, "github");
}
#[test]
fn wizard_starts_existing_config_at_sync_group_menu() {
let config = Config {
sites: vec![
SiteConfig {
name: "github".to_string(),
provider: ProviderKind::Github,
base_url: "https://github.com".to_string(),
api_url: None,
token: TokenConfig::Value("existing-gh".to_string()),
git_username: None,
},
SiteConfig {
name: "gitea".to_string(),
provider: ProviderKind::Gitea,
base_url: "https://gitea.example.test".to_string(),
api_url: None,
token: TokenConfig::Value("existing-gt".to_string()),
git_username: None,
},
],
mirrors: vec![MirrorConfig {
name: "sync-1".to_string(),
endpoints: vec![
EndpointConfig {
site: "github".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
EndpointConfig {
site: "gitea".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
],
create_missing: true,
visibility: Visibility::Private,
allow_force: false,
}],
webhook: None,
};
let mut reader = Cursor::new(b"3\n".as_slice());
let mut output = Vec::new();
let updated = run_config_wizard_with_io(config, &mut reader, &mut output).unwrap();
assert_eq!(updated.mirrors.len(), 1);
let output = String::from_utf8(output).unwrap();
assert!(output.contains("1. github.com/alice <-> gitea.example.test/alice"));
assert!(output.contains("What would you like to do?"));
assert!(!output.contains("Profile/org URL:"));
}
#[test]
fn wizard_deletes_existing_sync_group_from_menu() {
let config = Config {
sites: vec![
SiteConfig {
name: "github".to_string(),
provider: ProviderKind::Github,
base_url: "https://github.com".to_string(),
api_url: None,
token: TokenConfig::Value("existing-gh".to_string()),
git_username: None,
},
SiteConfig {
name: "gitea".to_string(),
provider: ProviderKind::Gitea,
base_url: "https://gitea.example.test".to_string(),
api_url: None,
token: TokenConfig::Value("existing-gt".to_string()),
git_username: None,
},
],
mirrors: vec![MirrorConfig {
name: "sync-1".to_string(),
endpoints: vec![
EndpointConfig {
site: "github".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
EndpointConfig {
site: "gitea".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
],
create_missing: true,
visibility: Visibility::Private,
allow_force: false,
}],
webhook: None,
};
let input = ["2", "1", "3"].join("\n") + "\n";
let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new();
let updated = run_config_wizard_with_io(config, &mut reader, &mut output).unwrap();
assert!(updated.mirrors.is_empty());
let output = String::from_utf8(output).unwrap();
assert!(output.contains("Delete sync group"));
assert!(output.contains("2. Back"));
assert!(output.contains("deleted sync group 1"));
assert!(output.contains("No sync groups configured."));
}
#[test]
fn wizard_can_go_back_from_delete_menu() {
let config = Config {
sites: vec![
SiteConfig {
name: "github".to_string(),
provider: ProviderKind::Github,
base_url: "https://github.com".to_string(),
api_url: None,
token: TokenConfig::Value("existing-gh".to_string()),
git_username: None,
},
SiteConfig {
name: "gitea".to_string(),
provider: ProviderKind::Gitea,
base_url: "https://gitea.example.test".to_string(),
api_url: None,
token: TokenConfig::Value("existing-gt".to_string()),
git_username: None,
},
],
mirrors: vec![MirrorConfig {
name: "sync-1".to_string(),
endpoints: vec![
EndpointConfig {
site: "github".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
EndpointConfig {
site: "gitea".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
],
create_missing: true,
visibility: Visibility::Private,
allow_force: false,
}],
webhook: None,
};
let input = ["2", "2", "3"].join("\n") + "\n";
let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new();
let updated = run_config_wizard_with_io(config, &mut reader, &mut output).unwrap();
assert_eq!(updated.mirrors.len(), 1);
let output = String::from_utf8(output).unwrap();
assert!(output.contains("2. Back"));
assert!(!output.contains("deleted sync group"));
}
#[test]
fn wizard_reports_eof_instead_of_looping() {
let mut reader = Cursor::new(b"".as_slice());
let mut output = Vec::new();
let err = run_config_wizard_with_io(Config::default(), &mut reader, &mut output)
.unwrap_err()
.to_string();
assert!(err.contains("unexpected end of input"));
}
#[test]
fn profile_urls_are_parsed_into_base_and_namespace() {
let parsed = parse_profile_url("github.com/alice").unwrap();
assert_eq!(parsed.base_url, "https://github.com");
assert_eq!(parsed.host, "github.com");
assert_eq!(parsed.namespace, "alice");
let parsed = parse_profile_url("https://gitlab.example.test:8443/groups/team").unwrap();
assert_eq!(parsed.base_url, "https://gitlab.example.test:8443");
assert_eq!(parsed.namespace, "groups/team");
}
#[test]
fn site_names_are_derived_from_urls_and_made_unique() {
let mut config = Config::default();
assert_eq!(
default_site_name(&config, "https://github.com", &ProviderKind::Github),
"github"
);
assert_eq!(
default_site_name(
&config,
"https://git.my-company.com:3000",
&ProviderKind::Gitea
),
"git-my-company"
);
config.upsert_site(SiteConfig {
name: "github".to_string(),
provider: ProviderKind::Github,
base_url: "https://github.com".to_string(),
api_url: None,
token: TokenConfig::Value("token".to_string()),
git_username: None,
});
assert_eq!(
default_site_name(&config, "https://github.com", &ProviderKind::Github),
"github-2"
);
}
#[test]
fn token_creation_urls_are_provider_specific() {
assert_eq!(
token_creation_url(&ProviderKind::Github, "https://github.com/"),
"https://github.com/settings/tokens"
);
assert_eq!(
token_creation_url(&ProviderKind::Gitlab, "https://gitlab.example.test"),
"https://gitlab.example.test/-/user_settings/personal_access_tokens?name=git-sync&scopes=api"
);
assert_eq!(
token_creation_url(&ProviderKind::Gitea, "gitea.example.test"),
"https://gitea.example.test/user/settings/applications"
);
assert_eq!(
token_creation_url(&ProviderKind::Forgejo, "forgejo.example.test"),
"https://forgejo.example.test/user/settings/applications"
);
}
}
mod tests;
+367
View File
@@ -0,0 +1,367 @@
use super::*;
use anyhow::bail;
use std::io::{BufRead, Write};
/// Runs the interactive configuration wizard against the given I/O streams.
///
/// A config with no sync groups drops straight into the "add a group" flow;
/// otherwise the existing groups are listed first. Either way the user then
/// lands on the action menu (add / delete / done) until they choose `Done`.
/// Returns the (possibly modified) configuration.
pub fn run_config_wizard_with_io<R, W>(
    mut config: Config,
    reader: &mut R,
    writer: &mut W,
) -> Result<Config>
where
    R: BufRead,
    W: Write,
{
    writeln!(writer, "git-sync configuration wizard")?;
    if config.mirrors.is_empty() {
        add_sync_group(reader, writer, &mut config)?;
    }
    // Both branches of the original flow print the group list next, so the
    // call is hoisted out of the conditional.
    write_sync_groups(&config, writer)?;
    loop {
        match prompt_wizard_action(reader, writer)? {
            WizardAction::Done => return Ok(config),
            WizardAction::AddSyncGroup => {
                add_sync_group(reader, writer, &mut config)?;
                write_sync_groups(&config, writer)?;
            }
            WizardAction::DeleteSyncGroup => {
                // Only re-print the list when a group was actually removed.
                let removed = delete_sync_group(reader, writer, &mut config)?;
                if removed {
                    write_sync_groups(&config, writer)?;
                }
            }
        }
    }
}
/// Interactively builds one sync group: two required endpoints, optional
/// extra endpoints for N-way sync, then offers webhook setup.
///
/// The exact prompt order matters — the wizard tests drive this flow with a
/// fixed script of input lines.
fn add_sync_group<R, W>(reader: &mut R, writer: &mut W, config: &mut Config) -> Result<()>
where
    R: BufRead,
    W: Write,
{
    let mut endpoints = Vec::new();
    // Every target prompt is immediately followed by credential resolution,
    // which may itself prompt for a PAT and/or the namespace kind.
    let first = prompt_target(reader, writer, "Profile/org URL")?;
    endpoints.push(ensure_credentials(config, first, reader, writer)?);
    let second = prompt_target(reader, writer, "Profile/org URL to sync with")?;
    endpoints.push(ensure_credentials(config, second, reader, writer)?);
    // Keep adding endpoints for as long as the user answers yes (default: no).
    while prompt_bool(
        reader,
        writer,
        "Add a third endpoint for 3-way sync?",
        false,
    )? {
        let next = prompt_target(reader, writer, "Additional profile/org URL")?;
        endpoints.push(ensure_credentials(config, next, reader, writer)?);
    }
    // New groups default to: auto-create missing repos, private visibility,
    // force-push disallowed.
    config.upsert_mirror(MirrorConfig {
        name: next_mirror_name(config),
        endpoints,
        create_missing: true,
        visibility: Visibility::Private,
        allow_force: false,
    });
    prompt_webhook_setup(reader, writer, config)?;
    Ok(())
}
/// Offers to enable webhook installation after a sync group is created.
///
/// No-op when webhooks are already enabled or the user declines. On success,
/// stores the webhook URL, an optional periodic full-sync interval, and a
/// fixed 15-minute reachability check interval.
fn prompt_webhook_setup<R, W>(reader: &mut R, writer: &mut W, config: &mut Config) -> Result<()>
where
    R: BufRead,
    W: Write,
{
    // Already configured and enabled: nothing to do.
    if config
        .webhook
        .as_ref()
        .is_some_and(|webhook| webhook.install)
    {
        writeln!(writer, "Webhooks already enabled.")?;
        return Ok(());
    }
    writeln!(
        writer,
        "Install webhooks? Strongly recommended because immediate sync greatly reduces conflicts."
    )?;
    if !prompt_bool(reader, writer, "Install webhook?", true)? {
        return Ok(());
    }
    let url = prompt_required(reader, writer, "Webhook URL reachable by providers")?;
    // Reject malformed URLs up front rather than failing later at install time.
    if let Err(error) = validate_url(&url) {
        bail!(error);
    }
    // Optional periodic full sync; interval defaults to 60 minutes.
    let full_sync_interval_minutes = if prompt_bool(
        reader,
        writer,
        "Run periodic full sync while serve is running?",
        true,
    )? {
        Some(
            prompt_with_default(reader, writer, "Full sync interval in minutes", "60")?
                .parse::<u64>()
                .context("full sync interval must be a number")?,
        )
    } else {
        None
    };
    // NOTE(review): the secret is a literal placeholder ("test-webhook-secret");
    // the surrounding diff references a generate_webhook_secret() helper —
    // confirm whether non-test builds should generate a random secret here.
    config.webhook = Some(WebhookConfig {
        install: true,
        url,
        secret: TokenConfig::Value("test-webhook-secret".to_string()),
        full_sync_interval_minutes,
        reachability_check_interval_minutes: Some(15),
    });
    Ok(())
}
/// Presents the top-level wizard menu and returns the selected action.
///
/// Re-prints the menu until the user enters a recognized choice; accepts
/// either the option number or the option text (case-insensitive).
fn prompt_wizard_action<R, W>(reader: &mut R, writer: &mut W) -> Result<WizardAction>
where
    R: BufRead,
    W: Write,
{
    loop {
        writeln!(writer, "What would you like to do?")?;
        writeln!(writer, " 1. Add another sync group")?;
        writeln!(writer, " 2. Delete an existing group")?;
        writeln!(writer, " 3. Done")?;
        write!(writer, "Choose an option: ")?;
        writer.flush()?;
        let choice = read_line(reader)?.trim().to_ascii_lowercase();
        let action = match choice.as_str() {
            "1" | "add" | "add another sync group" => Some(WizardAction::AddSyncGroup),
            "2" | "delete" | "delete an existing group" => Some(WizardAction::DeleteSyncGroup),
            "3" | "done" | "finish" => Some(WizardAction::Done),
            _ => None,
        };
        if let Some(action) = action {
            return Ok(action);
        }
        writeln!(writer, "Enter 1, 2, or 3.")?;
    }
}
/// Interactive deletion menu for sync groups.
///
/// Returns `Ok(true)` when a group was removed, `Ok(false)` when the user
/// backed out (via "b"/"back" or the trailing "Back" menu entry) or there was
/// nothing to delete. Loops on unrecognized input.
fn delete_sync_group<R, W>(reader: &mut R, writer: &mut W, config: &mut Config) -> Result<bool>
where
    R: BufRead,
    W: Write,
{
    if config.mirrors.is_empty() {
        writeln!(writer, "No sync groups to delete.")?;
        return Ok(false);
    }
    loop {
        writeln!(writer, "Delete sync group")?;
        // Menu entries are 1-based; the final entry (len + 1) is "Back".
        for (index, option) in sync_group_summaries(config).iter().enumerate() {
            writeln!(writer, " {}. {}", index + 1, option)?;
        }
        writeln!(writer, " {}. Back", config.mirrors.len() + 1)?;
        write!(writer, "Choose a sync group: ")?;
        writer.flush()?;
        let value = read_line(reader)?.trim().to_ascii_lowercase();
        if value == "b" || value == "back" {
            return Ok(false);
        }
        match value.parse::<usize>() {
            // In-range selection: remove by name (not index) so removal goes
            // through Config's own lookup/validation.
            Ok(index) if (1..=config.mirrors.len()).contains(&index) => {
                let name = config.mirrors[index - 1].name.clone();
                config.remove_mirror(&name)?;
                writeln!(writer, "deleted sync group {index}")?;
                return Ok(true);
            }
            Ok(index) if index == config.mirrors.len() + 1 => return Ok(false),
            _ => writeln!(writer, "Enter a sync group number, or choose Back.")?,
        }
    }
}
/// Prompts for a profile/org URL and resolves it into a `ProfileTarget`.
///
/// The provider is inferred from the host when it is a known public instance;
/// otherwise the user is asked directly. The namespace kind is left `None`
/// and resolved later by `ensure_credentials`.
///
/// # Errors
/// Propagates URL-parse failures and prompt failures (including EOF).
fn prompt_target<R, W>(reader: &mut R, writer: &mut W, prompt: &str) -> Result<ProfileTarget>
where
    R: BufRead,
    W: Write,
{
    let url = prompt_required(reader, writer, prompt)?;
    let parsed = parse_profile_url(&url)?;
    // Bug fix: propagate prompt errors (e.g. EOF on stdin) with `?` instead of
    // panicking — the previous `unwrap_or_else(.. .expect(..))` aborted the
    // process, breaking the wizard's "report unexpected end of input as an
    // error" contract established by read_line.
    let provider = match known_provider_from_host(&parsed.host) {
        Some(provider) => provider,
        None => prompt_provider(reader, writer, &parsed.base_url)?,
    };
    Ok(ProfileTarget {
        base_url: parsed.base_url,
        provider,
        namespace: parsed.namespace,
        kind: None,
    })
}
/// Resolves (or creates) the site credentials backing `target` and returns the
/// endpoint entry for the target namespace.
///
/// When a site with the same provider and trim-normalized base URL already
/// exists, its stored token is reused and no PAT is requested. Otherwise the
/// user is shown provider-specific PAT instructions, asked for a token, and a
/// new `SiteConfig` is persisted before the endpoint is built.
///
/// # Errors
/// Propagates prompt failures (including EOF) instead of panicking.
fn ensure_credentials<R, W>(
    config: &mut Config,
    target: ProfileTarget,
    reader: &mut R,
    writer: &mut W,
) -> Result<EndpointConfig>
where
    R: BufRead,
    W: Write,
{
    if let Some(site) = config.sites.iter().find(|site| {
        site.provider == target.provider
            && trim_url_end(&site.base_url) == trim_url_end(&target.base_url)
    }) {
        // Bug fix: `?` instead of `unwrap_or_else(.. .expect("kind prompt
        // failed"))` — EOF or a write error during the kind prompt must surface
        // as `Err`, not a panic.
        let kind = match target.kind.clone() {
            Some(kind) => kind,
            None => prompt_namespace_kind(reader, writer, &target.namespace)?,
        };
        let endpoint = target_endpoint(&target, kind, site.name.clone());
        writeln!(
            writer,
            "Using existing credentials for {}",
            target_display(&target)
        )?;
        return Ok(endpoint);
    }
    // No matching site: walk the user through creating a PAT for this provider.
    for line in pat_instruction_lines(&target.provider, &target.base_url) {
        writeln!(writer, "{line}")?;
    }
    let token = prompt_required(reader, writer, "PAT token")?;
    let site = SiteConfig {
        name: default_site_name(config, &target.base_url, &target.provider),
        provider: target.provider.clone(),
        base_url: target.base_url.clone(),
        api_url: None,
        token: TokenConfig::Value(token),
        git_username: None,
    };
    let site_name = site.name.clone();
    config.upsert_site(site);
    // Same fix as above: propagate rather than panic. The kind prompt stays
    // after upsert_site to preserve the original prompt ordering.
    let kind = match target.kind.clone() {
        Some(kind) => kind,
        None => prompt_namespace_kind(reader, writer, &target.namespace)?,
    };
    Ok(target_endpoint(&target, kind, site_name))
}
/// Repeatedly asks which provider runs `base_url` until the answer is one of
/// the four supported providers (case-insensitive).
fn prompt_provider<R, W>(reader: &mut R, writer: &mut W, base_url: &str) -> Result<ProviderKind>
where
    R: BufRead,
    W: Write,
{
    let label = format!("Provider for {base_url}");
    loop {
        let answer = prompt_required(reader, writer, &label)?.to_ascii_lowercase();
        let provider = match answer.as_str() {
            "github" => Some(ProviderKind::Github),
            "gitlab" => Some(ProviderKind::Gitlab),
            "gitea" => Some(ProviderKind::Gitea),
            "forgejo" => Some(ProviderKind::Forgejo),
            _ => None,
        };
        if let Some(provider) = provider {
            return Ok(provider);
        }
        writeln!(
            writer,
            "Provider must be github, gitlab, gitea, or forgejo."
        )?;
    }
}
/// Asks whether `namespace` is a user, org, or group; empty input defaults to
/// "user". Loops until a recognized answer is given.
fn prompt_namespace_kind<R, W>(
    reader: &mut R,
    writer: &mut W,
    namespace: &str,
) -> Result<NamespaceKind>
where
    R: BufRead,
    W: Write,
{
    let label = format!("What is {namespace}?");
    loop {
        let answer =
            prompt_with_default(reader, writer, &label, "user")?.to_ascii_lowercase();
        let kind = match answer.as_str() {
            "user" => Some(NamespaceKind::User),
            "org" | "organization" => Some(NamespaceKind::Org),
            "group" => Some(NamespaceKind::Group),
            _ => None,
        };
        if let Some(kind) = kind {
            return Ok(kind);
        }
        writeln!(writer, "Namespace kind must be user, org, or group.")?;
    }
}
/// Prints a numbered, one-line summary of every configured sync group, or a
/// placeholder message when none exist.
fn write_sync_groups<W>(config: &Config, writer: &mut W) -> Result<()>
where
    W: Write,
{
    writeln!(writer, "Sync groups")?;
    if config.mirrors.is_empty() {
        writeln!(writer, "No sync groups configured.")?;
        return Ok(());
    }
    let mut position = 0;
    for mirror in &config.mirrors {
        position += 1;
        writeln!(writer, "{}. {}", position, sync_group_summary(config, mirror))?;
    }
    Ok(())
}
/// Yes/no prompt. An empty answer selects `default` (the capitalized letter in
/// the hint); otherwise loops until a recognizable yes/no answer is given.
fn prompt_bool<R, W>(reader: &mut R, writer: &mut W, label: &str, default: bool) -> Result<bool>
where
    R: BufRead,
    W: Write,
{
    let hint = if default { "Y/n" } else { "y/N" };
    loop {
        write!(writer, "{} [{}]: ", label, hint)?;
        writer.flush()?;
        let answer = read_line(reader)?.trim().to_ascii_lowercase();
        if answer.is_empty() {
            return Ok(default);
        }
        if matches!(answer.as_str(), "y" | "yes" | "true") {
            return Ok(true);
        }
        if matches!(answer.as_str(), "n" | "no" | "false") {
            return Ok(false);
        }
        writeln!(writer, "Enter yes or no.")?;
    }
}
/// Prompts until the user supplies a non-empty (after trimming) value, which
/// is returned without surrounding whitespace.
fn prompt_required<R, W>(reader: &mut R, writer: &mut W, label: &str) -> Result<String>
where
    R: BufRead,
    W: Write,
{
    loop {
        write!(writer, "{label}: ")?;
        writer.flush()?;
        let answer = read_line(reader)?.trim().to_string();
        if answer.is_empty() {
            writeln!(writer, "A value is required.")?;
            continue;
        }
        return Ok(answer);
    }
}
/// Single prompt with a fallback: an empty (after trimming) answer yields
/// `default`, anything else yields the trimmed answer.
fn prompt_with_default<R, W>(
    reader: &mut R,
    writer: &mut W,
    label: &str,
    default: &str,
) -> Result<String>
where
    R: BufRead,
    W: Write,
{
    write!(writer, "{label} [{default}]: ")?;
    writer.flush()?;
    let answer = read_line(reader)?;
    let trimmed = answer.trim();
    if trimmed.is_empty() {
        return Ok(default.to_string());
    }
    Ok(trimmed.to_string())
}
/// Reads one line (trailing newline included) from the interactive stream.
///
/// A zero-byte read means the input ended; the wizard reports that as an
/// error rather than re-prompting forever on a closed stdin.
fn read_line<R>(reader: &mut R) -> Result<String>
where
    R: BufRead,
{
    let mut line = String::new();
    if reader.read_line(&mut line)? == 0 {
        bail!("unexpected end of input while reading interactive configuration");
    }
    Ok(line)
}
+386
View File
@@ -0,0 +1,386 @@
use super::*;
use std::io::Cursor;
// Drives the fresh-config flow end to end with two profile URLs; the empty
// input lines accept the default "user" namespace kind.
#[test]
fn wizard_builds_sync_group_from_profile_urls() {
    let input = [
        "https://github.com/hykilpikonna",
        "gh-token",
        "",
        "https://gitea.example.test/azalea",
        "gt-token",
        "",
        "n",
        "n",
        "3",
    ]
    .join("\n")
        + "\n";
    let mut reader = Cursor::new(input.as_bytes());
    let mut output = Vec::new();
    let config = run_config_wizard_with_io(Config::default(), &mut reader, &mut output).unwrap();
    assert_eq!(config.sites.len(), 2);
    assert_eq!(config.sites[0].name, "github");
    assert_eq!(config.sites[0].provider, ProviderKind::Github);
    assert_eq!(config.sites[0].base_url, "https://github.com");
    assert_eq!(
        config.sites[0].token,
        TokenConfig::Value("gh-token".to_string())
    );
    assert_eq!(config.sites[1].name, "gitea-example-test");
    assert_eq!(config.sites[1].provider, ProviderKind::Gitea);
    assert_eq!(config.sites[1].base_url, "https://gitea.example.test");
    assert_eq!(config.mirrors.len(), 1);
    assert_eq!(config.mirrors[0].name, "sync-1");
    assert_eq!(config.mirrors[0].endpoints.len(), 2);
    assert_eq!(config.mirrors[0].endpoints[0].site, "github");
    assert_eq!(config.mirrors[0].endpoints[0].kind, NamespaceKind::User);
    assert_eq!(config.mirrors[0].endpoints[0].namespace, "hykilpikonna");
    assert_eq!(config.mirrors[0].endpoints[1].site, "gitea-example-test");
    assert_eq!(config.mirrors[0].endpoints[1].namespace, "azalea");
    assert!(config.mirrors[0].create_missing);
    assert_eq!(config.mirrors[0].visibility, Visibility::Private);
    assert!(!config.mirrors[0].allow_force);
    let output = String::from_utf8(output).unwrap();
    assert!(output.contains("1. github.com/hykilpikonna <-> gitea.example.test/azalea"));
    assert!(output.contains("Add another sync group"));
    assert!(output.contains("Delete an existing group"));
    assert!(output.contains("Done"));
}
// Answering "y" to the third-endpoint prompt adds a third site and endpoint.
#[test]
fn wizard_can_build_three_way_sync() {
    let input = [
        "https://github.com/alice",
        "gh-token",
        "",
        "https://gitlab.com/alice",
        "gl-token",
        "",
        "y",
        "https://gitea.example.test/alice",
        "gt-token",
        "",
        "n",
        "n",
        "3",
    ]
    .join("\n")
        + "\n";
    let mut reader = Cursor::new(input.as_bytes());
    let mut output = Vec::new();
    let config = run_config_wizard_with_io(Config::default(), &mut reader, &mut output).unwrap();
    assert_eq!(config.mirrors.len(), 1);
    assert_eq!(config.mirrors[0].endpoints.len(), 3);
    assert_eq!(config.sites.len(), 3);
}
// Opting into webhooks stores the URL, the chosen full-sync interval, the
// fixed 15-minute reachability interval, and the placeholder secret.
#[test]
fn wizard_can_enable_webhooks() {
    let input = [
        "https://github.com/alice",
        "gh-token",
        "",
        "https://gitea.example.test/alice",
        "gt-token",
        "",
        "n",
        "y",
        "https://mirror.example.test/webhook",
        "y",
        "30",
        "3",
    ]
    .join("\n")
        + "\n";
    let mut reader = Cursor::new(input.as_bytes());
    let mut output = Vec::new();
    let config = run_config_wizard_with_io(Config::default(), &mut reader, &mut output).unwrap();
    let webhook = config.webhook.unwrap();
    assert!(webhook.install);
    assert_eq!(webhook.url, "https://mirror.example.test/webhook");
    assert_eq!(webhook.full_sync_interval_minutes, Some(30));
    assert_eq!(webhook.reachability_check_interval_minutes, Some(15));
    assert_eq!(
        webhook.secret,
        TokenConfig::Value("test-webhook-secret".to_string())
    );
}
// Two namespaces on the same provider/base URL share one stored site entry,
// so no PAT is requested for either.
#[test]
fn wizard_reuses_existing_credentials_for_same_instance() {
    let config = Config {
        sites: vec![SiteConfig {
            name: "github".to_string(),
            provider: ProviderKind::Github,
            base_url: "https://github.com".to_string(),
            api_url: None,
            token: TokenConfig::Value("existing".to_string()),
            git_username: None,
        }],
        mirrors: Vec::new(),
        webhook: None,
    };
    let input = [
        "https://github.com/alice",
        "",
        "https://github.com/bob",
        "",
        "n",
        "n",
        "3",
    ]
    .join("\n")
        + "\n";
    let mut reader = Cursor::new(input.as_bytes());
    let mut output = Vec::new();
    let updated = run_config_wizard_with_io(config, &mut reader, &mut output).unwrap();
    assert_eq!(updated.sites.len(), 1);
    assert_eq!(updated.mirrors[0].endpoints[0].site, "github");
    assert_eq!(updated.mirrors[0].endpoints[1].site, "github");
}
// A config that already has a sync group must skip the add-group flow and go
// straight to the action menu (input "3" = Done).
#[test]
fn wizard_starts_existing_config_at_sync_group_menu() {
    let config = Config {
        sites: vec![
            SiteConfig {
                name: "github".to_string(),
                provider: ProviderKind::Github,
                base_url: "https://github.com".to_string(),
                api_url: None,
                token: TokenConfig::Value("existing-gh".to_string()),
                git_username: None,
            },
            SiteConfig {
                name: "gitea".to_string(),
                provider: ProviderKind::Gitea,
                base_url: "https://gitea.example.test".to_string(),
                api_url: None,
                token: TokenConfig::Value("existing-gt".to_string()),
                git_username: None,
            },
        ],
        mirrors: vec![MirrorConfig {
            name: "sync-1".to_string(),
            endpoints: vec![
                EndpointConfig {
                    site: "github".to_string(),
                    kind: NamespaceKind::User,
                    namespace: "alice".to_string(),
                },
                EndpointConfig {
                    site: "gitea".to_string(),
                    kind: NamespaceKind::User,
                    namespace: "alice".to_string(),
                },
            ],
            create_missing: true,
            visibility: Visibility::Private,
            allow_force: false,
        }],
        webhook: None,
    };
    let mut reader = Cursor::new(b"3\n".as_slice());
    let mut output = Vec::new();
    let updated = run_config_wizard_with_io(config, &mut reader, &mut output).unwrap();
    assert_eq!(updated.mirrors.len(), 1);
    let output = String::from_utf8(output).unwrap();
    assert!(output.contains("1. github.com/alice <-> gitea.example.test/alice"));
    assert!(output.contains("What would you like to do?"));
    assert!(!output.contains("Profile/org URL:"));
}
// Menu path: "2" (delete) -> "1" (first group) -> "3" (done); the group list
// is re-printed afterwards and shows the empty-state message.
#[test]
fn wizard_deletes_existing_sync_group_from_menu() {
    let config = Config {
        sites: vec![
            SiteConfig {
                name: "github".to_string(),
                provider: ProviderKind::Github,
                base_url: "https://github.com".to_string(),
                api_url: None,
                token: TokenConfig::Value("existing-gh".to_string()),
                git_username: None,
            },
            SiteConfig {
                name: "gitea".to_string(),
                provider: ProviderKind::Gitea,
                base_url: "https://gitea.example.test".to_string(),
                api_url: None,
                token: TokenConfig::Value("existing-gt".to_string()),
                git_username: None,
            },
        ],
        mirrors: vec![MirrorConfig {
            name: "sync-1".to_string(),
            endpoints: vec![
                EndpointConfig {
                    site: "github".to_string(),
                    kind: NamespaceKind::User,
                    namespace: "alice".to_string(),
                },
                EndpointConfig {
                    site: "gitea".to_string(),
                    kind: NamespaceKind::User,
                    namespace: "alice".to_string(),
                },
            ],
            create_missing: true,
            visibility: Visibility::Private,
            allow_force: false,
        }],
        webhook: None,
    };
    let input = ["2", "1", "3"].join("\n") + "\n";
    let mut reader = Cursor::new(input.as_bytes());
    let mut output = Vec::new();
    let updated = run_config_wizard_with_io(config, &mut reader, &mut output).unwrap();
    assert!(updated.mirrors.is_empty());
    let output = String::from_utf8(output).unwrap();
    assert!(output.contains("Delete sync group"));
    assert!(output.contains("2. Back"));
    assert!(output.contains("deleted sync group 1"));
    assert!(output.contains("No sync groups configured."));
}
// Menu path: "2" (delete) -> "2" (the Back entry) -> "3" (done); nothing is
// removed and no deletion message is printed.
#[test]
fn wizard_can_go_back_from_delete_menu() {
    let config = Config {
        sites: vec![
            SiteConfig {
                name: "github".to_string(),
                provider: ProviderKind::Github,
                base_url: "https://github.com".to_string(),
                api_url: None,
                token: TokenConfig::Value("existing-gh".to_string()),
                git_username: None,
            },
            SiteConfig {
                name: "gitea".to_string(),
                provider: ProviderKind::Gitea,
                base_url: "https://gitea.example.test".to_string(),
                api_url: None,
                token: TokenConfig::Value("existing-gt".to_string()),
                git_username: None,
            },
        ],
        mirrors: vec![MirrorConfig {
            name: "sync-1".to_string(),
            endpoints: vec![
                EndpointConfig {
                    site: "github".to_string(),
                    kind: NamespaceKind::User,
                    namespace: "alice".to_string(),
                },
                EndpointConfig {
                    site: "gitea".to_string(),
                    kind: NamespaceKind::User,
                    namespace: "alice".to_string(),
                },
            ],
            create_missing: true,
            visibility: Visibility::Private,
            allow_force: false,
        }],
        webhook: None,
    };
    let input = ["2", "2", "3"].join("\n") + "\n";
    let mut reader = Cursor::new(input.as_bytes());
    let mut output = Vec::new();
    let updated = run_config_wizard_with_io(config, &mut reader, &mut output).unwrap();
    assert_eq!(updated.mirrors.len(), 1);
    let output = String::from_utf8(output).unwrap();
    assert!(output.contains("2. Back"));
    assert!(!output.contains("deleted sync group"));
}
// An exhausted input stream must produce read_line's EOF error rather than an
// infinite re-prompt loop (or a panic).
#[test]
fn wizard_reports_eof_instead_of_looping() {
    let mut reader = Cursor::new(b"".as_slice());
    let mut output = Vec::new();
    let err = run_config_wizard_with_io(Config::default(), &mut reader, &mut output)
        .unwrap_err()
        .to_string();
    assert!(err.contains("unexpected end of input"));
}
// parse_profile_url: scheme defaults to https, ports are kept in the base
// URL, and multi-segment paths (gitlab groups) stay in the namespace.
#[test]
fn profile_urls_are_parsed_into_base_and_namespace() {
    let parsed = parse_profile_url("github.com/alice").unwrap();
    assert_eq!(parsed.base_url, "https://github.com");
    assert_eq!(parsed.host, "github.com");
    assert_eq!(parsed.namespace, "alice");
    let parsed = parse_profile_url("https://gitlab.example.test:8443/groups/team").unwrap();
    assert_eq!(parsed.base_url, "https://gitlab.example.test:8443");
    assert_eq!(parsed.namespace, "groups/team");
}
#[test]
fn site_names_are_derived_from_urls_and_made_unique() {
    // Fresh config: a well-known provider host maps to its short name.
    let mut config = Config::default();
    assert_eq!(
        default_site_name(&config, "https://github.com", &ProviderKind::Github),
        "github"
    );
    // Dotted custom hosts are flattened to dashes and the port is dropped.
    assert_eq!(
        default_site_name(
            &config,
            "https://git.my-company.com:3000",
            &ProviderKind::Gitea
        ),
        "git-my-company"
    );
    // Once "github" is taken by an existing site, the derived name gains a
    // numeric suffix to stay unique.
    config.upsert_site(SiteConfig {
        name: "github".to_string(),
        provider: ProviderKind::Github,
        base_url: "https://github.com".to_string(),
        api_url: None,
        token: TokenConfig::Value("token".to_string()),
        git_username: None,
    });
    assert_eq!(
        default_site_name(&config, "https://github.com", &ProviderKind::Github),
        "github-2"
    );
}
#[test]
fn token_creation_urls_are_provider_specific() {
    // Each provider links to its own token-settings page; bare hosts are
    // normalised to https and trailing slashes are absorbed.
    let cases = [
        (
            ProviderKind::Github,
            "https://github.com/",
            "https://github.com/settings/tokens",
        ),
        (
            ProviderKind::Gitlab,
            "https://gitlab.example.test",
            "https://gitlab.example.test/-/user_settings/personal_access_tokens?name=git-sync&scopes=api",
        ),
        (
            ProviderKind::Gitea,
            "gitea.example.test",
            "https://gitea.example.test/user/settings/applications",
        ),
        (
            ProviderKind::Forgejo,
            "forgejo.example.test",
            "https://forgejo.example.test/user/settings/applications",
        ),
    ];
    for (provider, base, expected) in cases {
        assert_eq!(token_creation_url(&provider, base), expected);
    }
}
+10 -9
View File
@@ -3,11 +3,12 @@ mod git;
mod interactive;
mod logging;
mod provider;
mod state;
mod sync;
mod webhook;
use std::env;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use clap::{Args, Parser, Subcommand};
@@ -120,8 +121,7 @@ fn main() -> Result<()> {
match cli.command {
Command::Config => interactive::run_config_wizard(&config_path),
Command::Sync(command) => {
let config = Config::load(&config_path)
.with_context(|| format!("failed to load config at {}", config_path.display()))?;
let config = load_config(&config_path)?;
sync_all(
&config,
SyncOptions {
@@ -137,8 +137,7 @@ fn main() -> Result<()> {
)
}
Command::Serve(command) => {
let config = Config::load(&config_path)
.with_context(|| format!("failed to load config at {}", config_path.display()))?;
let config = load_config(&config_path)?;
let full_sync_interval_minutes = command.full_sync_interval_minutes.or_else(|| {
config
.webhook
@@ -165,8 +164,7 @@ fn main() -> Result<()> {
)
}
Command::Webhook(WebhookCommand::Install(command)) => {
let config = Config::load(&config_path)
.with_context(|| format!("failed to load config at {}", config_path.display()))?;
let config = load_config(&config_path)?;
let secret = resolve_webhook_secret(&config, command.secret, command.secret_env)?;
let url = resolve_webhook_url(&config, command.url)?;
install_webhooks(
@@ -182,8 +180,7 @@ fn main() -> Result<()> {
)
}
Command::Webhook(WebhookCommand::Uninstall(command)) => {
let config = Config::load(&config_path)
.with_context(|| format!("failed to load config at {}", config_path.display()))?;
let config = load_config(&config_path)?;
uninstall_webhooks(
&config,
WebhookUninstallOptions {
@@ -196,6 +193,10 @@ fn main() -> Result<()> {
}
}
/// Load the configuration at `path`, attaching the path to any failure so
/// the error message tells the user which file could not be loaded.
fn load_config(path: &Path) -> Result<Config> {
    Config::load(path).with_context(|| format!("failed to load config at {}", path.display()))
}
fn resolve_webhook_secret(
config: &Config,
value: Option<String>,
+1 -325
View File
@@ -747,328 +747,4 @@ pub fn repos_by_name(repos: Vec<EndpointRepo>) -> HashMap<String, Vec<EndpointRe
}
#[cfg(test)]
mod tests {
use super::*;
use crate::config::TokenConfig;
use std::io::{Read, Write};
use std::net::TcpListener;
use std::thread;
#[test]
fn extracts_next_link() {
let mut headers = HeaderMap::new();
headers.insert(
"link",
HeaderValue::from_static("<https://example.test?page=2>; rel=\"next\", <https://example.test?page=5>; rel=\"last\""),
);
assert_eq!(next_link(&headers).unwrap(), "https://example.test?page=2");
}
#[test]
fn authenticated_clone_urls_use_provider_defaults() {
let github_site = site(ProviderKind::Github, None);
let github = ProviderClient::new(&github_site).unwrap();
assert_eq!(
github
.authenticated_clone_url("https://github.com/alice/repo.git")
.unwrap(),
"https://x-access-token:secret@github.com/alice/repo.git"
);
let gitlab_site = site(ProviderKind::Gitlab, None);
let gitlab = ProviderClient::new(&gitlab_site).unwrap();
assert_eq!(
gitlab
.authenticated_clone_url("https://gitlab.example.test/alice/repo.git")
.unwrap(),
"https://oauth2:secret@gitlab.example.test/alice/repo.git"
);
let forgejo_site = site(ProviderKind::Forgejo, None);
let forgejo = ProviderClient::new(&forgejo_site).unwrap();
assert_eq!(
forgejo
.authenticated_clone_url("https://forgejo.example.test/alice/repo.git")
.unwrap(),
"https://oauth2:secret@forgejo.example.test/alice/repo.git"
);
}
#[test]
fn authenticated_clone_urls_can_override_git_username() {
let gitea_site = site(ProviderKind::Gitea, Some("mirror-user".to_string()));
let client = ProviderClient::new(&gitea_site).unwrap();
assert_eq!(
client
.authenticated_clone_url("https://gitea.example.test/alice/repo.git")
.unwrap(),
"https://mirror-user:secret@gitea.example.test/alice/repo.git"
);
}
#[test]
fn group_paths_are_url_encoded_for_gitlab() {
assert_eq!(urlencoding("parent/child group"), "parent%2Fchild+group");
}
#[test]
fn validate_token_checks_user_endpoint_with_provider_auth_header() {
let (api_url, handle) = one_request_server("200 OK", "{}", |request| {
assert!(request.starts_with("GET /user "), "request was {request}");
assert!(
request
.to_ascii_lowercase()
.contains("authorization: bearer secret"),
"request was {request}"
);
});
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Github, None)
};
ProviderClient::new(&site)
.unwrap()
.validate_token()
.unwrap();
handle.join().unwrap();
}
#[test]
fn validate_token_reports_provider_rejection() {
let (api_url, handle) = one_request_server("401 Unauthorized", "bad token", |request| {
assert!(request.starts_with("GET /user "), "request was {request}");
assert!(
request
.to_ascii_lowercase()
.contains("private-token: secret"),
"request was {request}"
);
});
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Gitlab, None)
};
let err = ProviderClient::new(&site)
.unwrap()
.validate_token()
.unwrap_err()
.to_string();
assert!(err.contains("401 Unauthorized"));
handle.join().unwrap();
}
#[test]
fn detect_namespace_kind_uses_authenticated_github_api() {
let (api_url, handle) =
one_request_server("200 OK", r#"{"type":"Organization"}"#, |request| {
assert!(
request.starts_with("GET /users/acme "),
"request was {request}"
);
assert!(
request
.to_ascii_lowercase()
.contains("authorization: bearer secret"),
"request was {request}"
);
});
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Github, None)
};
let kind = ProviderClient::new(&site)
.unwrap()
.detect_namespace_kind("acme")
.unwrap();
assert_eq!(kind, Some(NamespaceKind::Org));
handle.join().unwrap();
}
#[test]
fn detect_namespace_kind_uses_authenticated_gitea_api() {
let (api_url, handle) = one_request_server("200 OK", "{}", |request| {
assert!(
request.starts_with("GET /orgs/acme "),
"request was {request}"
);
assert!(
request
.to_ascii_lowercase()
.contains("authorization: token secret"),
"request was {request}"
);
});
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Gitea, None)
};
let kind = ProviderClient::new(&site)
.unwrap()
.detect_namespace_kind("acme")
.unwrap();
assert_eq!(kind, Some(NamespaceKind::Org));
handle.join().unwrap();
}
#[test]
fn install_webhook_posts_github_hook_when_missing() {
let (api_url, handle) = request_server(
vec![("200 OK", "[]"), ("201 Created", r#"{"id":1}"#)],
|index, request| match index {
0 => assert!(
request.starts_with("GET /repos/alice/repo/hooks "),
"request was {request}"
),
1 => {
assert!(
request.starts_with("POST /repos/alice/repo/hooks "),
"request was {request}"
);
assert!(request.contains("https://mirror.example.test/webhook"));
assert!(request.contains("secret"));
assert!(request.contains("push"));
}
_ => unreachable!(),
},
);
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Github, None)
};
let client = ProviderClient::new(&site).unwrap();
client
.install_webhook(
&EndpointConfig {
site: "github".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
&RemoteRepo {
name: "repo".to_string(),
clone_url: "https://github.com/alice/repo.git".to_string(),
private: true,
description: None,
},
"https://mirror.example.test/webhook",
"secret",
)
.unwrap();
handle.join().unwrap();
}
#[test]
fn uninstall_webhook_deletes_matching_github_hook() {
let (api_url, handle) = request_server(
vec![
(
"200 OK",
r#"[{"id":42,"config":{"url":"https://mirror.example.test/webhook"}}]"#,
),
("204 No Content", ""),
],
|index, request| match index {
0 => assert!(
request.starts_with("GET /repos/alice/repo/hooks "),
"request was {request}"
),
1 => assert!(
request.starts_with("DELETE /repos/alice/repo/hooks/42 "),
"request was {request}"
),
_ => unreachable!(),
},
);
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Github, None)
};
let client = ProviderClient::new(&site).unwrap();
let removed = client
.uninstall_webhook(
&EndpointConfig {
site: "github".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
"repo",
"https://mirror.example.test/webhook",
)
.unwrap();
assert!(removed);
handle.join().unwrap();
}
fn site(provider: ProviderKind, git_username: Option<String>) -> SiteConfig {
SiteConfig {
name: "site".to_string(),
provider,
base_url: "https://example.test".to_string(),
api_url: None,
token: TokenConfig::Value("secret".to_string()),
git_username,
}
}
fn one_request_server<F>(
status: &'static str,
body: &'static str,
assert_request: F,
) -> (String, thread::JoinHandle<()>)
where
F: FnOnce(&str) + Send + 'static,
{
let listener = TcpListener::bind("127.0.0.1:0").unwrap();
let address = listener.local_addr().unwrap();
let handle = thread::spawn(move || {
let (mut stream, _) = listener.accept().unwrap();
let mut buffer = [0_u8; 4096];
let bytes = stream.read(&mut buffer).unwrap();
let request = String::from_utf8_lossy(&buffer[..bytes]).to_string();
assert_request(&request);
write!(
stream,
"HTTP/1.1 {status}\r\ncontent-type: application/json\r\ncontent-length: {}\r\n\r\n{body}",
body.len()
)
.unwrap();
});
(format!("http://{address}"), handle)
}
fn request_server<F>(
responses: Vec<(&'static str, &'static str)>,
mut assert_request: F,
) -> (String, thread::JoinHandle<()>)
where
F: FnMut(usize, &str) + Send + 'static,
{
let listener = TcpListener::bind("127.0.0.1:0").unwrap();
let address = listener.local_addr().unwrap();
let handle = thread::spawn(move || {
for (index, (status, body)) in responses.into_iter().enumerate() {
let (mut stream, _) = listener.accept().unwrap();
let mut buffer = [0_u8; 4096];
let bytes = stream.read(&mut buffer).unwrap();
let request = String::from_utf8_lossy(&buffer[..bytes]).to_string();
assert_request(index, &request);
write!(
stream,
"HTTP/1.1 {status}\r\ncontent-type: application/json\r\nconnection: close\r\ncontent-length: {}\r\n\r\n{body}",
body.len()
)
.unwrap();
}
});
(format!("http://{address}"), handle)
}
}
mod tests;
+322
View File
@@ -0,0 +1,322 @@
use super::*;
use crate::config::TokenConfig;
use std::io::{Read, Write};
use std::net::TcpListener;
use std::thread;
#[test]
fn extracts_next_link() {
    // The rel="next" target must be picked out of a multi-entry Link header.
    let mut headers = HeaderMap::new();
    let link = "<https://example.test?page=2>; rel=\"next\", <https://example.test?page=5>; rel=\"last\"";
    headers.insert("link", HeaderValue::from_static(link));
    assert_eq!(next_link(&headers).unwrap(), "https://example.test?page=2");
}
#[test]
fn authenticated_clone_urls_use_provider_defaults() {
    // Without an explicit git_username, each provider falls back to its
    // conventional token user: x-access-token for GitHub, oauth2 otherwise.
    let cases = [
        (
            ProviderKind::Github,
            "https://github.com/alice/repo.git",
            "https://x-access-token:secret@github.com/alice/repo.git",
        ),
        (
            ProviderKind::Gitlab,
            "https://gitlab.example.test/alice/repo.git",
            "https://oauth2:secret@gitlab.example.test/alice/repo.git",
        ),
        (
            ProviderKind::Forgejo,
            "https://forgejo.example.test/alice/repo.git",
            "https://oauth2:secret@forgejo.example.test/alice/repo.git",
        ),
    ];
    for (provider, clone_url, expected) in cases {
        let site_config = site(provider, None);
        let client = ProviderClient::new(&site_config).unwrap();
        assert_eq!(client.authenticated_clone_url(clone_url).unwrap(), expected);
    }
}
#[test]
fn authenticated_clone_urls_can_override_git_username() {
    // A configured git_username takes precedence over the provider default.
    let site_config = site(ProviderKind::Gitea, Some("mirror-user".to_string()));
    let client = ProviderClient::new(&site_config).unwrap();
    let url = client
        .authenticated_clone_url("https://gitea.example.test/alice/repo.git")
        .unwrap();
    assert_eq!(url, "https://mirror-user:secret@gitea.example.test/alice/repo.git");
}
#[test]
fn group_paths_are_url_encoded_for_gitlab() {
    // GitLab group lookups take the full path as a single URL component:
    // `/` must become %2F and spaces `+` (form-style encoding).
    assert_eq!(urlencoding("parent/child group"), "parent%2Fchild+group");
}
#[test]
fn validate_token_checks_user_endpoint_with_provider_auth_header() {
    // GitHub token validation must hit GET /user with a Bearer auth header.
    let (api_url, handle) = one_request_server("200 OK", "{}", |request| {
        assert!(request.starts_with("GET /user "), "request was {request}");
        assert!(
            request
                .to_ascii_lowercase()
                .contains("authorization: bearer secret"),
            "request was {request}"
        );
    });
    // Point the client at the stub server instead of the real API host.
    let site = SiteConfig {
        api_url: Some(api_url),
        ..site(ProviderKind::Github, None)
    };
    ProviderClient::new(&site)
        .unwrap()
        .validate_token()
        .unwrap();
    // Join so request assertions from the stub thread fail the test.
    handle.join().unwrap();
}
#[test]
fn validate_token_reports_provider_rejection() {
    // GitLab authentication uses the Private-Token header; a 401 from the
    // provider must surface in the returned error text.
    let (api_url, handle) = one_request_server("401 Unauthorized", "bad token", |request| {
        assert!(request.starts_with("GET /user "), "request was {request}");
        assert!(
            request
                .to_ascii_lowercase()
                .contains("private-token: secret"),
            "request was {request}"
        );
    });
    let site = SiteConfig {
        api_url: Some(api_url),
        ..site(ProviderKind::Gitlab, None)
    };
    let err = ProviderClient::new(&site)
        .unwrap()
        .validate_token()
        .unwrap_err()
        .to_string();
    assert!(err.contains("401 Unauthorized"));
    // Join so request assertions from the stub thread fail the test.
    handle.join().unwrap();
}
#[test]
fn detect_namespace_kind_uses_authenticated_github_api() {
    // GitHub reports the namespace type via GET /users/{name}; the lookup
    // must carry the site token as a Bearer header.
    let (api_url, handle) = one_request_server("200 OK", r#"{"type":"Organization"}"#, |request| {
        assert!(
            request.starts_with("GET /users/acme "),
            "request was {request}"
        );
        assert!(
            request
                .to_ascii_lowercase()
                .contains("authorization: bearer secret"),
            "request was {request}"
        );
    });
    let site = SiteConfig {
        api_url: Some(api_url),
        ..site(ProviderKind::Github, None)
    };
    let kind = ProviderClient::new(&site)
        .unwrap()
        .detect_namespace_kind("acme")
        .unwrap();
    // "Organization" in the response body maps to the Org namespace kind.
    assert_eq!(kind, Some(NamespaceKind::Org));
    handle.join().unwrap();
}
#[test]
fn detect_namespace_kind_uses_authenticated_gitea_api() {
    // Gitea probes GET /orgs/{name} with its "token" auth scheme; a 200
    // response means the namespace is an organisation.
    let (api_url, handle) = one_request_server("200 OK", "{}", |request| {
        assert!(
            request.starts_with("GET /orgs/acme "),
            "request was {request}"
        );
        assert!(
            request
                .to_ascii_lowercase()
                .contains("authorization: token secret"),
            "request was {request}"
        );
    });
    let site = SiteConfig {
        api_url: Some(api_url),
        ..site(ProviderKind::Gitea, None)
    };
    let kind = ProviderClient::new(&site)
        .unwrap()
        .detect_namespace_kind("acme")
        .unwrap();
    assert_eq!(kind, Some(NamespaceKind::Org));
    // Join so request assertions from the stub thread fail the test.
    handle.join().unwrap();
}
#[test]
fn install_webhook_posts_github_hook_when_missing() {
    // Two-step flow: list existing hooks (empty), then POST a new hook
    // carrying the callback URL, the shared secret, and the push event.
    let (api_url, handle) = request_server(
        vec![("200 OK", "[]"), ("201 Created", r#"{"id":1}"#)],
        |index, request| match index {
            0 => assert!(
                request.starts_with("GET /repos/alice/repo/hooks "),
                "request was {request}"
            ),
            1 => {
                assert!(
                    request.starts_with("POST /repos/alice/repo/hooks "),
                    "request was {request}"
                );
                assert!(request.contains("https://mirror.example.test/webhook"));
                assert!(request.contains("secret"));
                assert!(request.contains("push"));
            }
            _ => unreachable!(),
        },
    );
    let site = SiteConfig {
        api_url: Some(api_url),
        ..site(ProviderKind::Github, None)
    };
    let client = ProviderClient::new(&site).unwrap();
    client
        .install_webhook(
            &EndpointConfig {
                site: "github".to_string(),
                kind: NamespaceKind::User,
                namespace: "alice".to_string(),
            },
            &RemoteRepo {
                name: "repo".to_string(),
                clone_url: "https://github.com/alice/repo.git".to_string(),
                private: true,
                description: None,
            },
            "https://mirror.example.test/webhook",
            "secret",
        )
        .unwrap();
    // Join so per-request assertions from the stub thread fail the test.
    handle.join().unwrap();
}
#[test]
fn uninstall_webhook_deletes_matching_github_hook() {
    // Flow: list hooks, match the one whose config.url equals the callback
    // URL, then DELETE it by id; the helper reports whether it removed one.
    let (api_url, handle) = request_server(
        vec![
            (
                "200 OK",
                r#"[{"id":42,"config":{"url":"https://mirror.example.test/webhook"}}]"#,
            ),
            ("204 No Content", ""),
        ],
        |index, request| match index {
            0 => assert!(
                request.starts_with("GET /repos/alice/repo/hooks "),
                "request was {request}"
            ),
            1 => assert!(
                request.starts_with("DELETE /repos/alice/repo/hooks/42 "),
                "request was {request}"
            ),
            _ => unreachable!(),
        },
    );
    let site = SiteConfig {
        api_url: Some(api_url),
        ..site(ProviderKind::Github, None)
    };
    let client = ProviderClient::new(&site).unwrap();
    let removed = client
        .uninstall_webhook(
            &EndpointConfig {
                site: "github".to_string(),
                kind: NamespaceKind::User,
                namespace: "alice".to_string(),
            },
            "repo",
            "https://mirror.example.test/webhook",
        )
        .unwrap();
    assert!(removed);
    // Join so per-request assertions from the stub thread fail the test.
    handle.join().unwrap();
}
/// Minimal site fixture: fixed name, base URL, and "secret" token; only the
/// provider kind and optional git username vary between tests.
fn site(provider: ProviderKind, git_username: Option<String>) -> SiteConfig {
    SiteConfig {
        provider,
        git_username,
        name: "site".to_string(),
        base_url: "https://example.test".to_string(),
        api_url: None,
        token: TokenConfig::Value("secret".to_string()),
    }
}
/// Spawn a one-shot HTTP stub on an ephemeral local port.
///
/// The background thread accepts a single connection, hands the raw request
/// text to `assert_request`, and replies with `status`/`body` as JSON.
/// Returns the server's base URL and the thread handle; callers must join
/// the handle so assertion panics inside the thread fail the test.
fn one_request_server<F>(
    status: &'static str,
    body: &'static str,
    assert_request: F,
) -> (String, thread::JoinHandle<()>)
where
    F: FnOnce(&str) + Send + 'static,
{
    // Port 0 lets the OS choose a free port; the real address is read back.
    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
    let address = listener.local_addr().unwrap();
    let handle = thread::spawn(move || {
        let (mut stream, _) = listener.accept().unwrap();
        // A single read suffices: test requests fit comfortably in 4 KiB.
        let mut buffer = [0_u8; 4096];
        let bytes = stream.read(&mut buffer).unwrap();
        let request = String::from_utf8_lossy(&buffer[..bytes]).to_string();
        assert_request(&request);
        write!(
            stream,
            "HTTP/1.1 {status}\r\ncontent-type: application/json\r\ncontent-length: {}\r\n\r\n{body}",
            body.len()
        )
        .unwrap();
    });
    (format!("http://{address}"), handle)
}
/// Spawn an HTTP stub that serves a fixed sequence of responses, accepting
/// one connection per response.
///
/// `assert_request` receives the zero-based request index plus the raw
/// request text. Returns the base URL and the server thread handle; callers
/// must join the handle to propagate in-thread assertion panics.
fn request_server<F>(
    responses: Vec<(&'static str, &'static str)>,
    mut assert_request: F,
) -> (String, thread::JoinHandle<()>)
where
    F: FnMut(usize, &str) + Send + 'static,
{
    // Ephemeral port; the actual address is recovered from the listener.
    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
    let address = listener.local_addr().unwrap();
    let handle = thread::spawn(move || {
        for (index, (status, body)) in responses.into_iter().enumerate() {
            let (mut stream, _) = listener.accept().unwrap();
            let mut buffer = [0_u8; 4096];
            let bytes = stream.read(&mut buffer).unwrap();
            let request = String::from_utf8_lossy(&buffer[..bytes]).to_string();
            assert_request(index, &request);
            // `connection: close` makes the client reconnect, keeping the
            // one-accept-per-response loop in step with the request stream.
            write!(
                stream,
                "HTTP/1.1 {status}\r\ncontent-type: application/json\r\nconnection: close\r\ncontent-length: {}\r\n\r\n{body}",
                body.len()
            )
            .unwrap();
        }
    });
    (format!("http://{address}"), handle)
}
+37
View File
@@ -0,0 +1,37 @@
use std::fs;
use std::path::Path;
use anyhow::{Context, Result};
use serde::Serialize;
use serde::de::DeserializeOwned;
/// Load a TOML file into `T`, or return `T::default()` when the file does
/// not exist.
///
/// Reading directly (instead of probing with `Path::exists` first) closes
/// the race where the file disappears between the check and the read: a
/// missing file always maps to the default value, while every other I/O or
/// parse failure is reported with the offending path attached.
pub fn load_toml_or_default<T>(path: &Path) -> Result<T>
where
    T: Default + DeserializeOwned,
{
    let contents = match fs::read_to_string(path) {
        Ok(contents) => contents,
        // A missing file is the expected "nothing saved yet" case.
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => {
            return Ok(T::default());
        }
        Err(error) => {
            return Err(error).with_context(|| format!("failed to read {}", path.display()));
        }
    };
    toml::from_str(&contents).with_context(|| format!("failed to parse {}", path.display()))
}
/// Serialize `value` as pretty-printed TOML and write it to `path`,
/// creating any missing parent directories first.
pub fn save_toml<T>(path: &Path, value: &T) -> Result<()>
where
    T: Serialize,
{
    // Ensure the parent directory exists before writing the file.
    path.parent()
        .map(|parent| {
            fs::create_dir_all(parent)
                .with_context(|| format!("failed to create {}", parent.display()))
        })
        .transpose()?;
    let rendered = toml::to_string_pretty(value)?;
    fs::write(path, rendered).with_context(|| format!("failed to write {}", path.display()))
}
pub fn remove_file_if_exists(path: &Path) -> Result<()> {
if path.exists() {
fs::remove_file(path).with_context(|| format!("failed to remove {}", path.display()))?;
}
Ok(())
}
+15 -456
View File
@@ -7,7 +7,6 @@ use std::thread;
use anyhow::{Context, Result, bail};
use console::style;
use regex::Regex;
use serde::{Deserialize, Serialize};
use crate::config::{Config, EndpointConfig, MirrorConfig, default_work_dir, validate_config};
use crate::git::{
@@ -18,8 +17,20 @@ use crate::logging;
use crate::provider::{EndpointRepo, ProviderClient, repos_by_name};
use crate::webhook;
const FAILURE_STATE_FILE: &str = "failed-repos.toml";
const REF_STATE_FILE: &str = "ref-state.toml";
mod output;
mod state;
use self::output::{
print_branch_decisions, print_branch_deletions, print_failure, print_failure_summary,
print_tag_decisions, short_sha,
};
use self::state::{
FailureState, RefState, RemoteRefState, SyncFailure, load_failure_state, load_ref_state,
save_failure_state, save_ref_state,
};
#[cfg(test)]
use self::state::{FailedRepo, failure_state_path};
pub const DEFAULT_JOBS: usize = 4;
#[derive(Clone, Debug)]
@@ -125,217 +136,6 @@ pub fn sync_all(config: &Config, options: SyncOptions) -> Result<()> {
Ok(())
}
#[derive(Debug)]
struct SyncFailure {
scope: String,
error: String,
retry: Option<FailedRepo>,
}
impl SyncFailure {
fn group(scope: String, error: anyhow::Error) -> Self {
Self {
scope,
error: format_error(&error),
retry: None,
}
}
fn repo(group: String, repo: String, error: anyhow::Error) -> Self {
Self {
scope: format!("{group}/{repo}"),
error: format_error(&error),
retry: Some(FailedRepo { group, repo }),
}
}
}
#[derive(Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
struct FailedRepo {
group: String,
repo: String,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
struct FailureState {
#[serde(default)]
repos: Vec<FailedRepo>,
}
impl FailureState {
fn from_failures(failures: &[SyncFailure]) -> Self {
let repos = failures
.iter()
.filter_map(|failure| failure.retry.clone())
.collect::<BTreeSet<_>>()
.into_iter()
.collect();
Self { repos }
}
fn repos_by_group(&self) -> BTreeMap<String, BTreeSet<String>> {
let mut output = BTreeMap::<String, BTreeSet<String>>::new();
for failure in &self.repos {
output
.entry(failure.group.clone())
.or_default()
.insert(failure.repo.clone());
}
output
}
}
fn load_failure_state(work_dir: &Path) -> Result<FailureState> {
let path = failure_state_path(work_dir);
if !path.exists() {
return Ok(FailureState::default());
}
let contents =
fs::read_to_string(&path).with_context(|| format!("failed to read {}", path.display()))?;
toml::from_str(&contents).with_context(|| format!("failed to parse {}", path.display()))
}
fn save_failure_state(work_dir: &Path, state: &FailureState) -> Result<()> {
let path = failure_state_path(work_dir);
if state.repos.is_empty() {
if path.exists() {
fs::remove_file(&path)
.with_context(|| format!("failed to remove {}", path.display()))?;
}
return Ok(());
}
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)
.with_context(|| format!("failed to create {}", parent.display()))?;
}
let contents = toml::to_string_pretty(state)?;
fs::write(&path, contents).with_context(|| format!("failed to write {}", path.display()))
}
fn failure_state_path(work_dir: &Path) -> PathBuf {
work_dir.join(FAILURE_STATE_FILE)
}
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
struct RemoteRefState {
hash: String,
refs: usize,
#[serde(default)]
branches: BTreeMap<String, String>,
#[serde(default)]
tags: BTreeMap<String, String>,
}
impl From<RemoteRefSnapshot> for RemoteRefState {
fn from(value: RemoteRefSnapshot) -> Self {
Self {
hash: value.hash,
refs: value.refs,
branches: value.branches,
tags: value.tags,
}
}
}
impl From<&RemoteRefState> for RemoteRefSnapshot {
fn from(value: &RemoteRefState) -> Self {
Self {
hash: value.hash.clone(),
refs: value.refs,
branches: value.branches.clone(),
tags: value.tags.clone(),
}
}
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
struct RefState {
#[serde(default)]
repos: BTreeMap<String, BTreeMap<String, BTreeMap<String, RemoteRefState>>>,
}
impl RefState {
fn repo_matches(
&self,
group: &str,
repo: &str,
refs: &BTreeMap<String, RemoteRefState>,
) -> bool {
self.repos.get(group).and_then(|repos| repos.get(repo)) == Some(refs)
}
fn set_repo(&mut self, group: &str, repo: &str, refs: BTreeMap<String, RemoteRefState>) {
self.repos
.entry(group.to_string())
.or_default()
.insert(repo.to_string(), refs);
}
fn repo(&self, group: &str, repo: &str) -> Option<&BTreeMap<String, RemoteRefState>> {
self.repos.get(group).and_then(|repos| repos.get(repo))
}
}
fn load_ref_state(work_dir: &Path) -> Result<RefState> {
let path = ref_state_path(work_dir);
if !path.exists() {
return Ok(RefState::default());
}
let contents =
fs::read_to_string(&path).with_context(|| format!("failed to read {}", path.display()))?;
toml::from_str(&contents).with_context(|| format!("failed to parse {}", path.display()))
}
fn save_ref_state(work_dir: &Path, state: &RefState) -> Result<()> {
let path = ref_state_path(work_dir);
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)
.with_context(|| format!("failed to create {}", parent.display()))?;
}
let contents = toml::to_string_pretty(state)?;
fs::write(&path, contents).with_context(|| format!("failed to write {}", path.display()))
}
fn ref_state_path(work_dir: &Path) -> PathBuf {
work_dir.join(REF_STATE_FILE)
}
fn print_failure(scope: &str, error: &anyhow::Error) {
crate::logln!(
" {} {} {}",
style("fail").red().bold(),
style(scope).cyan(),
style(error_headline(error)).dim()
);
}
fn print_failure_summary(failures: &[SyncFailure]) {
crate::logln!();
crate::logln!(
"{} {}",
style("Failures").red().bold(),
style(format!("({})", failures.len())).dim()
);
for (index, failure) in failures.iter().enumerate() {
crate::logln!(" {}. {}", index + 1, style(&failure.scope).cyan().bold());
for line in failure.error.lines() {
crate::logln!(" {line}");
}
}
}
fn error_headline(error: &anyhow::Error) -> String {
format_error(error)
.lines()
.find(|line| !line.trim().is_empty())
.unwrap_or("unknown error")
.to_string()
}
fn format_error(error: &anyhow::Error) -> String {
format!("{error:#}")
}
struct GroupSyncContext<'a> {
config: &'a Config,
options: &'a SyncOptions,
@@ -1135,246 +935,5 @@ struct RepoRefSyncResult {
had_conflicts: bool,
}
fn print_branch_decisions(branches: &[crate::git::BranchDecision]) {
crate::logln!(
" {} {}",
style("branches").cyan().bold(),
style(format!("({})", branches.len())).dim()
);
for branch in branches {
crate::logln!(
" {} {} {}",
style(&branch.branch).cyan(),
style(format!("@{}", short_sha(&branch.sha))).dim(),
style(format!(
"{} -> {}",
branch.source_remotes.join("+"),
branch.target_remotes.join("+")
))
.dim()
);
}
}
fn print_branch_deletions(deletions: &[BranchDeletion]) {
crate::logln!(
" {} {}",
style("deleted branches").red().bold(),
style(format!("({})", deletions.len())).dim()
);
for deletion in deletions {
crate::logln!(
" {} {}",
style(&deletion.branch).cyan(),
style(format!(
"deleted on {} -> {}",
deletion.deleted_remotes.join("+"),
deletion.target_remotes.join("+")
))
.dim()
);
}
}
fn print_tag_decisions(tags: &[crate::git::TagDecision]) {
crate::logln!(
" {} {}",
style("tags").cyan().bold(),
style(format!("({})", tags.len())).dim()
);
for tag in tags {
crate::logln!(
" {} {} {}",
style(&tag.tag).cyan(),
style(format!("@{}", short_sha(&tag.sha))).dim(),
style(format!(
"{} -> {}",
tag.source_remotes.join("+"),
tag.target_remotes.join("+")
))
.dim()
);
}
}
fn short_sha(sha: &str) -> &str {
sha.get(..12).unwrap_or(sha)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn failure_state_persists_repo_failures_by_group() {
let temp = tempfile::TempDir::new().unwrap();
let failures = vec![
SyncFailure::repo(
"sync-1".to_string(),
"repo-a".to_string(),
anyhow::anyhow!("a"),
),
SyncFailure::repo(
"sync-1".to_string(),
"repo-a".to_string(),
anyhow::anyhow!("a again"),
),
SyncFailure::repo(
"sync-2".to_string(),
"repo-b".to_string(),
anyhow::anyhow!("b"),
),
SyncFailure::group(
"mirror group sync-3".to_string(),
anyhow::anyhow!("list failed"),
),
];
let state = FailureState::from_failures(&failures);
save_failure_state(temp.path(), &state).unwrap();
let loaded = load_failure_state(temp.path()).unwrap();
let by_group = loaded.repos_by_group();
assert_eq!(by_group["sync-1"].len(), 1);
assert!(by_group["sync-1"].contains("repo-a"));
assert_eq!(by_group["sync-2"].len(), 1);
assert!(by_group["sync-2"].contains("repo-b"));
assert!(!by_group.contains_key("sync-3"));
}
#[test]
fn empty_failure_state_removes_retry_file() {
let temp = tempfile::TempDir::new().unwrap();
let state = FailureState {
repos: vec![FailedRepo {
group: "sync-1".to_string(),
repo: "repo-a".to_string(),
}],
};
save_failure_state(temp.path(), &state).unwrap();
assert!(failure_state_path(temp.path()).exists());
save_failure_state(temp.path(), &FailureState::default()).unwrap();
assert!(!failure_state_path(temp.path()).exists());
}
#[test]
fn ref_state_persists_and_requires_exact_remote_ref_match() {
let temp = tempfile::TempDir::new().unwrap();
let mut refs = BTreeMap::new();
refs.insert(
"github_alice".to_string(),
remote_ref_state("abc", &[("main", "111")]),
);
refs.insert(
"gitea_alice".to_string(),
remote_ref_state("def", &[("main", "111")]),
);
let mut state = RefState::default();
state.set_repo("sync-1", "repo-a", refs.clone());
save_ref_state(temp.path(), &state).unwrap();
let loaded = load_ref_state(temp.path()).unwrap();
assert!(loaded.repo_matches("sync-1", "repo-a", &refs));
let mut changed_hash = refs.clone();
changed_hash.insert(
"github_alice".to_string(),
remote_ref_state("changed", &[("main", "111")]),
);
assert!(!loaded.repo_matches("sync-1", "repo-a", &changed_hash));
let mut missing_remote = refs;
missing_remote.remove("gitea_alice");
assert!(!loaded.repo_matches("sync-1", "repo-a", &missing_remote));
}
#[test]
fn branch_deletion_decisions_propagate_previous_synced_branch_deletion() {
let remotes = test_remotes();
let mut previous = BTreeMap::new();
previous.insert(
"github".to_string(),
remote_ref_state("a", &[("main", "111")]),
);
previous.insert(
"gitea".to_string(),
remote_ref_state("b", &[("main", "111")]),
);
let mut current = BTreeMap::new();
current.insert("github".to_string(), remote_ref_state("c", &[]));
current.insert(
"gitea".to_string(),
remote_ref_state("d", &[("main", "111")]),
);
let (deletions, conflicts, blocked) =
branch_deletion_decisions(&remotes, Some(&previous), &current);
assert!(conflicts.is_empty());
assert!(blocked.contains("main"));
assert_eq!(deletions.len(), 1);
assert_eq!(deletions[0].branch, "main");
assert_eq!(deletions[0].deleted_remotes, vec!["github".to_string()]);
assert_eq!(deletions[0].target_remotes, vec!["gitea".to_string()]);
}
#[test]
fn branch_deletion_decisions_conflict_when_branch_changed_elsewhere() {
let remotes = test_remotes();
let mut previous = BTreeMap::new();
previous.insert(
"github".to_string(),
remote_ref_state("a", &[("main", "111")]),
);
previous.insert(
"gitea".to_string(),
remote_ref_state("b", &[("main", "111")]),
);
let mut current = BTreeMap::new();
current.insert("github".to_string(), remote_ref_state("c", &[]));
current.insert(
"gitea".to_string(),
remote_ref_state("d", &[("main", "222")]),
);
let (deletions, conflicts, blocked) =
branch_deletion_decisions(&remotes, Some(&previous), &current);
assert!(deletions.is_empty());
assert!(blocked.contains("main"));
assert_eq!(conflicts.len(), 1);
assert_eq!(conflicts[0].branch, "main");
assert_eq!(conflicts[0].deleted_remotes, vec!["github".to_string()]);
assert_eq!(conflicts[0].changed_remotes, vec!["gitea".to_string()]);
}
/// Builds a `RemoteRefState` fixture from a snapshot hash and `(branch, sha)`
/// pairs; the ref count mirrors the number of branch pairs, tags stay empty.
fn remote_ref_state(hash: &str, branches: &[(&str, &str)]) -> RemoteRefState {
    RemoteRefState {
        hash: hash.to_string(),
        refs: branches.len(),
        branches: branches
            .iter()
            .map(|&(name, sha)| (name.to_string(), sha.to_string()))
            .collect(),
        tags: BTreeMap::new(),
    }
}

/// Two-remote fixture ("github" + "gitea") used by the deletion-decision tests.
fn test_remotes() -> Vec<RemoteSpec> {
    let specs = [
        (
            "github",
            "https://github.invalid/alice/repo.git",
            "github:alice:User",
        ),
        (
            "gitea",
            "https://gitea.invalid/alice/repo.git",
            "gitea:alice:User",
        ),
    ];
    specs
        .iter()
        .map(|&(name, url, display)| RemoteSpec {
            name: name.to_string(),
            url: url.to_string(),
            display: display.to_string(),
        })
        .collect()
}
}
mod tests;
+107
View File
@@ -0,0 +1,107 @@
use console::style;
use crate::git::{BranchDecision, BranchDeletion, TagDecision};
use super::state::SyncFailure;
pub(super) fn print_failure(scope: &str, error: &anyhow::Error) {
crate::logln!(
" {} {} {}",
style("fail").red().bold(),
style(scope).cyan(),
style(error_headline(error)).dim()
);
}
pub(super) fn print_failure_summary(failures: &[SyncFailure]) {
crate::logln!();
crate::logln!(
"{} {}",
style("Failures").red().bold(),
style(format!("({})", failures.len())).dim()
);
for (index, failure) in failures.iter().enumerate() {
crate::logln!(" {}. {}", index + 1, style(&failure.scope).cyan().bold());
for line in failure.error.lines() {
crate::logln!(" {line}");
}
}
}
/// First non-blank line of the formatted error chain, or a fallback marker
/// when the formatted text is entirely empty/whitespace.
fn error_headline(error: &anyhow::Error) -> String {
    let formatted = format_error(error);
    let headline = formatted
        .lines()
        .find(|line| !line.trim().is_empty())
        .unwrap_or("unknown error");
    headline.to_string()
}

/// Renders the error with anyhow's alternate format, which includes the
/// full cause chain rather than only the top-level message.
pub(super) fn format_error(error: &anyhow::Error) -> String {
    format!("{:#}", error)
}
pub(super) fn print_branch_decisions(branches: &[BranchDecision]) {
crate::logln!(
" {} {}",
style("branches").cyan().bold(),
style(format!("({})", branches.len())).dim()
);
for branch in branches {
crate::logln!(
" {} {} {}",
style(&branch.branch).cyan(),
style(format!("@{}", short_sha(&branch.sha))).dim(),
style(format!(
"{} -> {}",
branch.source_remotes.join("+"),
branch.target_remotes.join("+")
))
.dim()
);
}
}
/// Prints the deleted-branches section: which remotes the deletion was seen
/// on and which remotes it will be propagated to.
pub(super) fn print_branch_deletions(deletions: &[BranchDeletion]) {
    crate::logln!(
        " {} {}",
        style("deleted branches").red().bold(),
        style(format!("({})", deletions.len())).dim()
    );
    for entry in deletions {
        let detail = format!(
            "deleted on {} -> {}",
            entry.deleted_remotes.join("+"),
            entry.target_remotes.join("+")
        );
        crate::logln!(" {} {}", style(&entry.branch).cyan(), style(detail).dim());
    }
}
pub(super) fn print_tag_decisions(tags: &[TagDecision]) {
crate::logln!(
" {} {}",
style("tags").cyan().bold(),
style(format!("({})", tags.len())).dim()
);
for tag in tags {
crate::logln!(
" {} {} {}",
style(&tag.tag).cyan(),
style(format!("@{}", short_sha(&tag.sha))).dim(),
style(format!(
"{} -> {}",
tag.source_remotes.join("+"),
tag.target_remotes.join("+")
))
.dim()
);
}
}
/// First 12 bytes of `sha` as a subslice, or the whole string when it is
/// shorter than 12 bytes (or index 12 is not a char boundary).
pub(super) fn short_sha(sha: &str) -> &str {
    match sha.get(..12) {
        Some(prefix) => prefix,
        None => sha,
    }
}
+171
View File
@@ -0,0 +1,171 @@
use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
use anyhow::Result;
use serde::{Deserialize, Serialize};
use crate::git::RemoteRefSnapshot;
use crate::state::{load_toml_or_default, remove_file_if_exists, save_toml};
use super::output::format_error;
// On-disk state file names, created under the sync work directory.
const FAILURE_STATE_FILE: &str = "failed-repos.toml";
const REF_STATE_FILE: &str = "ref-state.toml";
/// One failure recorded during a sync run, for reporting and optional retry.
#[derive(Debug)]
pub(super) struct SyncFailure {
    // Human-readable scope, e.g. a group name or "group/repo".
    pub(super) scope: String,
    // Fully formatted error text (anyhow chain, may span multiple lines).
    pub(super) error: String,
    // Present only for per-repo failures; feeds the persisted retry list.
    retry: Option<FailedRepo>,
}

impl SyncFailure {
    /// Failure covering a whole mirror group; carries no per-repo retry entry.
    pub(super) fn group(scope: String, error: anyhow::Error) -> Self {
        Self {
            scope,
            error: format_error(&error),
            retry: None,
        }
    }

    /// Failure scoped to a single repo; displayed as "group/repo" and
    /// remembered so the repo can be retried on the next run.
    pub(super) fn repo(group: String, repo: String, error: anyhow::Error) -> Self {
        Self {
            scope: format!("{group}/{repo}"),
            error: format_error(&error),
            retry: Some(FailedRepo { group, repo }),
        }
    }
}
/// A repo that failed to sync, identified by its mirror group and repo name.
/// Ordered derives let these deduplicate and sort inside a `BTreeSet`.
#[derive(Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub(super) struct FailedRepo {
    pub(super) group: String,
    pub(super) repo: String,
}

/// Persisted retry list (`failed-repos.toml`); empty when the last run was clean.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub(super) struct FailureState {
    #[serde(default)]
    pub(super) repos: Vec<FailedRepo>,
}
impl FailureState {
pub(super) fn from_failures(failures: &[SyncFailure]) -> Self {
let repos = failures
.iter()
.filter_map(|failure| failure.retry.clone())
.collect::<BTreeSet<_>>()
.into_iter()
.collect();
Self { repos }
}
pub(super) fn repos_by_group(&self) -> BTreeMap<String, BTreeSet<String>> {
let mut output = BTreeMap::<String, BTreeSet<String>>::new();
for failure in &self.repos {
output
.entry(failure.group.clone())
.or_default()
.insert(failure.repo.clone());
}
output
}
}
/// Loads the retry list, defaulting to empty when the file does not exist.
pub(super) fn load_failure_state(work_dir: &Path) -> Result<FailureState> {
    load_toml_or_default(&failure_state_path(work_dir))
}

/// Persists the retry list. An empty state removes the file instead of
/// writing it, so a clean run leaves no stale `failed-repos.toml` behind.
pub(super) fn save_failure_state(work_dir: &Path, state: &FailureState) -> Result<()> {
    let path = failure_state_path(work_dir);
    if state.repos.is_empty() {
        remove_file_if_exists(&path)
    } else {
        save_toml(&path, state)
    }
}

/// Location of the retry list inside the work directory.
pub(super) fn failure_state_path(work_dir: &Path) -> PathBuf {
    work_dir.join(FAILURE_STATE_FILE)
}
/// Persisted form of one remote's ref snapshot. `Eq` lets whole ref maps be
/// compared to detect "nothing changed since the last sync".
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub(super) struct RemoteRefState {
    // Aggregate hash of the remote's refs (computed by the snapshot side).
    pub(super) hash: String,
    // Total number of refs seen in the snapshot.
    pub(super) refs: usize,
    // branch name -> commit SHA; `default` keeps old state files loadable.
    #[serde(default)]
    pub(super) branches: BTreeMap<String, String>,
    // tag name -> commit SHA; `default` keeps old state files loadable.
    #[serde(default)]
    pub(super) tags: BTreeMap<String, String>,
}

// Conversion from the live snapshot type into the persisted state type.
impl From<RemoteRefSnapshot> for RemoteRefState {
    fn from(value: RemoteRefSnapshot) -> Self {
        Self {
            hash: value.hash,
            refs: value.refs,
            branches: value.branches,
            tags: value.tags,
        }
    }
}

// Reverse conversion, cloning since the state stays owned by the caller.
impl From<&RemoteRefState> for RemoteRefSnapshot {
    fn from(value: &RemoteRefState) -> Self {
        Self {
            hash: value.hash.clone(),
            refs: value.refs,
            branches: value.branches.clone(),
            tags: value.tags.clone(),
        }
    }
}
/// Last fully synced ref state, keyed group -> repo -> remote name.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub(super) struct RefState {
    #[serde(default)]
    repos: BTreeMap<String, BTreeMap<String, BTreeMap<String, RemoteRefState>>>,
}

impl RefState {
    /// True when the stored refs for `group/repo` are exactly equal to `refs`.
    pub(super) fn repo_matches(
        &self,
        group: &str,
        repo: &str,
        refs: &BTreeMap<String, RemoteRefState>,
    ) -> bool {
        self.repo(group, repo) == Some(refs)
    }

    /// Records `refs` as the synced state for `group/repo`, creating the
    /// group map on first use.
    pub(super) fn set_repo(
        &mut self,
        group: &str,
        repo: &str,
        refs: BTreeMap<String, RemoteRefState>,
    ) {
        let group_entry = self.repos.entry(group.to_string()).or_default();
        group_entry.insert(repo.to_string(), refs);
    }

    /// Stored refs for `group/repo`, if any.
    pub(super) fn repo(
        &self,
        group: &str,
        repo: &str,
    ) -> Option<&BTreeMap<String, RemoteRefState>> {
        let group_repos = self.repos.get(group)?;
        group_repos.get(repo)
    }
}
/// Loads the synced ref state, defaulting to empty when the file is absent.
pub(super) fn load_ref_state(work_dir: &Path) -> Result<RefState> {
    load_toml_or_default(&ref_state_path(work_dir))
}

/// Persists the synced ref state as TOML.
pub(super) fn save_ref_state(work_dir: &Path, state: &RefState) -> Result<()> {
    save_toml(&ref_state_path(work_dir), state)
}

/// Location of the ref state file inside the work directory.
fn ref_state_path(work_dir: &Path) -> PathBuf {
    work_dir.join(REF_STATE_FILE)
}
+174
View File
@@ -0,0 +1,174 @@
use super::*;
// Retry state: duplicates collapse, group-level failures are excluded,
// and the state round-trips through the TOML file.
#[test]
fn failure_state_persists_repo_failures_by_group() {
    let temp = tempfile::TempDir::new().unwrap();
    let failures = vec![
        SyncFailure::repo(
            "sync-1".to_string(),
            "repo-a".to_string(),
            anyhow::anyhow!("a"),
        ),
        // Same repo failing twice must dedupe to one retry entry.
        SyncFailure::repo(
            "sync-1".to_string(),
            "repo-a".to_string(),
            anyhow::anyhow!("a again"),
        ),
        SyncFailure::repo(
            "sync-2".to_string(),
            "repo-b".to_string(),
            anyhow::anyhow!("b"),
        ),
        // Group-level failure: reported, but never added to the retry list.
        SyncFailure::group(
            "mirror group sync-3".to_string(),
            anyhow::anyhow!("list failed"),
        ),
    ];
    let state = FailureState::from_failures(&failures);
    save_failure_state(temp.path(), &state).unwrap();
    let loaded = load_failure_state(temp.path()).unwrap();
    let by_group = loaded.repos_by_group();
    assert_eq!(by_group["sync-1"].len(), 1);
    assert!(by_group["sync-1"].contains("repo-a"));
    assert_eq!(by_group["sync-2"].len(), 1);
    assert!(by_group["sync-2"].contains("repo-b"));
    assert!(!by_group.contains_key("sync-3"));
}

// Saving an empty retry state deletes the file rather than writing an
// empty document.
#[test]
fn empty_failure_state_removes_retry_file() {
    let temp = tempfile::TempDir::new().unwrap();
    let state = FailureState {
        repos: vec![FailedRepo {
            group: "sync-1".to_string(),
            repo: "repo-a".to_string(),
        }],
    };
    save_failure_state(temp.path(), &state).unwrap();
    assert!(failure_state_path(temp.path()).exists());
    save_failure_state(temp.path(), &FailureState::default()).unwrap();
    assert!(!failure_state_path(temp.path()).exists());
}

// Ref state round-trips, and `repo_matches` requires an exact match: a
// changed hash or a missing remote both count as "not matching".
#[test]
fn ref_state_persists_and_requires_exact_remote_ref_match() {
    let temp = tempfile::TempDir::new().unwrap();
    let mut refs = BTreeMap::new();
    refs.insert(
        "github_alice".to_string(),
        remote_ref_state("abc", &[("main", "111")]),
    );
    refs.insert(
        "gitea_alice".to_string(),
        remote_ref_state("def", &[("main", "111")]),
    );
    let mut state = RefState::default();
    state.set_repo("sync-1", "repo-a", refs.clone());
    save_ref_state(temp.path(), &state).unwrap();
    let loaded = load_ref_state(temp.path()).unwrap();
    assert!(loaded.repo_matches("sync-1", "repo-a", &refs));
    let mut changed_hash = refs.clone();
    changed_hash.insert(
        "github_alice".to_string(),
        remote_ref_state("changed", &[("main", "111")]),
    );
    assert!(!loaded.repo_matches("sync-1", "repo-a", &changed_hash));
    let mut missing_remote = refs;
    missing_remote.remove("gitea_alice");
    assert!(!loaded.repo_matches("sync-1", "repo-a", &missing_remote));
}
// A branch deleted on one remote since the last synced state, while every
// other remote still holds it at the previously synced SHA, becomes a
// propagated deletion (and is blocked from re-sync).
#[test]
fn branch_deletion_decisions_propagate_previous_synced_branch_deletion() {
    let remotes = test_remotes();
    let mut previous = BTreeMap::new();
    previous.insert(
        "github".to_string(),
        remote_ref_state("a", &[("main", "111")]),
    );
    previous.insert(
        "gitea".to_string(),
        remote_ref_state("b", &[("main", "111")]),
    );
    // Now: github deleted main, gitea is unchanged.
    let mut current = BTreeMap::new();
    current.insert("github".to_string(), remote_ref_state("c", &[]));
    current.insert(
        "gitea".to_string(),
        remote_ref_state("d", &[("main", "111")]),
    );
    let (deletions, conflicts, blocked) =
        branch_deletion_decisions(&remotes, Some(&previous), &current);
    assert!(conflicts.is_empty());
    assert!(blocked.contains("main"));
    assert_eq!(deletions.len(), 1);
    assert_eq!(deletions[0].branch, "main");
    assert_eq!(deletions[0].deleted_remotes, vec!["github".to_string()]);
    assert_eq!(deletions[0].target_remotes, vec!["gitea".to_string()]);
}

// A deletion on one remote combined with a new SHA on another remote is a
// conflict: the deletion must not be propagated.
#[test]
fn branch_deletion_decisions_conflict_when_branch_changed_elsewhere() {
    let remotes = test_remotes();
    let mut previous = BTreeMap::new();
    previous.insert(
        "github".to_string(),
        remote_ref_state("a", &[("main", "111")]),
    );
    previous.insert(
        "gitea".to_string(),
        remote_ref_state("b", &[("main", "111")]),
    );
    // github deleted main, but gitea advanced it to 222 since the last sync.
    let mut current = BTreeMap::new();
    current.insert("github".to_string(), remote_ref_state("c", &[]));
    current.insert(
        "gitea".to_string(),
        remote_ref_state("d", &[("main", "222")]),
    );
    let (deletions, conflicts, blocked) =
        branch_deletion_decisions(&remotes, Some(&previous), &current);
    assert!(deletions.is_empty());
    assert!(blocked.contains("main"));
    assert_eq!(conflicts.len(), 1);
    assert_eq!(conflicts[0].branch, "main");
    assert_eq!(conflicts[0].deleted_remotes, vec!["github".to_string()]);
    assert_eq!(conflicts[0].changed_remotes, vec!["gitea".to_string()]);
}
/// Builds a `RemoteRefState` fixture from a snapshot hash and `(branch, sha)`
/// pairs; the ref count mirrors the number of branch pairs, tags stay empty.
fn remote_ref_state(hash: &str, branches: &[(&str, &str)]) -> RemoteRefState {
    RemoteRefState {
        hash: hash.to_string(),
        refs: branches.len(),
        branches: branches
            .iter()
            .map(|&(name, sha)| (name.to_string(), sha.to_string()))
            .collect(),
        tags: BTreeMap::new(),
    }
}

/// Two-remote fixture ("github" + "gitea") used by the deletion-decision tests.
fn test_remotes() -> Vec<RemoteSpec> {
    let specs = [
        (
            "github",
            "https://github.invalid/alice/repo.git",
            "github:alice:User",
        ),
        (
            "gitea",
            "https://gitea.invalid/alice/repo.git",
            "gitea:alice:User",
        ),
    ];
    specs
        .iter()
        .map(|&(name, url, display)| RemoteSpec {
            name: name.to_string(),
            url: url.to_string(),
            display: display.to_string(),
        })
        .collect()
}
+4 -187
View File
@@ -1,5 +1,4 @@
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::sync::{Arc, Mutex, mpsc};
@@ -19,6 +18,7 @@ use crate::config::{
Config, EndpointConfig, MirrorConfig, ProviderKind, default_work_dir, validate_config,
};
use crate::provider::{EndpointRepo, ProviderClient, RemoteRepo};
use crate::state::{load_toml_or_default, save_toml};
use crate::sync::{SyncOptions, sync_all};
type HmacSha256 = Hmac<Sha256>;
@@ -482,23 +482,11 @@ fn webhook_installation_key(group: &str, endpoint: &EndpointConfig, repo: &str)
}
fn load_webhook_state(work_dir: &Path) -> Result<WebhookState> {
let path = webhook_state_path(work_dir);
if !path.exists() {
return Ok(WebhookState::default());
}
let contents =
fs::read_to_string(&path).with_context(|| format!("failed to read {}", path.display()))?;
toml::from_str(&contents).with_context(|| format!("failed to parse {}", path.display()))
load_toml_or_default(&webhook_state_path(work_dir))
}
fn save_webhook_state(work_dir: &Path, state: &WebhookState) -> Result<()> {
let path = webhook_state_path(work_dir);
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)
.with_context(|| format!("failed to create {}", parent.display()))?;
}
let contents = toml::to_string_pretty(state)?;
fs::write(&path, contents).with_context(|| format!("failed to write {}", path.display()))
save_toml(&webhook_state_path(work_dir), state)
}
fn webhook_state_path(work_dir: &Path) -> PathBuf {
@@ -781,175 +769,4 @@ fn fixed_time_eq(left: &[u8], right: &[u8]) -> bool {
}
// Webhook unit tests: signature verification, payload parsing per provider,
// job matching, and persisted installation state.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::{
        EndpointConfig, MirrorConfig, NamespaceKind, SiteConfig, TokenConfig, Visibility,
    };

    // GitHub signs the raw body with HMAC-SHA256 in `x-hub-signature-256`.
    #[test]
    fn verifies_github_hmac_signature() {
        let body = br#"{"repository":{"name":"repo"}}"#;
        let mut headers = HashMap::new();
        headers.insert("x-github-event".to_string(), "push".to_string());
        headers.insert(
            "x-hub-signature-256".to_string(),
            format!("sha256={}", hmac_sha256_hex(b"secret", body)),
        );
        assert!(verify_signature(
            Some(&ProviderKind::Github),
            &headers,
            body,
            "secret"
        ));
        // A mismatched secret must be rejected.
        assert!(!verify_signature(
            Some(&ProviderKind::Github),
            &headers,
            body,
            "wrong"
        ));
    }

    // GitHub payload: repo name plus owner login as the namespace.
    #[test]
    fn parses_github_push_payload() {
        let mut headers = HashMap::new();
        headers.insert("x-github-event".to_string(), "push".to_string());
        let value: Value = serde_json::from_str(
            r#"{"repository":{"name":"repo","full_name":"alice/repo","owner":{"login":"alice"}}}"#,
        )
        .unwrap();
        let event = parse_event(Some(ProviderKind::Github), &headers, &value).unwrap();
        assert_eq!(event.repo, "repo");
        assert_eq!(event.namespace.as_deref(), Some("alice"));
    }

    // Forgejo is detected from its event header; owner comes from `username`.
    #[test]
    fn parses_forgejo_push_payload() {
        let mut headers = HashMap::new();
        headers.insert("x-forgejo-event".to_string(), "push".to_string());
        let value: Value = serde_json::from_str(
            r#"{"repository":{"name":"repo","full_name":"azalea/repo","owner":{"username":"azalea"}}}"#,
        )
        .unwrap();
        let provider = detect_provider(&headers);
        let event = parse_event(provider.clone(), &headers, &value).unwrap();
        assert_eq!(provider, Some(ProviderKind::Forgejo));
        assert_eq!(event.repo, "repo");
        assert_eq!(event.namespace.as_deref(), Some("azalea"));
    }

    // Forgejo signs the raw body in `x-forgejo-signature`.
    #[test]
    fn verifies_forgejo_hmac_signature() {
        let body = br#"{"repository":{"name":"repo"}}"#;
        let mut headers = HashMap::new();
        headers.insert("x-forgejo-event".to_string(), "push".to_string());
        headers.insert(
            "x-forgejo-signature".to_string(),
            format!("sha256={}", hmac_sha256_hex(b"secret", body)),
        );
        assert!(verify_signature(
            Some(&ProviderKind::Forgejo),
            &headers,
            body,
            "secret"
        ));
    }

    // GitLab payload: namespace is everything before the final path segment.
    #[test]
    fn parses_gitlab_push_payload() {
        let mut headers = HashMap::new();
        headers.insert("x-gitlab-event".to_string(), "Push Hook".to_string());
        let value: Value = serde_json::from_str(
            r#"{"project":{"path":"repo","path_with_namespace":"parent/alice/repo"}}"#,
        )
        .unwrap();
        let event = parse_event(Some(ProviderKind::Gitlab), &headers, &value).unwrap();
        assert_eq!(event.repo, "repo");
        assert_eq!(event.namespace.as_deref(), Some("parent/alice"));
    }

    // An event only matches mirrors whose endpoint provider and namespace agree.
    #[test]
    fn matches_jobs_by_provider_and_namespace() {
        let config = Config {
            sites: vec![
                site("github", ProviderKind::Github),
                site("gitea", ProviderKind::Gitea),
            ],
            mirrors: vec![MirrorConfig {
                name: "sync-1".to_string(),
                endpoints: vec![
                    endpoint("github", NamespaceKind::User, "alice"),
                    endpoint("gitea", NamespaceKind::User, "azalea"),
                ],
                create_missing: true,
                visibility: Visibility::Private,
                allow_force: false,
            }],
            webhook: None,
        };
        let event = WebhookEvent {
            provider: Some(ProviderKind::Github),
            repo: "repo".to_string(),
            namespace: Some("alice".to_string()),
        };
        let jobs = matching_jobs(&config, &event);
        assert_eq!(jobs.len(), 1);
        assert_eq!(jobs[0].group, "sync-1");
        assert_eq!(jobs[0].repo, "repo");
    }

    // Installed webhooks round-trip through the persisted state file.
    #[test]
    fn webhook_state_persists_installations() {
        let temp = tempfile::TempDir::new().unwrap();
        let endpoint = endpoint("github", NamespaceKind::User, "alice");
        let key = webhook_installation_key("sync-1", &endpoint, "repo");
        let mut state = WebhookState::default();
        state.installations.insert(
            key.clone(),
            WebhookInstallation {
                group: "sync-1".to_string(),
                endpoint,
                repo: "repo".to_string(),
                url: "https://mirror.example.test/webhook".to_string(),
            },
        );
        save_webhook_state(temp.path(), &state).unwrap();
        let loaded = load_webhook_state(temp.path()).unwrap();
        assert_eq!(loaded.installations[&key].repo, "repo");
        assert_eq!(
            loaded.installations[&key].url,
            "https://mirror.example.test/webhook"
        );
    }

    // Minimal site fixture for config-matching tests.
    fn site(name: &str, provider: ProviderKind) -> SiteConfig {
        SiteConfig {
            name: name.to_string(),
            provider,
            base_url: "https://example.test".to_string(),
            api_url: None,
            token: TokenConfig::Value("secret".to_string()),
            git_username: None,
        }
    }

    // Minimal endpoint fixture for config-matching tests.
    fn endpoint(site: &str, kind: NamespaceKind, namespace: &str) -> EndpointConfig {
        EndpointConfig {
            site: site.to_string(),
            kind,
            namespace: namespace.to_string(),
        }
    }
}
mod tests;
+170
View File
@@ -0,0 +1,170 @@
use super::*;
use crate::config::{
EndpointConfig, MirrorConfig, NamespaceKind, SiteConfig, TokenConfig, Visibility,
};
// GitHub signs the raw body with HMAC-SHA256 in `x-hub-signature-256`;
// a mismatched secret must be rejected.
#[test]
fn verifies_github_hmac_signature() {
    let body = br#"{"repository":{"name":"repo"}}"#;
    let mut headers = HashMap::new();
    headers.insert("x-github-event".to_string(), "push".to_string());
    headers.insert(
        "x-hub-signature-256".to_string(),
        format!("sha256={}", hmac_sha256_hex(b"secret", body)),
    );
    assert!(verify_signature(
        Some(&ProviderKind::Github),
        &headers,
        body,
        "secret"
    ));
    assert!(!verify_signature(
        Some(&ProviderKind::Github),
        &headers,
        body,
        "wrong"
    ));
}

// GitHub payload: repo name plus owner login as the namespace.
#[test]
fn parses_github_push_payload() {
    let mut headers = HashMap::new();
    headers.insert("x-github-event".to_string(), "push".to_string());
    let value: Value = serde_json::from_str(
        r#"{"repository":{"name":"repo","full_name":"alice/repo","owner":{"login":"alice"}}}"#,
    )
    .unwrap();
    let event = parse_event(Some(ProviderKind::Github), &headers, &value).unwrap();
    assert_eq!(event.repo, "repo");
    assert_eq!(event.namespace.as_deref(), Some("alice"));
}

// Forgejo is detected from its event header; owner comes from `username`.
#[test]
fn parses_forgejo_push_payload() {
    let mut headers = HashMap::new();
    headers.insert("x-forgejo-event".to_string(), "push".to_string());
    let value: Value = serde_json::from_str(
        r#"{"repository":{"name":"repo","full_name":"azalea/repo","owner":{"username":"azalea"}}}"#,
    )
    .unwrap();
    let provider = detect_provider(&headers);
    let event = parse_event(provider.clone(), &headers, &value).unwrap();
    assert_eq!(provider, Some(ProviderKind::Forgejo));
    assert_eq!(event.repo, "repo");
    assert_eq!(event.namespace.as_deref(), Some("azalea"));
}

// Forgejo signs the raw body in `x-forgejo-signature`.
#[test]
fn verifies_forgejo_hmac_signature() {
    let body = br#"{"repository":{"name":"repo"}}"#;
    let mut headers = HashMap::new();
    headers.insert("x-forgejo-event".to_string(), "push".to_string());
    headers.insert(
        "x-forgejo-signature".to_string(),
        format!("sha256={}", hmac_sha256_hex(b"secret", body)),
    );
    assert!(verify_signature(
        Some(&ProviderKind::Forgejo),
        &headers,
        body,
        "secret"
    ));
}

// GitLab payload: namespace is everything before the final path segment.
#[test]
fn parses_gitlab_push_payload() {
    let mut headers = HashMap::new();
    headers.insert("x-gitlab-event".to_string(), "Push Hook".to_string());
    let value: Value = serde_json::from_str(
        r#"{"project":{"path":"repo","path_with_namespace":"parent/alice/repo"}}"#,
    )
    .unwrap();
    let event = parse_event(Some(ProviderKind::Gitlab), &headers, &value).unwrap();
    assert_eq!(event.repo, "repo");
    assert_eq!(event.namespace.as_deref(), Some("parent/alice"));
}
// An event only matches mirrors whose endpoint provider and namespace agree;
// the gitea/azalea endpoint must not match a github/alice event.
#[test]
fn matches_jobs_by_provider_and_namespace() {
    let config = Config {
        sites: vec![
            site("github", ProviderKind::Github),
            site("gitea", ProviderKind::Gitea),
        ],
        mirrors: vec![MirrorConfig {
            name: "sync-1".to_string(),
            endpoints: vec![
                endpoint("github", NamespaceKind::User, "alice"),
                endpoint("gitea", NamespaceKind::User, "azalea"),
            ],
            create_missing: true,
            visibility: Visibility::Private,
            allow_force: false,
        }],
        webhook: None,
    };
    let event = WebhookEvent {
        provider: Some(ProviderKind::Github),
        repo: "repo".to_string(),
        namespace: Some("alice".to_string()),
    };
    let jobs = matching_jobs(&config, &event);
    assert_eq!(jobs.len(), 1);
    assert_eq!(jobs[0].group, "sync-1");
    assert_eq!(jobs[0].repo, "repo");
}
// Installed webhooks round-trip through the persisted state file.
#[test]
fn webhook_state_persists_installations() {
    let temp = tempfile::TempDir::new().unwrap();
    let endpoint = endpoint("github", NamespaceKind::User, "alice");
    let key = webhook_installation_key("sync-1", &endpoint, "repo");
    let mut state = WebhookState::default();
    state.installations.insert(
        key.clone(),
        WebhookInstallation {
            group: "sync-1".to_string(),
            endpoint,
            repo: "repo".to_string(),
            url: "https://mirror.example.test/webhook".to_string(),
        },
    );
    save_webhook_state(temp.path(), &state).unwrap();
    let loaded = load_webhook_state(temp.path()).unwrap();
    assert_eq!(loaded.installations[&key].repo, "repo");
    assert_eq!(
        loaded.installations[&key].url,
        "https://mirror.example.test/webhook"
    );
}
// Minimal site fixture for config-matching tests.
fn site(name: &str, provider: ProviderKind) -> SiteConfig {
    SiteConfig {
        name: name.to_string(),
        provider,
        base_url: "https://example.test".to_string(),
        api_url: None,
        token: TokenConfig::Value("secret".to_string()),
        git_username: None,
    }
}

// Minimal endpoint fixture for config-matching tests.
fn endpoint(site: &str, kind: NamespaceKind, namespace: &str) -> EndpointConfig {
    EndpointConfig {
        site: site.to_string(),
        kind,
        namespace: namespace.to_string(),
    }
}