Compare commits

...

2 Commits

Author SHA1 Message Date
azalea 659e78b8ec [+] Force push detection & sync
Build executables / Windows x86_64 (push) Has been cancelled
Build executables / macOS universal (push) Has been cancelled
Build executables / Linux arm64 musl static (push) Has been cancelled
Build executables / Linux x86_64 musl static (push) Has been cancelled
2026-05-11 06:04:23 +00:00
azalea 965304c47d [F] Rebase commit committer should be kept 2026-05-10 21:42:12 +00:00
5 changed files with 866 additions and 10 deletions
+2
View File
@@ -240,6 +240,8 @@ Conflict resolution strategies are configured per mirror group:
When a previously opened conflict pull request is merged, the next sync sees the merged branch as the winning tip, pushes it to the other endpoints, and closes stale `refray/conflicts/...` pull requests for that branch.
Force-pushes are propagated only when `refray` can infer intent from the previous successful sync state. If a branch previously matched everywhere, one endpoint rewrites that branch to a non-descendant tip, and the other endpoints still have the previous tip, `refray` writes local backup refs and a bundle under the work-dir `backups/` directory before force-pushing the rewritten tip to the other endpoints. If multiple endpoints rewrite the branch differently, or another endpoint also advances independently, the branch is treated as a conflict and skipped.
Repository and branch deletion are propagated only when it is safe to infer intent, and `refray` writes local backup refs and bundle files under the work-dir `backups/` directory before propagating those deletions. If a repository existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous synced refs, `refray` deletes it from the remaining endpoints instead of recreating it when `delete_missing = true`. If `delete_missing = false`, that missing repository is not treated as a deletion and normal missing-repository handling applies. If the repository was deleted everywhere, `refray` removes its saved sync state after creating a local backup from the mirror cache. If the repository was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped.
Branch deletion follows the same rule at branch scope: if a branch existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous tip, `refray` deletes it from the remaining endpoints instead of recreating it. If the branch was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped.
+101 -4
View File
@@ -86,6 +86,12 @@ pub struct GitMirror {
dry_run: bool, dry_run: bool,
} }
/// Committer metadata captured from an existing commit so it can be
/// re-applied verbatim when commits are replayed (see
/// `replay_commits_preserving_committer`).
struct CommitterIdentity {
    /// Committer name (`%cn`).
    name: String,
    /// Committer email (`%ce`).
    email: String,
    /// Strict ISO-8601 committer date (`%cI`).
    date: String,
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Redactor { pub struct Redactor {
secrets: Vec<String>, secrets: Vec<String>,
@@ -582,6 +588,7 @@ impl GitMirror {
return Ok(format!("dry-run-rebased-{}", short_sha(tip))); return Ok(format!("dry-run-rebased-{}", short_sha(tip)));
} }
let commits = self.replay_commits(base, tip)?;
let worktree = tempfile::TempDir::new().context("failed to create temporary worktree")?; let worktree = tempfile::TempDir::new().context("failed to create temporary worktree")?;
let worktree_path = worktree.path().to_path_buf(); let worktree_path = worktree.path().to_path_buf();
self.run([ self.run([
@@ -589,12 +596,13 @@ impl GitMirror {
"add", "add",
"--detach", "--detach",
worktree_path.to_str().unwrap(), worktree_path.to_str().unwrap(),
tip, onto,
])?; ])?;
let rebase_result = self.worktree_git(&worktree_path, ["rebase", "--onto", onto, base]); let replay_result = self.replay_commits_preserving_committer(&worktree_path, &commits);
if let Err(error) = rebase_result { if let Err(error) = replay_result {
let _ = self.worktree_git(&worktree_path, ["rebase", "--abort"]); let _ = self.worktree_git(&worktree_path, ["cherry-pick", "--abort"]);
let _ = self.worktree_git(&worktree_path, ["reset", "--hard"]);
let _ = self.run([ let _ = self.run([
"worktree", "worktree",
"remove", "remove",
@@ -613,6 +621,66 @@ impl GitMirror {
Ok(rebased.trim().to_string()) Ok(rebased.trim().to_string())
} }
/// Lists the non-merge commits in `base..tip` as full SHAs, oldest first,
/// in topological order — the order in which they should be replayed.
fn replay_commits(&self, base: &str, tip: &str) -> Result<Vec<String>> {
    let range = format!("{base}..{tip}");
    let listing = self.output([
        "rev-list",
        "--reverse",
        "--topo-order",
        "--no-merges",
        &range,
    ])?;
    let mut commits = Vec::new();
    for line in listing.lines() {
        let sha = line.trim();
        if !sha.is_empty() {
            commits.push(sha.to_owned());
        }
    }
    Ok(commits)
}
/// Replays `commits` (oldest first) onto the current HEAD of `worktree`,
/// reusing each commit's message and author via `-C <commit>` while pinning
/// the committer name/email/date to the original commit's values.
fn replay_commits_preserving_committer(
    &self,
    worktree: &Path,
    commits: &[String],
) -> Result<()> {
    for commit in commits {
        // Capture the original committer before rewriting anything.
        let committer = self.committer_identity(commit)?;
        // Stage the change without committing so the `commit` below can
        // control the committer identity through the environment.
        self.worktree_git(worktree, ["cherry-pick", "--no-commit", commit])?;
        self.worktree_git_with_committer(
            worktree,
            ["commit", "--no-gpg-sign", "-C", commit],
            &committer,
        )?;
    }
    Ok(())
}
/// Extracts the committer name, email, and ISO-8601 date of `commit`.
///
/// Uses a NUL-separated `git show` format so field boundaries cannot
/// collide with characters that may appear in names or emails.
fn committer_identity(&self, commit: &str) -> Result<CommitterIdentity> {
    let raw = self.output(["show", "-s", "--format=%cn%x00%ce%x00%cI", commit])?;
    let raw = raw.trim_end_matches('\n');
    let mut fields = raw.split('\0');
    let name = match fields.next() {
        Some(value) if !value.is_empty() => value.to_string(),
        _ => bail!("commit {commit} has no committer name"),
    };
    let email = match fields.next() {
        Some(value) if !value.is_empty() => value.to_string(),
        _ => bail!("commit {commit} has no committer email"),
    };
    let date = match fields.next() {
        Some(value) if !value.is_empty() => value.to_string(),
        _ => bail!("commit {commit} has no committer date"),
    };
    // Exactly three fields are expected; anything more is malformed output.
    if fields.next().is_some() {
        bail!("commit {commit} has unexpected committer metadata");
    }
    Ok(CommitterIdentity { name, email, date })
}
pub fn is_ancestor(&self, ancestor: &str, descendant: &str) -> Result<bool> { pub fn is_ancestor(&self, ancestor: &str, descendant: &str) -> Result<bool> {
let status = self let status = self
.command() .command()
@@ -709,6 +777,35 @@ impl GitMirror {
.into()) .into())
} }
} }
fn worktree_git_with_committer<const N: usize>(
&self,
worktree: &Path,
args: [&str; N],
committer: &CommitterIdentity,
) -> Result<()> {
let output = Command::new("git")
.arg("-C")
.arg(worktree)
.args(args)
.env("GIT_COMMITTER_NAME", &committer.name)
.env("GIT_COMMITTER_EMAIL", &committer.email)
.env("GIT_COMMITTER_DATE", &committer.date)
.output()
.with_context(|| "failed to run git")?;
if output.status.success() {
Ok(())
} else {
Err(GitCommandError::new(
"git",
self.redactor
.redact(&String::from_utf8_lossy(&output.stdout)),
self.redactor
.redact(&String::from_utf8_lossy(&output.stderr)),
)
.into())
}
}
} }
fn short_sha(sha: &str) -> &str { fn short_sha(sha: &str) -> &str {
+277 -3
View File
@@ -1399,6 +1399,27 @@ fn push_repo_refs(
fail_on_unresolved_conflict(context, "branch deletion conflict")?; fail_on_unresolved_conflict(context, "branch deletion conflict")?;
} }
let (force_pushes, force_push_conflicts, force_push_branches) =
branch_force_push_decisions(mirror_repo, remotes, previous_refs, current_refs)?;
let had_force_push_conflicts = !force_push_conflicts.is_empty();
for conflict in &force_push_conflicts {
crate::logln!(
" {} branch {} has conflicting force-push changes on {} ({}, {})",
style("conflict").yellow().bold(),
style(&conflict.branch).cyan(),
conflict.remotes.join("+"),
conflict.reason,
style("skipped").dim()
);
}
if had_force_push_conflicts {
fail_on_unresolved_conflict(context, "branch force-push conflict")?;
}
let blocked_branches = blocked_branches
.union(&force_push_branches)
.cloned()
.collect::<BTreeSet<_>>();
let (branches, conflicts) = mirror_repo.branch_decisions(remotes)?; let (branches, conflicts) = mirror_repo.branch_decisions(remotes)?;
let branches_to_push = branches let branches_to_push = branches
.into_iter() .into_iter()
@@ -1423,6 +1444,7 @@ fn push_repo_refs(
} }
} }
let had_branch_conflicts = !unresolved_branch_conflicts.is_empty(); let had_branch_conflicts = !unresolved_branch_conflicts.is_empty();
let force_push_updates = force_push_updates(&force_pushes);
let unresolved_branch_names = unresolved_branch_conflicts let unresolved_branch_names = unresolved_branch_conflicts
.iter() .iter()
.map(|conflict| conflict.branch.clone()) .map(|conflict| conflict.branch.clone())
@@ -1463,13 +1485,17 @@ fn push_repo_refs(
let pushed_branch_names = branch_names(&branches_to_push); let pushed_branch_names = branch_names(&branches_to_push);
let rebased_branch_names = branch_names_from_updates(&rebased_branch_updates); let rebased_branch_names = branch_names_from_updates(&rebased_branch_updates);
let force_pushed_branch_names = branch_names_from_updates(&force_push_updates);
let mut cleanup_branches = stale_conflict_branches; let mut cleanup_branches = stale_conflict_branches;
cleanup_branches.retain(|branch| { cleanup_branches.retain(|branch| {
!pushed_branch_names.contains(branch) && !rebased_branch_names.contains(branch) !pushed_branch_names.contains(branch)
&& !rebased_branch_names.contains(branch)
&& !force_pushed_branch_names.contains(branch)
}); });
if branches_to_push.is_empty() if branches_to_push.is_empty()
&& rebased_branch_updates.is_empty() && rebased_branch_updates.is_empty()
&& force_push_updates.is_empty()
&& tags_to_push.is_empty() && tags_to_push.is_empty()
&& unresolved_branch_conflicts.is_empty() && unresolved_branch_conflicts.is_empty()
{ {
@@ -1499,7 +1525,10 @@ fn push_repo_refs(
); );
return Ok(RepoRefSyncResult { return Ok(RepoRefSyncResult {
pushed: false, pushed: false,
had_conflicts: had_branch_conflicts || had_tag_conflicts || had_deletion_conflicts, had_conflicts: had_branch_conflicts
|| had_tag_conflicts
|| had_deletion_conflicts
|| had_force_push_conflicts,
}); });
} }
if !branch_deletions.is_empty() { if !branch_deletions.is_empty() {
@@ -1522,6 +1551,18 @@ fn push_repo_refs(
mirror_repo.push_branch_updates(remotes, &rebased_branch_updates)?; mirror_repo.push_branch_updates(remotes, &rebased_branch_updates)?;
close_resolved_pull_requests(context, mirror_repo, remotes, repos, &rebased_branch_names)?; close_resolved_pull_requests(context, mirror_repo, remotes, repos, &rebased_branch_names)?;
} }
if !force_push_updates.is_empty() {
print_branch_force_pushes(&force_pushes);
backup_force_pushed_branches(context, mirror_repo, repo_name, &force_pushes, current_refs)?;
mirror_repo.push_branch_updates(remotes, &force_push_updates)?;
close_resolved_pull_requests(
context,
mirror_repo,
remotes,
repos,
&force_pushed_branch_names,
)?;
}
if !tags_to_push.is_empty() { if !tags_to_push.is_empty() {
print_tag_decisions(&tags_to_push); print_tag_decisions(&tags_to_push);
mirror_repo.push_tags(remotes, &tags_to_push)?; mirror_repo.push_tags(remotes, &tags_to_push)?;
@@ -1541,10 +1582,14 @@ fn push_repo_refs(
Ok(RepoRefSyncResult { Ok(RepoRefSyncResult {
pushed: !branches_to_push.is_empty() pushed: !branches_to_push.is_empty()
|| !rebased_branch_updates.is_empty() || !rebased_branch_updates.is_empty()
|| !force_push_updates.is_empty()
|| !tags_to_push.is_empty() || !tags_to_push.is_empty()
|| !branch_deletions.is_empty() || !branch_deletions.is_empty()
|| !cleanup_branches.is_empty(), || !cleanup_branches.is_empty(),
had_conflicts: had_branch_conflicts || had_tag_conflicts || had_deletion_conflicts, had_conflicts: had_branch_conflicts
|| had_tag_conflicts
|| had_deletion_conflicts
|| had_force_push_conflicts,
}) })
} }
@@ -1576,6 +1621,34 @@ fn backup_deleted_branches(
Ok(()) Ok(())
} }
/// Creates local backup refs plus a bundle file for every branch tip that is
/// about to be overwritten by a propagated force-push.
fn backup_force_pushed_branches(
    context: &RepoSyncContext<'_>,
    mirror_repo: &GitMirror,
    repo_name: &str,
    force_pushes: &[BranchForcePush],
    current_refs: &BTreeMap<String, RemoteRefState>,
) -> Result<()> {
    // Dry-run only announces what would be backed up.
    if context.dry_run {
        crate::logln!(
            " {} {} force-push backup{}",
            style("dry-run").yellow().bold(),
            style("would create").dim(),
            if force_pushes.len() == 1 { "" } else { "s" }
        );
        return Ok(());
    }
    let stamp = backup_stamp()?;
    let backups = force_push_ref_backups(force_pushes, current_refs, &stamp);
    // Refusing to continue without a backup is deliberate: force-pushing
    // without one would make the overwritten tips unrecoverable.
    if backups.is_empty() {
        bail!("cannot back up force-push because no target branch refs were available");
    }
    let refs = mirror_repo.backup_refs(&backups)?;
    let bundle_path = backup_dir(context, repo_name).join(format!("force-push-{stamp}.bundle"));
    mirror_repo.create_bundle(&bundle_path, &refs)?;
    Ok(())
}
fn backup_branches_deleted_everywhere( fn backup_branches_deleted_everywhere(
context: &RepoSyncContext<'_>, context: &RepoSyncContext<'_>,
mirror_repo: &GitMirror, mirror_repo: &GitMirror,
@@ -1698,6 +1771,35 @@ fn log_rebase_decision(branch: &str, sha: &str, updates: &[BranchUpdate]) {
); );
} }
/// Logs one "force-push detected" line per decided force-push, showing the
/// source remotes (rewritten tip) and the target remotes to be updated.
fn print_branch_force_pushes(force_pushes: &[BranchForcePush]) {
    for decision in force_pushes {
        let sources = decision.source_remotes.join("+");
        let targets = decision.target_remotes.join("+");
        crate::logln!(
            " {} branch {} {} -> {}",
            style("force-push detected").green().bold(),
            style(&decision.branch).cyan(),
            sources,
            targets
        );
    }
}
/// Expands each force-push decision into one forced `BranchUpdate` per
/// target remote.
fn force_push_updates(force_pushes: &[BranchForcePush]) -> Vec<BranchUpdate> {
    let mut updates = Vec::new();
    for force_push in force_pushes {
        for target_remote in &force_push.target_remotes {
            updates.push(BranchUpdate {
                branch: force_push.branch.clone(),
                sha: force_push.sha.clone(),
                target_remote: target_remote.clone(),
                force: true,
            });
        }
    }
    updates
}
fn open_conflict_pull_requests( fn open_conflict_pull_requests(
context: &RepoSyncContext<'_>, context: &RepoSyncContext<'_>,
mirror_repo: &GitMirror, mirror_repo: &GitMirror,
@@ -1968,6 +2070,38 @@ fn branch_ref_backups(
backups backups
} }
/// Builds backup-ref descriptors for every target-remote branch tip that a
/// force-push is about to overwrite. Remotes whose current tip for the
/// branch is unknown are skipped.
fn force_push_ref_backups(
    force_pushes: &[BranchForcePush],
    current_refs: &BTreeMap<String, RemoteRefState>,
    stamp: &str,
) -> Vec<RefBackup> {
    force_pushes
        .iter()
        .flat_map(|force_push| {
            force_push.target_remotes.iter().filter_map(move |remote| {
                let sha = current_refs
                    .get(remote)
                    .and_then(|refs| refs.branches.get(&force_push.branch))?;
                Some(RefBackup {
                    refname: format!(
                        "refs/refray-backups/force-pushes/{}/{}/{}",
                        hex_component(&force_push.branch),
                        stamp,
                        hex_component(remote)
                    ),
                    sha: sha.clone(),
                    description: format!(
                        "branch {} from {} before propagated force-push",
                        force_push.branch, remote
                    ),
                })
            })
        })
        .collect()
}
fn branches_deleted_everywhere_backups( fn branches_deleted_everywhere_backups(
previous_refs: &BTreeMap<String, RemoteRefState>, previous_refs: &BTreeMap<String, RemoteRefState>,
current_refs: &BTreeMap<String, RemoteRefState>, current_refs: &BTreeMap<String, RemoteRefState>,
@@ -2094,6 +2228,146 @@ fn safe_ref_component(value: &str) -> String {
output.trim_matches('-').to_string() output.trim_matches('-').to_string()
} }
/// A force-push that can safely be propagated: exactly one rewritten tip,
/// with every other remote still at the previously synced tip.
#[derive(Clone, Debug)]
struct BranchForcePush {
    /// Name of the rewritten branch.
    branch: String,
    /// The rewritten tip to force-push to the target remotes.
    sha: String,
    /// Remotes that already carry the rewritten tip.
    source_remotes: Vec<String>,
    /// Remotes still at the previous tip that will be force-pushed.
    target_remotes: Vec<String>,
}
/// A branch whose rewrites cannot be propagated automatically; it is
/// reported and then skipped.
struct BranchForcePushConflict {
    /// Name of the conflicted branch.
    branch: String,
    /// Remotes that rewrote the branch.
    remotes: Vec<String>,
    /// Human-readable explanation shown in the conflict log line.
    reason: String,
}
/// Decides which branch rewrites can be propagated as force-pushes.
///
/// Returns `(decisions, conflicts, blocked)`:
/// - `decisions`: branches where exactly one rewritten tip exists and every
///   other remote is still at the previously synced tip;
/// - `conflicts`: branches with multiple different rewritten tips and/or an
///   independent fast-forward on another remote;
/// - `blocked`: names of all branches touched by either list, so normal
///   branch syncing skips them this round.
///
/// Without a previous successful sync state there is no baseline to infer
/// intent from, so nothing is decided.
fn branch_force_push_decisions(
    mirror_repo: &GitMirror,
    remotes: &[RemoteSpec],
    previous_refs: Option<&BTreeMap<String, RemoteRefState>>,
    current_refs: &BTreeMap<String, RemoteRefState>,
) -> Result<(
    Vec<BranchForcePush>,
    Vec<BranchForcePushConflict>,
    BTreeSet<String>,
)> {
    let Some(previous_refs) = previous_refs else {
        return Ok((Vec::new(), Vec::new(), BTreeSet::new()));
    };
    let remote_names = remotes
        .iter()
        .map(|remote| remote.name.clone())
        .collect::<Vec<_>>();
    // Candidate branches: everything seen in the previous sync state,
    // excluding refray's own internal conflict branches.
    let mut branches = BTreeSet::new();
    for refs in previous_refs.values() {
        branches.extend(
            refs.branches
                .keys()
                .filter(|branch| !is_internal_conflict_branch(branch))
                .cloned(),
        );
    }
    let mut decisions = Vec::new();
    let mut conflicts = Vec::new();
    let mut blocked = BTreeSet::new();
    for branch in branches {
        let previous_by_remote = remote_names
            .iter()
            .filter_map(|remote| {
                previous_refs
                    .get(remote)
                    .and_then(|refs| refs.branches.get(&branch))
                    .map(|sha| (remote.clone(), sha.clone()))
            })
            .collect::<BTreeMap<_, _>>();
        // Intent is only inferable if the branch previously existed on
        // every remote...
        if previous_by_remote.len() != remote_names.len() {
            continue;
        }
        let previous_tips = previous_by_remote
            .values()
            .cloned()
            .collect::<BTreeSet<_>>();
        // ...and at a single, common tip.
        if previous_tips.len() != 1 {
            continue;
        }
        let current_by_remote = remote_names
            .iter()
            .filter_map(|remote| {
                current_refs
                    .get(remote)
                    .and_then(|refs| refs.branches.get(&branch))
                    .map(|sha| (remote.clone(), sha.clone()))
            })
            .collect::<BTreeMap<_, _>>();
        // A branch now missing somewhere is deletion territory, not a
        // force-push; leave it to the deletion logic.
        if current_by_remote.len() != remote_names.len() {
            continue;
        }
        // Classify each remote relative to the previous common tip.
        let mut target_remotes = Vec::new();
        let mut fast_forward_remotes = Vec::new();
        let mut force_pushed_by_tip = BTreeMap::<String, Vec<String>>::new();
        for remote in &remote_names {
            let previous = &previous_by_remote[remote];
            let current = &current_by_remote[remote];
            if previous == current {
                target_remotes.push(remote.clone());
            } else if mirror_repo.is_ancestor(previous, current)? {
                fast_forward_remotes.push(remote.clone());
            } else {
                // Non-fast-forward change: grouped by the new tip so
                // identical rewrites on multiple remotes count as one.
                force_pushed_by_tip
                    .entry(current.clone())
                    .or_default()
                    .push(remote.clone());
            }
        }
        if force_pushed_by_tip.is_empty() {
            continue;
        }
        let force_pushed_remotes = force_pushed_by_tip
            .values()
            .flat_map(|remotes| remotes.iter().cloned())
            .collect::<Vec<_>>();
        // Safe case: exactly one rewritten tip and no independent
        // fast-forward elsewhere.
        if force_pushed_by_tip.len() == 1 && fast_forward_remotes.is_empty() {
            let (sha, source_remotes) = force_pushed_by_tip.into_iter().next().unwrap();
            // Every remote already has the rewrite — nothing to push.
            if target_remotes.is_empty() {
                continue;
            }
            blocked.insert(branch.clone());
            decisions.push(BranchForcePush {
                branch,
                sha,
                source_remotes,
                target_remotes,
            });
            continue;
        }
        // Conflict case: describe exactly why propagation is unsafe.
        blocked.insert(branch.clone());
        let reason = if force_pushed_by_tip.len() > 1 && !fast_forward_remotes.is_empty() {
            format!(
                "multiple rewritten tips and fast-forward changes on {}",
                fast_forward_remotes.join("+")
            )
        } else if force_pushed_by_tip.len() > 1 {
            "multiple rewritten tips".to_string()
        } else {
            format!("also fast-forwarded on {}", fast_forward_remotes.join("+"))
        };
        conflicts.push(BranchForcePushConflict {
            branch,
            remotes: force_pushed_remotes,
            reason,
        });
    }
    Ok((decisions, conflicts, blocked))
}
struct BranchDeletionConflict { struct BranchDeletionConflict {
branch: String, branch: String,
deleted_remotes: Vec<String>, deleted_remotes: Vec<String>,
+430 -2
View File
@@ -25,8 +25,7 @@ const WEBHOOK_SECRET: &str = "refray-e2e-secret";
#[test] #[test]
#[ignore = "destructive live-provider e2e test; run explicitly with --ignored"] #[ignore = "destructive live-provider e2e test; run explicitly with --ignored"]
fn sequential_live_e2e_all_supported_features() -> Result<()> { fn sequential_live_e2e_all_supported_features() -> Result<()> {
let env = EnvFile::load(Path::new(".env"))?; let settings = load_e2e_settings()?;
let settings = E2eSettings::from_env(&env)?;
settings.require_destructive_guard()?; settings.require_destructive_guard()?;
let mut run = E2eRun::new(settings)?; let mut run = E2eRun::new(settings)?;
@@ -59,6 +58,42 @@ fn sequential_live_e2e_all_supported_features() -> Result<()> {
Ok(()) Ok(())
} }
/// Live-provider e2e suite for force-push detection and propagation.
/// Destructive: clears and recreates repositories on the configured
/// providers, so it is `#[ignore]`d and must be run explicitly.
#[test]
#[ignore = "destructive live-provider e2e test; run explicitly with --ignored"]
fn sequential_live_e2e_force_push_detection() -> Result<()> {
    let settings = load_e2e_settings()?;
    settings.require_destructive_guard()?;
    let mut run = E2eRun::new(settings)?;
    run.preflight()?;
    run.clear_repositories()?;
    run.write_config(ConflictMode::AutoRebasePullRequest, None, true)?;
    // Each phase is an independent scenario on its own repository.
    eprintln!("e2e phase: force-push rewind");
    run.rewind_force_push_propagates()?;
    eprintln!("e2e phase: force-push rewrite");
    run.rewrite_force_push_propagates()?;
    eprintln!("e2e phase: force-push fast-forward guard");
    run.normal_fast_forward_still_syncs()?;
    eprintln!("e2e phase: force-push conflict");
    run.conflicting_force_pushes_are_not_propagated()?;
    eprintln!("e2e phase: force-push plus fast-forward conflict");
    run.force_push_plus_fast_forward_is_not_propagated()?;
    eprintln!("e2e phase: feature branch force-push");
    run.feature_branch_force_push_propagates()?;
    run.clear_e2e_repositories()?;
    Ok(())
}
/// Loads the e2e settings from the env file named by `REFRAY_E2E_ENV_FILE`,
/// defaulting to `.env` in the working directory.
fn load_e2e_settings() -> Result<E2eSettings> {
    let env_path = match std::env::var_os("REFRAY_E2E_ENV_FILE") {
        Some(path) => PathBuf::from(path),
        None => PathBuf::from(".env"),
    };
    E2eSettings::from_env(&EnvFile::load(&env_path)?)
}
struct EnvFile { struct EnvFile {
values: HashMap<String, String>, values: HashMap<String, String>,
} }
@@ -637,6 +672,218 @@ namespace = "{}"
Ok(()) Ok(())
} }
/// Scenario: after all endpoints converge, one endpoint force-pushes main
/// back to an earlier commit; the rewind must propagate to the other
/// endpoints and the overwritten tip must be preserved in a backup bundle.
fn rewind_force_push_propagates(&self) -> Result<()> {
    let repo = self.repo_name("force-rewind");
    let source = self.primary_provider();
    self.seed_all_main(&repo, "force rewind base", 1_700_001_701)?;
    self.sync_repo(&repo, [])?;
    let base = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    // Advance main on the source and let the advance sync everywhere.
    let old = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "old.txt",
        "old\n",
        "force rewind old",
        1_700_001_702,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &old)?;
    // Rewind main on the source back to the base commit.
    self.unprotect_main_all(&repo)?;
    self.force_push_provider_branch_to_sha(source, &repo, MAIN_BRANCH, &base)?;
    self.sync_repo(&repo, [])?;
    // The rewind should win everywhere, with the old tip backed up.
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &base)?;
    self.assert_backup_bundle_contains(&repo, &old)?;
    Ok(())
}
/// Scenario: one endpoint rewrites main (reset to base plus a different
/// commit); the rewritten history must propagate and the replaced tip must
/// be preserved in a backup bundle.
fn rewrite_force_push_propagates(&self) -> Result<()> {
    let repo = self.repo_name("force-rewrite");
    let source = self.primary_provider();
    self.seed_all_main(&repo, "force rewrite base", 1_700_001_711)?;
    self.sync_repo(&repo, [])?;
    let base = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    // Advance main everywhere first so a later rewrite is non-fast-forward.
    let old = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "old.txt",
        "old\n",
        "force rewrite old",
        1_700_001_712,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &old)?;
    self.unprotect_main_all(&repo)?;
    // Replace the old commit with a divergent one on top of the base.
    let rewritten = self.force_rewrite_provider_branch_from(
        source,
        &repo,
        MAIN_BRANCH,
        &base,
        "rewritten.txt",
        "rewritten\n",
        "force rewrite new",
        1_700_001_713,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &rewritten)?;
    self.assert_backup_bundle_contains(&repo, &old)?;
    Ok(())
}
/// Guard scenario: an ordinary fast-forward commit must still sync normally
/// and must not be misclassified as a force-push.
fn normal_fast_forward_still_syncs(&self) -> Result<()> {
    let repo = self.repo_name("force-fast-forward");
    let source = self.primary_provider();
    self.seed_all_main(&repo, "force fast-forward base", 1_700_001_721)?;
    self.sync_repo(&repo, [])?;
    let newer = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "newer.txt",
        "newer\n",
        "normal fast-forward",
        1_700_001_722,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &newer)
}
/// Scenario: two endpoints rewrite main to *different* tips. Propagation
/// must be refused: with `ConflictMode::Fail` the sync fails and no branch
/// tip changes on any provider.
fn conflicting_force_pushes_are_not_propagated(&self) -> Result<()> {
    let repo = self.repo_name("force-conflict");
    let (source, peer) = self.provider_pair();
    self.seed_all_main(&repo, "force conflict base", 1_700_001_731)?;
    self.sync_repo(&repo, [])?;
    let base = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    // Converge everywhere on a commit past the base first.
    let old = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "old.txt",
        "old\n",
        "force conflict old",
        1_700_001_732,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &old)?;
    self.unprotect_main_all(&repo)?;
    // Rewrite main to two different tips on two providers.
    self.force_rewrite_provider_branch_from(
        source,
        &repo,
        MAIN_BRANCH,
        &base,
        "source.txt",
        "source\n",
        "source force rewrite",
        1_700_001_733,
    )?;
    self.force_rewrite_provider_branch_from(
        peer,
        &repo,
        MAIN_BRANCH,
        &base,
        "peer.txt",
        "peer\n",
        "peer force rewrite",
        1_700_001_734,
    )?;
    // The sync must fail and leave every provider's tip untouched.
    let expected_refs = self.branch_refs_by_provider(&repo, MAIN_BRANCH)?;
    self.write_config(ConflictMode::Fail, Some(&exact_pattern(&repo)), true)?;
    self.sync_repo_expect_failure(&repo, [])?;
    self.assert_branch_refs_match(&repo, MAIN_BRANCH, &expected_refs)?;
    // Restore the default config for subsequent phases.
    self.write_config(ConflictMode::AutoRebasePullRequest, None, true)?;
    Ok(())
}
/// Scenario: one endpoint rewrites main while another fast-forwards it
/// independently. Intent is ambiguous, so propagation must be refused and
/// no branch tip may change.
fn force_push_plus_fast_forward_is_not_propagated(&self) -> Result<()> {
    let repo = self.repo_name("force-plus-fast-forward");
    let (source, peer) = self.provider_pair();
    self.seed_all_main(&repo, "force plus fast-forward base", 1_700_001_741)?;
    self.sync_repo(&repo, [])?;
    let base = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    // Converge everywhere on a commit past the base first.
    let old = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "old.txt",
        "old\n",
        "force plus fast-forward old",
        1_700_001_742,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &old)?;
    self.unprotect_main_all(&repo)?;
    // Rewrite on the source...
    self.force_rewrite_provider_branch_from(
        source,
        &repo,
        MAIN_BRANCH,
        &base,
        "rewritten.txt",
        "rewritten\n",
        "force plus fast-forward rewrite",
        1_700_001_743,
    )?;
    // ...while the peer fast-forwards independently.
    self.commit_to_provider_branch(
        peer,
        &repo,
        MAIN_BRANCH,
        "peer-fast-forward.txt",
        "peer fast-forward\n",
        "peer fast-forward",
        1_700_001_744,
    )?;
    // The sync must fail and leave every provider's tip untouched.
    let expected_refs = self.branch_refs_by_provider(&repo, MAIN_BRANCH)?;
    self.write_config(ConflictMode::Fail, Some(&exact_pattern(&repo)), true)?;
    self.sync_repo_expect_failure(&repo, [])?;
    self.assert_branch_refs_match(&repo, MAIN_BRANCH, &expected_refs)?;
    // Restore the default config for subsequent phases.
    self.write_config(ConflictMode::AutoRebasePullRequest, None, true)?;
    Ok(())
}
/// Scenario: a rewrite of a non-main feature branch must propagate, while
/// main stays untouched and the old feature tip is backed up.
fn feature_branch_force_push_propagates(&self) -> Result<()> {
    let repo = self.repo_name("force-feature");
    let source = self.primary_provider();
    let branch = "feature/force-push";
    self.seed_all_main(&repo, "force feature base", 1_700_001_751)?;
    self.sync_repo(&repo, [])?;
    let main = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    // Create the feature branch and let it sync everywhere.
    let old_feature = self.create_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        branch,
        "feature.txt",
        "feature\n",
        "feature branch old",
        1_700_001_752,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, branch, &old_feature)?;
    // Rewrite the feature branch from the main tip.
    let rewritten_feature = self.force_rewrite_provider_branch_from(
        source,
        &repo,
        branch,
        &main,
        "feature-rewritten.txt",
        "feature rewritten\n",
        "feature branch rewrite",
        1_700_001_753,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, branch, &rewritten_feature)?;
    // Main must not have been disturbed by the feature rewrite.
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &main)?;
    self.assert_backup_bundle_contains(&repo, &old_feature)?;
    Ok(())
}
fn webhook_commands_and_receiver_work(&self) -> Result<()> { fn webhook_commands_and_receiver_work(&self) -> Result<()> {
let repo = self.repo_name("webhook"); let repo = self.repo_name("webhook");
let source = self.primary_provider(); let source = self.primary_provider();
@@ -765,6 +1012,129 @@ namespace = "{}"
Ok(()) Ok(())
} }
/// Adds one commit (writing `path` with `contents`) on top of `branch` on
/// the given provider, pushes it, and waits until the provider reports the
/// new tip. Returns the new commit SHA.
#[allow(clippy::too_many_arguments)]
fn commit_to_provider_branch(
    &self,
    provider: &ProviderAccount,
    repo: &str,
    branch: &str,
    path: &str,
    contents: &str,
    message: &str,
    timestamp: i64,
) -> Result<String> {
    let work = self.clone_repo(
        provider,
        repo,
        &format!(
            "commit-{}-{}-{repo}",
            provider.site_name,
            sanitize_path(branch)
        ),
    )?;
    self.checkout_remote_branch(&work, branch)?;
    write_commit(&work, path, contents, message, timestamp)?;
    let sha = git_output(&work, ["rev-parse", "HEAD"])?;
    let refspec = format!("HEAD:{branch}");
    self.git(&work, ["push", "origin", &refspec])?;
    // Wait until the provider API observes the push before returning.
    provider.wait_branch(repo, branch, &sha)?;
    provider.wait_repo_listed(repo)?;
    Ok(sha)
}
/// Creates `branch` off `base_branch` on the given provider with one commit
/// (writing `path` with `contents`), pushes it, and waits until the
/// provider reports the new tip. Returns the new commit SHA.
#[allow(clippy::too_many_arguments)]
fn create_provider_branch(
    &self,
    provider: &ProviderAccount,
    repo: &str,
    base_branch: &str,
    branch: &str,
    path: &str,
    contents: &str,
    message: &str,
    timestamp: i64,
) -> Result<String> {
    let work = self.clone_repo(
        provider,
        repo,
        &format!(
            "branch-{}-{}-{repo}",
            provider.site_name,
            sanitize_path(branch)
        ),
    )?;
    // Start the new branch at the remote tip of the base branch.
    let base_ref = format!("origin/{base_branch}");
    self.git(&work, ["checkout", "-B", branch, &base_ref])?;
    write_commit(&work, path, contents, message, timestamp)?;
    let sha = git_output(&work, ["rev-parse", "HEAD"])?;
    let refspec = format!("HEAD:{branch}");
    self.git(&work, ["push", "origin", &refspec])?;
    // Wait until the provider API observes the push before returning.
    provider.wait_branch(repo, branch, &sha)?;
    provider.wait_repo_listed(repo)?;
    Ok(sha)
}
/// Force-pushes `branch` on the provider back to an existing `sha` (a
/// history rewind) and waits until the provider reports the moved tip.
fn force_push_provider_branch_to_sha(
    &self,
    provider: &ProviderAccount,
    repo: &str,
    branch: &str,
    sha: &str,
) -> Result<()> {
    let work = self.clone_repo(
        provider,
        repo,
        &format!(
            "force-to-{}-{}-{repo}",
            provider.site_name,
            sanitize_path(branch)
        ),
    )?;
    self.checkout_remote_branch(&work, branch)?;
    // Rewind the local branch, then publish the rewind with --force.
    self.git(&work, ["reset", "--hard", sha])?;
    let refspec = format!("HEAD:{branch}");
    self.git(&work, ["push", "--force", "origin", &refspec])?;
    provider.wait_branch(repo, branch, sha)?;
    provider.wait_repo_listed(repo)
}
/// Rewrites `branch` on the provider: resets to `base_sha`, adds one new
/// commit, and force-pushes the divergent history. Returns the new tip SHA.
#[allow(clippy::too_many_arguments)]
fn force_rewrite_provider_branch_from(
    &self,
    provider: &ProviderAccount,
    repo: &str,
    branch: &str,
    base_sha: &str,
    path: &str,
    contents: &str,
    message: &str,
    timestamp: i64,
) -> Result<String> {
    let work = self.clone_repo(
        provider,
        repo,
        &format!(
            "force-rewrite-{}-{}-{repo}",
            provider.site_name,
            sanitize_path(branch)
        ),
    )?;
    self.checkout_remote_branch(&work, branch)?;
    // Discard the current history back to the base, then diverge from it.
    self.git(&work, ["reset", "--hard", base_sha])?;
    write_commit(&work, path, contents, message, timestamp)?;
    let sha = git_output(&work, ["rev-parse", "HEAD"])?;
    let refspec = format!("HEAD:{branch}");
    self.git(&work, ["push", "--force", "origin", &refspec])?;
    provider.wait_branch(repo, branch, &sha)?;
    provider.wait_repo_listed(repo)?;
    Ok(sha)
}
/// Creates or resets local `branch` to `origin/<branch>` and checks it out.
fn checkout_remote_branch(&self, work: &Path, branch: &str) -> Result<()> {
    let remote_branch = format!("origin/{branch}");
    self.git(work, ["checkout", "-B", branch, &remote_branch])
}
fn clone_repo(&self, provider: &ProviderAccount, repo: &str, label: &str) -> Result<PathBuf> { fn clone_repo(&self, provider: &ProviderAccount, repo: &str, label: &str) -> Result<PathBuf> {
let path = self.git_worktree(label)?; let path = self.git_worktree(label)?;
let remote_url = provider.authenticated_repo_url(repo)?; let remote_url = provider.authenticated_repo_url(repo)?;
@@ -1058,6 +1428,34 @@ namespace = "{}"
}) })
} }
/// Retries until `branch` points at `expected` on every provider, failing
/// with the first divergent provider's actual tip.
fn assert_branch_all_at(&self, repo: &str, branch: &str, expected: &str) -> Result<()> {
    retry("branch convergence to expected tip", || {
        for (provider, actual) in self.branch_refs_by_provider(repo, branch)? {
            if actual != expected {
                bail!("branch {branch} on {provider} is at {actual}, expected {expected}");
            }
        }
        Ok(())
    })
}
/// Retries until the per-provider tips of `branch` equal `expected` exactly
/// — used to verify that a refused sync changed nothing.
fn assert_branch_refs_match(
    &self,
    repo: &str,
    branch: &str,
    expected: &BTreeMap<String, String>,
) -> Result<()> {
    retry("branch refs unchanged", || {
        let actual = self.branch_refs_by_provider(repo, branch)?;
        if &actual != expected {
            bail!(
                "branch {branch} refs changed unexpectedly for {repo}: expected {expected:?}, got {actual:?}"
            );
        }
        Ok(())
    })
}
fn assert_branch_all_equal_after_optional_resync( fn assert_branch_all_equal_after_optional_resync(
&self, &self,
repo: &str, repo: &str,
@@ -1224,6 +1622,36 @@ namespace = "{}"
Ok(output) Ok(output)
} }
/// Returns the current SHA of `branch` on every provider, keyed by provider
/// name; errors if any provider lacks the branch.
fn branch_refs_by_provider(
    &self,
    repo: &str,
    branch: &str,
) -> Result<BTreeMap<String, String>> {
    self.refs_by_provider(repo)?
        .into_iter()
        .map(|(provider, refs)| {
            let sha = refs.branches.get(branch).cloned().ok_or_else(|| {
                anyhow!("branch {branch} missing on {provider} for repo {repo}")
            })?;
            Ok((provider, sha))
        })
        .collect()
}
/// Looks up the tip of `branch` on a single provider via `ls-remote`.
fn branch_sha(&self, provider: &ProviderAccount, repo: &str, branch: &str) -> Result<String> {
    let refs = provider.ls_remote(repo)?;
    match refs.branches.get(branch) {
        Some(sha) => Ok(sha.clone()),
        None => Err(anyhow!(
            "branch {branch} missing on {} for repo {repo}",
            provider.site_name
        )),
    }
}
fn unprotect_main_all(&self, repo: &str) -> Result<()> { fn unprotect_main_all(&self, repo: &str) -> Result<()> {
for provider in &self.settings.providers { for provider in &self.settings.providers {
provider.unprotect_branch(repo, MAIN_BRANCH)?; provider.unprotect_branch(repo, MAIN_BRANCH)?;
+56 -1
View File
@@ -178,7 +178,15 @@ fn auto_rebase_branch_conflict_replays_later_tip_and_marks_force_targets() {
let a_tip = fixture.commit_file("a", "a.txt", "a\n", 1_700_000_100); let a_tip = fixture.commit_file("a", "a.txt", "a\n", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main"); fixture.push_head(&fixture.remote_a, "main");
fixture.reset_hard(&base); fixture.reset_hard(&base);
let b_tip = fixture.commit_file("b", "b.txt", "b\n", 1_700_000_200); let b_tip = fixture.commit_file_with_committer(
"b",
"b.txt",
"b\n",
1_700_000_200,
"Original Committer",
"original-committer@example.test",
1_700_000_250,
);
fixture.push_head(&fixture.remote_b, "main"); fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror(); let mirror = fixture.mirror();
@@ -205,6 +213,10 @@ fn auto_rebase_branch_conflict_replays_later_tip_and_marks_force_targets() {
.find(|update| update.target_remote == "b") .find(|update| update.target_remote == "b")
.unwrap(); .unwrap();
assert!(b_update.force); assert!(b_update.force);
assert_eq!(
fixture.mirror_committer(&decision.sha),
fixture.mirror_committer(&b_tip)
);
mirror mirror
.push_branch_updates(&fixture.remotes(), &decision.updates) .push_branch_updates(&fixture.remotes(), &decision.updates)
@@ -495,6 +507,35 @@ impl GitFixture {
self.head() self.head()
} }
/// Commits `file_name` in the fixture work tree with an explicit committer
/// identity and a committer date distinct from the author date, so tests
/// can verify that replays preserve the original committer.
fn commit_file_with_committer(
    &self,
    message: &str,
    file_name: &str,
    contents: &str,
    author_timestamp: i64,
    committer_name: &str,
    committer_email: &str,
    committer_timestamp: i64,
) -> String {
    let path = self.work.join(file_name);
    fs::write(path, contents).unwrap();
    git(Some(&self.work), ["add", file_name]);
    // "@<epoch> +0000" pins deterministic dates for reproducible SHAs.
    let author_date = format!("@{author_timestamp} +0000");
    let committer_date = format!("@{committer_timestamp} +0000");
    let output = Command::new("git")
        .current_dir(&self.work)
        .env("GIT_AUTHOR_DATE", &author_date)
        .env("GIT_COMMITTER_NAME", committer_name)
        .env("GIT_COMMITTER_EMAIL", committer_email)
        .env("GIT_COMMITTER_DATE", &committer_date)
        .args(["commit", "-m", message])
        .output()
        .unwrap();
    assert_success(&output, "git commit");
    self.head()
}
fn head(&self) -> String { fn head(&self) -> String {
git_output(Some(&self.work), ["rev-parse", "HEAD"]) git_output(Some(&self.work), ["rev-parse", "HEAD"])
} }
@@ -559,6 +600,20 @@ impl GitFixture {
.status .status
.success() .success()
} }
/// Reads "<committer name> <email> <ISO date>" for `reference` straight
/// from the bare mirror repository.
fn mirror_committer(&self, reference: &str) -> String {
    let git_dir = self.mirror_path.to_str().unwrap();
    git_output(
        None,
        [
            "--git-dir",
            git_dir,
            "show",
            "-s",
            "--format=%cn <%ce> %cI",
            reference,
        ],
    )
}
} }
fn git<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) { fn git<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) {