Compare commits

..

64 Commits

Author SHA1 Message Date
azalea ae2bd9aaa1 [+] Force push detection & sync
Build executables / Windows x86_64 (push) Has been cancelled
Build executables / macOS universal (push) Has been cancelled
Build executables / Linux arm64 musl static (push) Has been cancelled
Build executables / Linux x86_64 musl static (push) Has been cancelled
2026-05-11 06:04:23 +00:00
azalea 3638d774ea [F] Rebase commit committer should be kept 2026-05-10 21:42:12 +00:00
Azalea 953a677575 [+] MIT License 2026-05-10 21:32:47 +00:00
azalea 9dccd094a2 [+] My mailmap 2026-05-10 21:32:17 +00:00
azalea a24172bc6c [U] Lock 2026-05-10 20:24:05 +00:00
azalea 07c382935d [U] Bump version 2026-05-10 20:23:27 +00:00
azalea dbae958248 [-] Comment out unnecessary detail from readme 2026-05-10 17:04:02 +00:00
azalea 3efacc96dc [U] Revise README for installation and demo info
Updated installation instructions and demo generation details in README.
2026-05-10 12:24:39 -04:00
azalea 80895bdc76 [+] Demo 2026-05-10 16:18:35 +00:00
azalea 73d5127ee2 [F] Fix default branch 2026-05-10 15:23:50 +00:00
azalea fe1aa19ce3 [F] Name validation 2026-05-10 14:25:28 +00:00
azalea acde9f4f67 [O] Fast path for serve 2026-05-10 13:43:32 +00:00
azalea 273a814692 [U] Update logo size in README
Increased logo size from 50% to 70% in README.
2026-05-10 21:14:00 +08:00
azalea 78b18590a5 [O] Increase export res 2026-05-10 13:12:07 +00:00
azalea e104337737 [+] Logo 2026-05-10 13:08:18 +00:00
azalea 82a641b8c9 [O] Explicit delete missing question, local backups 2026-05-10 13:07:15 +00:00
azalea 582ea7c490 [O] Wording 2026-05-10 11:07:08 +00:00
azalea 7a699aee81 [-] Remove unnecessary multiple regex 2026-05-10 10:57:17 +00:00
azalea 04d8aee687 [F] Fix log sync (#6) 2026-05-10 14:14:53 +08:00
azalea 009ba9b247 [U] info 2026-05-10 05:46:50 +00:00
azalea fbf5534fe3 [F] Visibility sync 2026-05-10 01:36:06 +00:00
azalea b41f530d1e [O] Parallel everything (#5) 2026-05-10 09:28:38 +08:00
azalea 915a63a955 [F] Fix gitlab project listing 2026-05-10 01:07:47 +00:00
azalea b0469d80a7 [F] Fix webhook uninstall 2026-05-09 23:53:43 +00:00
azalea 260f42b973 [F] Webhook issues 2026-05-09 23:44:18 +00:00
azalea 44b1865b15 [F] Fix webhook uninstall missing url 2026-05-09 22:47:02 +00:00
azalea de88150445 [O] Webhook install respect filters 2026-05-09 22:38:50 +00:00
azalea f94a0f11b5 [F] Fix gitea pagination 2026-05-09 22:24:13 +00:00
azalea 0ee43ea58f [O] Test actual PR 2026-05-09 19:37:08 +00:00
azalea 10c55062eb [-] Remove legacy features 2026-05-09 17:41:20 +00:00
azalea f3c0b90a0d [U] Update deps 2026-05-09 17:07:45 +00:00
azalea 513bda3696 [O] UX 2026-05-09 08:22:31 +00:00
azalea 018f1f12d5 [O] UX 2026-05-08 16:24:23 +00:00
azalea 3d73f20c1f [+] End-to-end testing 2026-05-08 15:32:33 +00:00
azalea e43e555b37 [+] Track repo deletions 2026-05-08 07:01:32 +00:00
azalea bc6509ad59 [U] Readme 2026-05-08 06:59:17 +00:00
azalea f28f96f27c [U] Readme 2026-05-08 06:57:55 +00:00
azalea 527e69dc4a [O] Readme 2026-05-08 06:44:26 +00:00
azalea 566c3c1b59 [F] Fix build 2026-05-08 06:33:42 +00:00
azalea b7f3404f99 [+] Docker 2026-05-08 06:29:13 +00:00
azalea d19b061f7c [O] Rewrite readme 2026-05-08 05:58:02 +00:00
azalea 9a9cbba767 [M] Rebrand 2026-05-08 05:34:08 +00:00
azalea ebeb045c51 [+] Conflict resolution (#4)
* [+] Conflict resolution

* [F] Fix conflict resolution branches being synched
2026-05-08 13:16:13 +08:00
azalea 67dd55a1cf [O] Rework webhook 2026-05-08 04:13:45 +00:00
azalea 0566e97c6a [O] Better webhook error handling 2026-05-08 01:03:30 +00:00
azalea 60b6caf1a6 [O] Better interactive config 2026-05-08 00:52:37 +00:00
azalea 9c651728e4 [+] Github Actions builder 2026-05-07 18:48:04 +00:00
azalea 7fc3ab32ad [M] Move tests 2026-05-07 18:27:38 +00:00
azalea 17e3961267 [O] Cleanup codebase 2026-05-07 18:08:33 +00:00
azalea 19658c4ba9 [O] Clean up codebase 2026-05-07 16:41:18 +00:00
azalea 4545a5e515 Revert "[O] Use libraries"
This reverts commit b225c4bbd8.
2026-05-07 16:27:28 +00:00
azalea b225c4bbd8 [O] Use libraries 2026-05-07 15:03:47 +00:00
azalea c013ce1858 [+] Webhook mode 2026-05-07 04:55:49 +00:00
azalea 7b65d919d6 [+] Forgejo (#3)
* [+] Forgejo, tangled

* [-] Tangled
2026-05-07 12:45:01 +08:00
azalea 39ba96051c [F] Fix branch deletion awareness 2026-05-07 02:58:43 +00:00
azalea b70eaee2aa [F] Fix heuristic 2026-05-07 02:22:34 +00:00
azalea 92bcee49ea [O] Better multithreaded logging 2026-05-06 20:19:13 +00:00
azalea bb991d94f0 [+] Multithreading 2026-05-06 20:05:59 +00:00
azalea 61450e3a97 [O] Optimize sync with heuristic 2026-05-06 18:32:16 +00:00
azalea 18cf024b6e [+] Retry 2026-05-06 17:42:20 +00:00
azalea 3c0b3fc1e8 [O] Better CLI wizard (#2) 2026-05-04 06:49:25 +08:00
azalea 0cdabb09e6 [O] Better CLI wizard (#1) 2026-05-04 01:54:43 +08:00
azalea fd7e0db96d Initial commit 2026-05-03 17:27:50 +00:00
azalea 947e71d1fd [+] Create repo by codex 2026-05-03 17:24:24 +00:00
9 changed files with 890 additions and 12 deletions
+1
View File
@@ -0,0 +1 @@
Azalea <noreply@aza.moe> <22280294+hykilpikonna@users.noreply.github.com>
Generated
+1 -1
View File
@@ -1118,7 +1118,7 @@ dependencies = [
[[package]]
name = "refray"
version = "0.1.0"
version = "0.1.1"
dependencies = [
"anyhow",
"clap",
+1 -1
View File
@@ -1,6 +1,6 @@
[package]
name = "refray"
version = "0.1.0"
version = "0.1.1"
edition = "2024"
authors = ["Azalea"]
description = "∞-way read-write git mirroring tool"
+21
View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2026 Azalea
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+2
View File
@@ -240,6 +240,8 @@ Conflict resolution strategies are configured per mirror group:
When a previously opened conflict pull request is merged, the next sync sees the merged branch as the winning tip, pushes it to the other endpoints, and closes stale `refray/conflicts/...` pull requests for that branch.
Force-pushes are propagated only when `refray` can infer intent from the previous successful sync state. If a branch previously matched everywhere, one or more endpoints rewrite that branch to the same non-descendant tip, and the remaining endpoints still have the previous tip, then `refray` writes local backup refs and a bundle under the work-dir `backups/` directory before force-pushing the rewritten tip to the other endpoints. If multiple endpoints rewrite the branch to different tips, or another endpoint also advances independently, the branch is treated as a conflict and skipped.
Repository and branch deletion are propagated only when it is safe to infer intent, and `refray` writes local backup refs and bundle files under the work-dir `backups/` directory before propagating those deletions. If a repository existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous synced refs, `refray` deletes it from the remaining endpoints instead of recreating it when `delete_missing = true`. If `delete_missing = false`, that missing repository is not treated as a deletion and normal missing-repository handling applies. If the repository was deleted everywhere, `refray` removes its saved sync state after creating a local backup from the mirror cache. If the repository was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped.
Branch deletion follows the same rule at branch scope: if a branch existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous tip, `refray` deletes it from the remaining endpoints instead of recreating it. If the branch was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped.
+101 -4
View File
@@ -86,6 +86,12 @@ pub struct GitMirror {
dry_run: bool,
}
/// Committer metadata captured from an existing commit so that a replayed
/// (cherry-picked) commit can be recreated with the same committer.
struct CommitterIdentity {
    name: String,
    email: String,
    // Strict ISO-8601 committer date (`%cI` format placeholder).
    date: String,
}
#[derive(Clone, Debug)]
pub struct Redactor {
secrets: Vec<String>,
@@ -582,6 +588,7 @@ impl GitMirror {
return Ok(format!("dry-run-rebased-{}", short_sha(tip)));
}
let commits = self.replay_commits(base, tip)?;
let worktree = tempfile::TempDir::new().context("failed to create temporary worktree")?;
let worktree_path = worktree.path().to_path_buf();
self.run([
@@ -589,12 +596,13 @@ impl GitMirror {
"add",
"--detach",
worktree_path.to_str().unwrap(),
tip,
onto,
])?;
let rebase_result = self.worktree_git(&worktree_path, ["rebase", "--onto", onto, base]);
if let Err(error) = rebase_result {
let _ = self.worktree_git(&worktree_path, ["rebase", "--abort"]);
let replay_result = self.replay_commits_preserving_committer(&worktree_path, &commits);
if let Err(error) = replay_result {
let _ = self.worktree_git(&worktree_path, ["cherry-pick", "--abort"]);
let _ = self.worktree_git(&worktree_path, ["reset", "--hard"]);
let _ = self.run([
"worktree",
"remove",
@@ -613,6 +621,66 @@ impl GitMirror {
Ok(rebased.trim().to_string())
}
/// Lists the non-merge commits in `base..tip`, oldest first, so they can
/// be replayed one at a time onto another base.
fn replay_commits(&self, base: &str, tip: &str) -> Result<Vec<String>> {
    let range = format!("{base}..{tip}");
    // --reverse + --topo-order yields parents before children.
    let listing = self.output([
        "rev-list",
        "--reverse",
        "--topo-order",
        "--no-merges",
        &range,
    ])?;
    let mut shas = Vec::new();
    for line in listing.lines() {
        let sha = line.trim();
        if !sha.is_empty() {
            shas.push(sha.to_owned());
        }
    }
    Ok(shas)
}
/// Replays `commits` onto the worktree's current HEAD, keeping each
/// commit's original committer identity (a plain rebase would reset the
/// committer to the local user).
fn replay_commits_preserving_committer(
    &self,
    worktree: &Path,
    commits: &[String],
) -> Result<()> {
    commits.iter().try_for_each(|commit| {
        let identity = self.committer_identity(commit)?;
        // Stage the change without committing, then commit reusing the
        // original message (-C) under the original committer identity.
        self.worktree_git(worktree, ["cherry-pick", "--no-commit", commit])?;
        self.worktree_git_with_committer(
            worktree,
            ["commit", "--no-gpg-sign", "-C", commit],
            &identity,
        )
    })
}
/// Reads the committer name, email, and ISO-8601 date of `commit`.
///
/// The fields are NUL-separated (`%x00`) so values containing spaces or
/// newlines cannot be confused with field boundaries.
fn committer_identity(&self, commit: &str) -> Result<CommitterIdentity> {
    let raw = self.output(["show", "-s", "--format=%cn%x00%ce%x00%cI", commit])?;
    let mut fields = raw.trim_end_matches('\n').split('\0');
    let Some(name) = fields.next().filter(|value| !value.is_empty()) else {
        return Err(anyhow::anyhow!("commit {commit} has no committer name"));
    };
    let Some(email) = fields.next().filter(|value| !value.is_empty()) else {
        return Err(anyhow::anyhow!("commit {commit} has no committer email"));
    };
    let Some(date) = fields.next().filter(|value| !value.is_empty()) else {
        return Err(anyhow::anyhow!("commit {commit} has no committer date"));
    };
    // Exactly three fields are expected; anything more means the format
    // string and this parser have drifted apart.
    if fields.next().is_some() {
        bail!("commit {commit} has unexpected committer metadata");
    }
    Ok(CommitterIdentity {
        name: name.to_string(),
        email: email.to_string(),
        date: date.to_string(),
    })
}
pub fn is_ancestor(&self, ancestor: &str, descendant: &str) -> Result<bool> {
let status = self
.command()
@@ -709,6 +777,35 @@ impl GitMirror {
.into())
}
}
fn worktree_git_with_committer<const N: usize>(
&self,
worktree: &Path,
args: [&str; N],
committer: &CommitterIdentity,
) -> Result<()> {
let output = Command::new("git")
.arg("-C")
.arg(worktree)
.args(args)
.env("GIT_COMMITTER_NAME", &committer.name)
.env("GIT_COMMITTER_EMAIL", &committer.email)
.env("GIT_COMMITTER_DATE", &committer.date)
.output()
.with_context(|| "failed to run git")?;
if output.status.success() {
Ok(())
} else {
Err(GitCommandError::new(
"git",
self.redactor
.redact(&String::from_utf8_lossy(&output.stdout)),
self.redactor
.redact(&String::from_utf8_lossy(&output.stderr)),
)
.into())
}
}
}
fn short_sha(sha: &str) -> &str {
+277 -3
View File
@@ -1399,6 +1399,27 @@ fn push_repo_refs(
fail_on_unresolved_conflict(context, "branch deletion conflict")?;
}
let (force_pushes, force_push_conflicts, force_push_branches) =
branch_force_push_decisions(mirror_repo, remotes, previous_refs, current_refs)?;
let had_force_push_conflicts = !force_push_conflicts.is_empty();
for conflict in &force_push_conflicts {
crate::logln!(
" {} branch {} has conflicting force-push changes on {} ({}, {})",
style("conflict").yellow().bold(),
style(&conflict.branch).cyan(),
conflict.remotes.join("+"),
conflict.reason,
style("skipped").dim()
);
}
if had_force_push_conflicts {
fail_on_unresolved_conflict(context, "branch force-push conflict")?;
}
let blocked_branches = blocked_branches
.union(&force_push_branches)
.cloned()
.collect::<BTreeSet<_>>();
let (branches, conflicts) = mirror_repo.branch_decisions(remotes)?;
let branches_to_push = branches
.into_iter()
@@ -1423,6 +1444,7 @@ fn push_repo_refs(
}
}
let had_branch_conflicts = !unresolved_branch_conflicts.is_empty();
let force_push_updates = force_push_updates(&force_pushes);
let unresolved_branch_names = unresolved_branch_conflicts
.iter()
.map(|conflict| conflict.branch.clone())
@@ -1463,13 +1485,17 @@ fn push_repo_refs(
let pushed_branch_names = branch_names(&branches_to_push);
let rebased_branch_names = branch_names_from_updates(&rebased_branch_updates);
let force_pushed_branch_names = branch_names_from_updates(&force_push_updates);
let mut cleanup_branches = stale_conflict_branches;
cleanup_branches.retain(|branch| {
!pushed_branch_names.contains(branch) && !rebased_branch_names.contains(branch)
!pushed_branch_names.contains(branch)
&& !rebased_branch_names.contains(branch)
&& !force_pushed_branch_names.contains(branch)
});
if branches_to_push.is_empty()
&& rebased_branch_updates.is_empty()
&& force_push_updates.is_empty()
&& tags_to_push.is_empty()
&& unresolved_branch_conflicts.is_empty()
{
@@ -1499,7 +1525,10 @@ fn push_repo_refs(
);
return Ok(RepoRefSyncResult {
pushed: false,
had_conflicts: had_branch_conflicts || had_tag_conflicts || had_deletion_conflicts,
had_conflicts: had_branch_conflicts
|| had_tag_conflicts
|| had_deletion_conflicts
|| had_force_push_conflicts,
});
}
if !branch_deletions.is_empty() {
@@ -1522,6 +1551,18 @@ fn push_repo_refs(
mirror_repo.push_branch_updates(remotes, &rebased_branch_updates)?;
close_resolved_pull_requests(context, mirror_repo, remotes, repos, &rebased_branch_names)?;
}
if !force_push_updates.is_empty() {
print_branch_force_pushes(&force_pushes);
backup_force_pushed_branches(context, mirror_repo, repo_name, &force_pushes, current_refs)?;
mirror_repo.push_branch_updates(remotes, &force_push_updates)?;
close_resolved_pull_requests(
context,
mirror_repo,
remotes,
repos,
&force_pushed_branch_names,
)?;
}
if !tags_to_push.is_empty() {
print_tag_decisions(&tags_to_push);
mirror_repo.push_tags(remotes, &tags_to_push)?;
@@ -1541,10 +1582,14 @@ fn push_repo_refs(
Ok(RepoRefSyncResult {
pushed: !branches_to_push.is_empty()
|| !rebased_branch_updates.is_empty()
|| !force_push_updates.is_empty()
|| !tags_to_push.is_empty()
|| !branch_deletions.is_empty()
|| !cleanup_branches.is_empty(),
had_conflicts: had_branch_conflicts || had_tag_conflicts || had_deletion_conflicts,
had_conflicts: had_branch_conflicts
|| had_tag_conflicts
|| had_deletion_conflicts
|| had_force_push_conflicts,
})
}
@@ -1576,6 +1621,34 @@ fn backup_deleted_branches(
Ok(())
}
/// Backs up the about-to-be-overwritten branch tips of every force-push
/// target remote (as backup refs plus a bundle file) before the rewritten
/// history is propagated. In dry-run mode only logs what would happen.
fn backup_force_pushed_branches(
    context: &RepoSyncContext<'_>,
    mirror_repo: &GitMirror,
    repo_name: &str,
    force_pushes: &[BranchForcePush],
    current_refs: &BTreeMap<String, RemoteRefState>,
) -> Result<()> {
    if context.dry_run {
        crate::logln!(
            " {} {} force-push backup{}",
            style("dry-run").yellow().bold(),
            style("would create").dim(),
            // Pluralize the log line: one backup vs. several.
            if force_pushes.len() == 1 { "" } else { "s" }
        );
        return Ok(());
    }
    let stamp = backup_stamp()?;
    let backups = force_push_ref_backups(force_pushes, current_refs, &stamp);
    if backups.is_empty() {
        // Refuse to force-push without a recoverable copy of the old tips.
        bail!("cannot back up force-push because no target branch refs were available");
    }
    let refs = mirror_repo.backup_refs(&backups)?;
    // Bundle lands under the work-dir backups/ directory for this repo.
    let bundle_path = backup_dir(context, repo_name).join(format!("force-push-{stamp}.bundle"));
    mirror_repo.create_bundle(&bundle_path, &refs)?;
    Ok(())
}
fn backup_branches_deleted_everywhere(
context: &RepoSyncContext<'_>,
mirror_repo: &GitMirror,
@@ -1698,6 +1771,35 @@ fn log_rebase_decision(branch: &str, sha: &str, updates: &[BranchUpdate]) {
);
}
/// Logs one line per detected force-push: source remotes -> target remotes.
fn print_branch_force_pushes(force_pushes: &[BranchForcePush]) {
    for force_push in force_pushes {
        crate::logln!(
            " {} branch {} {} -> {}",
            style("force-push detected").green().bold(),
            style(&force_push.branch).cyan(),
            force_push.source_remotes.join("+"),
            force_push.target_remotes.join("+")
        );
    }
}
/// Expands each detected force-push into one forced `BranchUpdate` per
/// target remote.
fn force_push_updates(force_pushes: &[BranchForcePush]) -> Vec<BranchUpdate> {
    let mut updates = Vec::new();
    for force_push in force_pushes {
        for target_remote in &force_push.target_remotes {
            updates.push(BranchUpdate {
                branch: force_push.branch.clone(),
                sha: force_push.sha.clone(),
                target_remote: target_remote.clone(),
                // Non-fast-forward by construction, so force is required.
                force: true,
            });
        }
    }
    updates
}
fn open_conflict_pull_requests(
context: &RepoSyncContext<'_>,
mirror_repo: &GitMirror,
@@ -1968,6 +2070,38 @@ fn branch_ref_backups(
backups
}
/// Builds a `RefBackup` for the current tip of every force-push target
/// remote, so the soon-to-be-overwritten history stays recoverable.
/// Remotes whose current branch ref is unavailable are skipped.
fn force_push_ref_backups(
    force_pushes: &[BranchForcePush],
    current_refs: &BTreeMap<String, RemoteRefState>,
    stamp: &str,
) -> Vec<RefBackup> {
    let mut backups = Vec::new();
    for force_push in force_pushes {
        for remote in &force_push.target_remotes {
            let current_sha = current_refs
                .get(remote)
                .and_then(|refs| refs.branches.get(&force_push.branch));
            if let Some(sha) = current_sha {
                backups.push(RefBackup {
                    // Branch and remote are hex-encoded so arbitrary names
                    // cannot break the backup ref path.
                    refname: format!(
                        "refs/refray-backups/force-pushes/{}/{}/{}",
                        hex_component(&force_push.branch),
                        stamp,
                        hex_component(remote)
                    ),
                    sha: sha.clone(),
                    description: format!(
                        "branch {} from {} before propagated force-push",
                        force_push.branch, remote
                    ),
                });
            }
        }
    }
    backups
}
fn branches_deleted_everywhere_backups(
previous_refs: &BTreeMap<String, RemoteRefState>,
current_refs: &BTreeMap<String, RemoteRefState>,
@@ -2094,6 +2228,146 @@ fn safe_ref_component(value: &str) -> String {
output.trim_matches('-').to_string()
}
/// A force-push that is safe to propagate: every rewriting remote agrees
/// on the same rewritten tip, and every other remote still holds the
/// previously synced tip.
#[derive(Clone, Debug)]
struct BranchForcePush {
    branch: String,
    // The rewritten tip to force-push to the target remotes.
    sha: String,
    // Remotes already at the rewritten tip.
    source_remotes: Vec<String>,
    // Remotes still at the previous tip that will receive the force-push.
    target_remotes: Vec<String>,
}
/// A branch whose rewrites cannot be propagated safely (multiple distinct
/// rewritten tips and/or a concurrent fast-forward elsewhere); the sync
/// reports it and skips the branch.
///
/// Derives `Clone, Debug` for consistency with the sibling
/// `BranchForcePush` type.
#[derive(Clone, Debug)]
struct BranchForcePushConflict {
    // Branch name the conflict applies to.
    branch: String,
    // Remotes that rewrote the branch.
    remotes: Vec<String>,
    // Human-readable explanation used in the conflict log line.
    reason: String,
}
/// Decides, per branch, whether a force-push observed on some remotes can
/// be propagated to the rest.
///
/// Returns `(propagatable force-pushes, conflicts to report, branch names
/// to exclude from normal branch syncing)`. Without a previous successful
/// sync state there is no baseline, so nothing is detected.
fn branch_force_push_decisions(
    mirror_repo: &GitMirror,
    remotes: &[RemoteSpec],
    previous_refs: Option<&BTreeMap<String, RemoteRefState>>,
    current_refs: &BTreeMap<String, RemoteRefState>,
) -> Result<(
    Vec<BranchForcePush>,
    Vec<BranchForcePushConflict>,
    BTreeSet<String>,
)> {
    let Some(previous_refs) = previous_refs else {
        return Ok((Vec::new(), Vec::new(), BTreeSet::new()));
    };
    let remote_names = remotes
        .iter()
        .map(|remote| remote.name.clone())
        .collect::<Vec<_>>();
    // Candidate branches: anything in the previous sync state, excluding
    // refray's internal conflict-resolution branches.
    let mut branches = BTreeSet::new();
    for refs in previous_refs.values() {
        branches.extend(
            refs.branches
                .keys()
                .filter(|branch| !is_internal_conflict_branch(branch))
                .cloned(),
        );
    }
    let mut decisions = Vec::new();
    let mut conflicts = Vec::new();
    let mut blocked = BTreeSet::new();
    for branch in branches {
        let previous_by_remote = remote_names
            .iter()
            .filter_map(|remote| {
                previous_refs
                    .get(remote)
                    .and_then(|refs| refs.branches.get(&branch))
                    .map(|sha| (remote.clone(), sha.clone()))
            })
            .collect::<BTreeMap<_, _>>();
        // Intent is only inferable when the branch previously existed on
        // every remote...
        if previous_by_remote.len() != remote_names.len() {
            continue;
        }
        let previous_tips = previous_by_remote
            .values()
            .cloned()
            .collect::<BTreeSet<_>>();
        // ...and all remotes previously agreed on a single tip.
        if previous_tips.len() != 1 {
            continue;
        }
        let current_by_remote = remote_names
            .iter()
            .filter_map(|remote| {
                current_refs
                    .get(remote)
                    .and_then(|refs| refs.branches.get(&branch))
                    .map(|sha| (remote.clone(), sha.clone()))
            })
            .collect::<BTreeMap<_, _>>();
        // A deletion on any remote is handled by the deletion logic, not
        // force-push detection.
        if current_by_remote.len() != remote_names.len() {
            continue;
        }
        // Classify every remote: unchanged (candidate force-push target),
        // fast-forwarded, or rewritten to a non-descendant tip (grouped by
        // the rewritten tip so agreeing rewrites are recognized).
        let mut target_remotes = Vec::new();
        let mut fast_forward_remotes = Vec::new();
        let mut force_pushed_by_tip = BTreeMap::<String, Vec<String>>::new();
        for remote in &remote_names {
            let previous = &previous_by_remote[remote];
            let current = &current_by_remote[remote];
            if previous == current {
                target_remotes.push(remote.clone());
            } else if mirror_repo.is_ancestor(previous, current)? {
                fast_forward_remotes.push(remote.clone());
            } else {
                force_pushed_by_tip
                    .entry(current.clone())
                    .or_default()
                    .push(remote.clone());
            }
        }
        // No rewrite anywhere: normal branch syncing handles this branch.
        if force_pushed_by_tip.is_empty() {
            continue;
        }
        let force_pushed_remotes = force_pushed_by_tip
            .values()
            .flat_map(|remotes| remotes.iter().cloned())
            .collect::<Vec<_>>();
        // Safe case: every rewriting remote agrees on one tip and nothing
        // fast-forwarded independently.
        if force_pushed_by_tip.len() == 1 && fast_forward_remotes.is_empty() {
            let (sha, source_remotes) = force_pushed_by_tip.into_iter().next().unwrap();
            // All remotes already rewritten identically: nothing to push.
            if target_remotes.is_empty() {
                continue;
            }
            // Block the branch from normal syncing; the force-push path
            // owns it for this run.
            blocked.insert(branch.clone());
            decisions.push(BranchForcePush {
                branch,
                sha,
                source_remotes,
                target_remotes,
            });
            continue;
        }
        // Conflict: divergent rewrites and/or an independent fast-forward.
        blocked.insert(branch.clone());
        let reason = if force_pushed_by_tip.len() > 1 && !fast_forward_remotes.is_empty() {
            format!(
                "multiple rewritten tips and fast-forward changes on {}",
                fast_forward_remotes.join("+")
            )
        } else if force_pushed_by_tip.len() > 1 {
            "multiple rewritten tips".to_string()
        } else {
            format!("also fast-forwarded on {}", fast_forward_remotes.join("+"))
        };
        conflicts.push(BranchForcePushConflict {
            branch,
            remotes: force_pushed_remotes,
            reason,
        });
    }
    Ok((decisions, conflicts, blocked))
}
struct BranchDeletionConflict {
branch: String,
deleted_remotes: Vec<String>,
+430 -2
View File
@@ -25,8 +25,7 @@ const WEBHOOK_SECRET: &str = "refray-e2e-secret";
#[test]
#[ignore = "destructive live-provider e2e test; run explicitly with --ignored"]
fn sequential_live_e2e_all_supported_features() -> Result<()> {
let env = EnvFile::load(Path::new(".env"))?;
let settings = E2eSettings::from_env(&env)?;
let settings = load_e2e_settings()?;
settings.require_destructive_guard()?;
let mut run = E2eRun::new(settings)?;
@@ -59,6 +58,42 @@ fn sequential_live_e2e_all_supported_features() -> Result<()> {
Ok(())
}
/// Live end-to-end pass over every force-push scenario in sequence:
/// rewind, rewrite, fast-forward guard, conflicting rewrites,
/// rewrite + fast-forward, and a feature-branch rewrite.
#[test]
#[ignore = "destructive live-provider e2e test; run explicitly with --ignored"]
fn sequential_live_e2e_force_push_detection() -> Result<()> {
    let settings = load_e2e_settings()?;
    // Destructive against live providers: require the explicit opt-in.
    settings.require_destructive_guard()?;
    let mut run = E2eRun::new(settings)?;
    run.preflight()?;
    run.clear_repositories()?;
    run.write_config(ConflictMode::AutoRebasePullRequest, None, true)?;
    eprintln!("e2e phase: force-push rewind");
    run.rewind_force_push_propagates()?;
    eprintln!("e2e phase: force-push rewrite");
    run.rewrite_force_push_propagates()?;
    eprintln!("e2e phase: force-push fast-forward guard");
    run.normal_fast_forward_still_syncs()?;
    eprintln!("e2e phase: force-push conflict");
    run.conflicting_force_pushes_are_not_propagated()?;
    eprintln!("e2e phase: force-push plus fast-forward conflict");
    run.force_push_plus_fast_forward_is_not_propagated()?;
    eprintln!("e2e phase: feature branch force-push");
    run.feature_branch_force_push_propagates()?;
    run.clear_e2e_repositories()?;
    Ok(())
}
/// Loads the e2e settings from the env file named by `REFRAY_E2E_ENV_FILE`,
/// falling back to `./.env` when the variable is unset.
fn load_e2e_settings() -> Result<E2eSettings> {
    let env_path = match std::env::var_os("REFRAY_E2E_ENV_FILE") {
        Some(path) => PathBuf::from(path),
        None => PathBuf::from(".env"),
    };
    let env = EnvFile::load(&env_path)?;
    E2eSettings::from_env(&env)
}
/// Key/value pairs parsed from a dotenv-style file.
struct EnvFile {
    values: HashMap<String, String>,
}
@@ -637,6 +672,218 @@ namespace = "{}"
Ok(())
}
/// Scenario: one provider force-pushes a branch back to an earlier commit
/// (rewind); sync must propagate the rewind to every provider and keep a
/// backup bundle containing the discarded tip.
fn rewind_force_push_propagates(&self) -> Result<()> {
    let repo = self.repo_name("force-rewind");
    let source = self.primary_provider();
    self.seed_all_main(&repo, "force rewind base", 1_700_001_701)?;
    self.sync_repo(&repo, [])?;
    let base = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    let old = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "old.txt",
        "old\n",
        "force rewind old",
        1_700_001_702,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &old)?;
    // Branch protection would otherwise reject the force-push below.
    self.unprotect_main_all(&repo)?;
    self.force_push_provider_branch_to_sha(source, &repo, MAIN_BRANCH, &base)?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &base)?;
    // The overwritten commit must be recoverable from the backup bundle.
    self.assert_backup_bundle_contains(&repo, &old)?;
    Ok(())
}
/// Scenario: one provider rewrites a branch to a new non-descendant tip;
/// sync must propagate the rewritten tip everywhere and back up the
/// replaced history.
fn rewrite_force_push_propagates(&self) -> Result<()> {
    let repo = self.repo_name("force-rewrite");
    let source = self.primary_provider();
    self.seed_all_main(&repo, "force rewrite base", 1_700_001_711)?;
    self.sync_repo(&repo, [])?;
    let base = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    let old = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "old.txt",
        "old\n",
        "force rewrite old",
        1_700_001_712,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &old)?;
    self.unprotect_main_all(&repo)?;
    // Replace `old` with a different commit built on the same base.
    let rewritten = self.force_rewrite_provider_branch_from(
        source,
        &repo,
        MAIN_BRANCH,
        &base,
        "rewritten.txt",
        "rewritten\n",
        "force rewrite new",
        1_700_001_713,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &rewritten)?;
    self.assert_backup_bundle_contains(&repo, &old)?;
    Ok(())
}
/// Guard scenario: an ordinary fast-forward commit must still sync
/// normally and must not be misclassified as a force-push.
fn normal_fast_forward_still_syncs(&self) -> Result<()> {
    let repo = self.repo_name("force-fast-forward");
    let source = self.primary_provider();
    self.seed_all_main(&repo, "force fast-forward base", 1_700_001_721)?;
    self.sync_repo(&repo, [])?;
    let newer = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "newer.txt",
        "newer\n",
        "normal fast-forward",
        1_700_001_722,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &newer)
}
/// Scenario: two providers rewrite the same branch to different tips; the
/// sync must treat this as a conflict, leave every provider's refs
/// untouched, and (in Fail mode) report failure.
fn conflicting_force_pushes_are_not_propagated(&self) -> Result<()> {
    let repo = self.repo_name("force-conflict");
    let (source, peer) = self.provider_pair();
    self.seed_all_main(&repo, "force conflict base", 1_700_001_731)?;
    self.sync_repo(&repo, [])?;
    let base = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    let old = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "old.txt",
        "old\n",
        "force conflict old",
        1_700_001_732,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &old)?;
    self.unprotect_main_all(&repo)?;
    // Divergent rewrites from the same base on two different providers.
    self.force_rewrite_provider_branch_from(
        source,
        &repo,
        MAIN_BRANCH,
        &base,
        "source.txt",
        "source\n",
        "source force rewrite",
        1_700_001_733,
    )?;
    self.force_rewrite_provider_branch_from(
        peer,
        &repo,
        MAIN_BRANCH,
        &base,
        "peer.txt",
        "peer\n",
        "peer force rewrite",
        1_700_001_734,
    )?;
    // Snapshot the diverged refs: they must be unchanged after the sync.
    let expected_refs = self.branch_refs_by_provider(&repo, MAIN_BRANCH)?;
    self.write_config(ConflictMode::Fail, Some(&exact_pattern(&repo)), true)?;
    self.sync_repo_expect_failure(&repo, [])?;
    self.assert_branch_refs_match(&repo, MAIN_BRANCH, &expected_refs)?;
    // Restore the default config for subsequent phases.
    self.write_config(ConflictMode::AutoRebasePullRequest, None, true)?;
    Ok(())
}
/// Scenario: one provider rewrites the branch while another fast-forwards
/// it independently; the sync must treat this as a conflict and leave all
/// refs untouched.
fn force_push_plus_fast_forward_is_not_propagated(&self) -> Result<()> {
    let repo = self.repo_name("force-plus-fast-forward");
    let (source, peer) = self.provider_pair();
    self.seed_all_main(&repo, "force plus fast-forward base", 1_700_001_741)?;
    self.sync_repo(&repo, [])?;
    let base = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    let old = self.commit_to_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        "old.txt",
        "old\n",
        "force plus fast-forward old",
        1_700_001_742,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &old)?;
    self.unprotect_main_all(&repo)?;
    // Rewrite on the source provider...
    self.force_rewrite_provider_branch_from(
        source,
        &repo,
        MAIN_BRANCH,
        &base,
        "rewritten.txt",
        "rewritten\n",
        "force plus fast-forward rewrite",
        1_700_001_743,
    )?;
    // ...while the peer advances the branch normally.
    self.commit_to_provider_branch(
        peer,
        &repo,
        MAIN_BRANCH,
        "peer-fast-forward.txt",
        "peer fast-forward\n",
        "peer fast-forward",
        1_700_001_744,
    )?;
    let expected_refs = self.branch_refs_by_provider(&repo, MAIN_BRANCH)?;
    self.write_config(ConflictMode::Fail, Some(&exact_pattern(&repo)), true)?;
    self.sync_repo_expect_failure(&repo, [])?;
    self.assert_branch_refs_match(&repo, MAIN_BRANCH, &expected_refs)?;
    // Restore the default config for subsequent phases.
    self.write_config(ConflictMode::AutoRebasePullRequest, None, true)?;
    Ok(())
}
/// Scenario: a non-default (feature) branch is rewritten; the rewrite must
/// propagate without disturbing the main branch, and the old feature tip
/// must land in a backup bundle.
fn feature_branch_force_push_propagates(&self) -> Result<()> {
    let repo = self.repo_name("force-feature");
    let source = self.primary_provider();
    let branch = "feature/force-push";
    self.seed_all_main(&repo, "force feature base", 1_700_001_751)?;
    self.sync_repo(&repo, [])?;
    let main = self.branch_sha(source, &repo, MAIN_BRANCH)?;
    let old_feature = self.create_provider_branch(
        source,
        &repo,
        MAIN_BRANCH,
        branch,
        "feature.txt",
        "feature\n",
        "feature branch old",
        1_700_001_752,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, branch, &old_feature)?;
    let rewritten_feature = self.force_rewrite_provider_branch_from(
        source,
        &repo,
        branch,
        &main,
        "feature-rewritten.txt",
        "feature rewritten\n",
        "feature branch rewrite",
        1_700_001_753,
    )?;
    self.sync_repo(&repo, [])?;
    self.assert_branch_all_at(&repo, branch, &rewritten_feature)?;
    // Main must be untouched by the feature-branch force-push.
    self.assert_branch_all_at(&repo, MAIN_BRANCH, &main)?;
    self.assert_backup_bundle_contains(&repo, &old_feature)?;
    Ok(())
}
fn webhook_commands_and_receiver_work(&self) -> Result<()> {
let repo = self.repo_name("webhook");
let source = self.primary_provider();
@@ -765,6 +1012,129 @@ namespace = "{}"
Ok(())
}
/// Adds one commit (writing `contents` to `path`) on top of `branch` on a
/// single provider, pushes it, waits for the provider to report the new
/// tip, and returns the new commit sha.
#[allow(clippy::too_many_arguments)]
fn commit_to_provider_branch(
    &self,
    provider: &ProviderAccount,
    repo: &str,
    branch: &str,
    path: &str,
    contents: &str,
    message: &str,
    timestamp: i64,
) -> Result<String> {
    let work = self.clone_repo(
        provider,
        repo,
        &format!(
            "commit-{}-{}-{repo}",
            provider.site_name,
            sanitize_path(branch)
        ),
    )?;
    self.checkout_remote_branch(&work, branch)?;
    write_commit(&work, path, contents, message, timestamp)?;
    let sha = git_output(&work, ["rev-parse", "HEAD"])?;
    let refspec = format!("HEAD:{branch}");
    self.git(&work, ["push", "origin", &refspec])?;
    // Providers can lag behind the push; wait until they report the tip.
    provider.wait_branch(repo, branch, &sha)?;
    provider.wait_repo_listed(repo)?;
    Ok(sha)
}
/// Creates `branch` from `base_branch` on a single provider with one new
/// commit, pushes it, waits for the provider to report the tip, and
/// returns the new commit sha.
#[allow(clippy::too_many_arguments)]
fn create_provider_branch(
    &self,
    provider: &ProviderAccount,
    repo: &str,
    base_branch: &str,
    branch: &str,
    path: &str,
    contents: &str,
    message: &str,
    timestamp: i64,
) -> Result<String> {
    let work = self.clone_repo(
        provider,
        repo,
        &format!(
            "branch-{}-{}-{repo}",
            provider.site_name,
            sanitize_path(branch)
        ),
    )?;
    // -B creates (or resets) the local branch at the remote base tip.
    let base_ref = format!("origin/{base_branch}");
    self.git(&work, ["checkout", "-B", branch, &base_ref])?;
    write_commit(&work, path, contents, message, timestamp)?;
    let sha = git_output(&work, ["rev-parse", "HEAD"])?;
    let refspec = format!("HEAD:{branch}");
    self.git(&work, ["push", "origin", &refspec])?;
    provider.wait_branch(repo, branch, &sha)?;
    provider.wait_repo_listed(repo)?;
    Ok(sha)
}
/// Force-pushes `branch` on one provider back to an existing `sha`
/// (a rewind), then waits for the provider to report the moved tip.
fn force_push_provider_branch_to_sha(
    &self,
    provider: &ProviderAccount,
    repo: &str,
    branch: &str,
    sha: &str,
) -> Result<()> {
    let work = self.clone_repo(
        provider,
        repo,
        &format!(
            "force-to-{}-{}-{repo}",
            provider.site_name,
            sanitize_path(branch)
        ),
    )?;
    self.checkout_remote_branch(&work, branch)?;
    self.git(&work, ["reset", "--hard", sha])?;
    let refspec = format!("HEAD:{branch}");
    // --force: the target sha is not a descendant of the current tip.
    self.git(&work, ["push", "--force", "origin", &refspec])?;
    provider.wait_branch(repo, branch, sha)?;
    provider.wait_repo_listed(repo)
}
/// Rewrites `branch` on one provider: resets to `base_sha`, adds a fresh
/// commit, force-pushes the new non-descendant tip, and returns its sha.
#[allow(clippy::too_many_arguments)]
fn force_rewrite_provider_branch_from(
    &self,
    provider: &ProviderAccount,
    repo: &str,
    branch: &str,
    base_sha: &str,
    path: &str,
    contents: &str,
    message: &str,
    timestamp: i64,
) -> Result<String> {
    let work = self.clone_repo(
        provider,
        repo,
        &format!(
            "force-rewrite-{}-{}-{repo}",
            provider.site_name,
            sanitize_path(branch)
        ),
    )?;
    self.checkout_remote_branch(&work, branch)?;
    // Discard the current tip and rebuild history from base_sha.
    self.git(&work, ["reset", "--hard", base_sha])?;
    write_commit(&work, path, contents, message, timestamp)?;
    let sha = git_output(&work, ["rev-parse", "HEAD"])?;
    let refspec = format!("HEAD:{branch}");
    self.git(&work, ["push", "--force", "origin", &refspec])?;
    provider.wait_branch(repo, branch, &sha)?;
    provider.wait_repo_listed(repo)?;
    Ok(sha)
}
/// Checks out a local branch positioned at origin's tip of `branch`,
/// creating or resetting it as needed (`checkout -B`).
fn checkout_remote_branch(&self, work: &Path, branch: &str) -> Result<()> {
    let upstream = format!("origin/{branch}");
    self.git(work, ["checkout", "-B", branch, &upstream])
}
fn clone_repo(&self, provider: &ProviderAccount, repo: &str, label: &str) -> Result<PathBuf> {
let path = self.git_worktree(label)?;
let remote_url = provider.authenticated_repo_url(repo)?;
@@ -1058,6 +1428,34 @@ namespace = "{}"
})
}
/// Asserts (with retries, since providers update asynchronously) that
/// `branch` points at `expected` on every provider.
fn assert_branch_all_at(&self, repo: &str, branch: &str, expected: &str) -> Result<()> {
    retry("branch convergence to expected tip", || {
        for (provider, actual) in self.branch_refs_by_provider(repo, branch)? {
            if actual != expected {
                bail!("branch {branch} on {provider} is at {actual}, expected {expected}");
            }
        }
        Ok(())
    })
}
/// Asserts (with retries) that `branch`'s per-provider tips exactly match
/// the `expected` snapshot — used to prove a conflicted sync touched
/// nothing.
fn assert_branch_refs_match(
    &self,
    repo: &str,
    branch: &str,
    expected: &BTreeMap<String, String>,
) -> Result<()> {
    retry("branch refs unchanged", || {
        let actual = self.branch_refs_by_provider(repo, branch)?;
        if &actual != expected {
            bail!(
                "branch {branch} refs changed unexpectedly for {repo}: expected {expected:?}, got {actual:?}"
            );
        }
        Ok(())
    })
}
fn assert_branch_all_equal_after_optional_resync(
&self,
repo: &str,
@@ -1224,6 +1622,36 @@ namespace = "{}"
Ok(output)
}
/// Maps provider name -> tip sha for `branch`; errors if any provider is
/// missing the branch.
fn branch_refs_by_provider(
    &self,
    repo: &str,
    branch: &str,
) -> Result<BTreeMap<String, String>> {
    self.refs_by_provider(repo)?
        .into_iter()
        .map(|(provider, refs)| {
            let sha = refs.branches.get(branch).cloned().ok_or_else(|| {
                anyhow!("branch {branch} missing on {provider} for repo {repo}")
            })?;
            Ok((provider, sha))
        })
        .collect()
}
/// Looks up the tip sha of `branch` on a single provider via ls-remote.
fn branch_sha(&self, provider: &ProviderAccount, repo: &str, branch: &str) -> Result<String> {
    let refs = provider.ls_remote(repo)?;
    match refs.branches.get(branch) {
        Some(sha) => Ok(sha.clone()),
        None => Err(anyhow!(
            "branch {branch} missing on {} for repo {repo}",
            provider.site_name
        )),
    }
}
fn unprotect_main_all(&self, repo: &str) -> Result<()> {
for provider in &self.settings.providers {
provider.unprotect_branch(repo, MAIN_BRANCH)?;
+56 -1
View File
@@ -178,7 +178,15 @@ fn auto_rebase_branch_conflict_replays_later_tip_and_marks_force_targets() {
let a_tip = fixture.commit_file("a", "a.txt", "a\n", 1_700_000_100);
fixture.push_head(&fixture.remote_a, "main");
fixture.reset_hard(&base);
let b_tip = fixture.commit_file("b", "b.txt", "b\n", 1_700_000_200);
let b_tip = fixture.commit_file_with_committer(
"b",
"b.txt",
"b\n",
1_700_000_200,
"Original Committer",
"original-committer@example.test",
1_700_000_250,
);
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
@@ -205,6 +213,10 @@ fn auto_rebase_branch_conflict_replays_later_tip_and_marks_force_targets() {
.find(|update| update.target_remote == "b")
.unwrap();
assert!(b_update.force);
assert_eq!(
fixture.mirror_committer(&decision.sha),
fixture.mirror_committer(&b_tip)
);
mirror
.push_branch_updates(&fixture.remotes(), &decision.updates)
@@ -495,6 +507,35 @@ impl GitFixture {
self.head()
}
/// Like `commit_file`, but pins the committer name/email/date (and the
/// author date) via `GIT_*` environment variables so tests can verify
/// committer preservation. Returns the new HEAD sha.
fn commit_file_with_committer(
    &self,
    message: &str,
    file_name: &str,
    contents: &str,
    author_timestamp: i64,
    committer_name: &str,
    committer_email: &str,
    committer_timestamp: i64,
) -> String {
    let path = self.work.join(file_name);
    fs::write(path, contents).unwrap();
    git(Some(&self.work), ["add", file_name]);
    // git accepts the "@<unix-epoch> <tz>" date syntax.
    let author_date = format!("@{author_timestamp} +0000");
    let committer_date = format!("@{committer_timestamp} +0000");
    let output = Command::new("git")
        .current_dir(&self.work)
        .env("GIT_AUTHOR_DATE", &author_date)
        .env("GIT_COMMITTER_NAME", committer_name)
        .env("GIT_COMMITTER_EMAIL", committer_email)
        .env("GIT_COMMITTER_DATE", &committer_date)
        .args(["commit", "-m", message])
        .output()
        .unwrap();
    assert_success(&output, "git commit");
    self.head()
}
/// Current HEAD sha of the fixture work tree.
fn head(&self) -> String {
    git_output(Some(&self.work), ["rev-parse", "HEAD"])
}
@@ -559,6 +600,20 @@ impl GitFixture {
.status
.success()
}
/// Returns "<name> <email> <ISO date>" for the committer of `reference`,
/// read from the fixture's mirror repository (`--git-dir`, no worktree).
fn mirror_committer(&self, reference: &str) -> String {
    git_output(
        None,
        [
            "--git-dir",
            self.mirror_path.to_str().unwrap(),
            "show",
            "-s",
            "--format=%cn <%ce> %cI",
            reference,
        ],
    )
}
}
fn git<const N: usize>(current_dir: Option<&Path>, args: [&str; N]) {