[+] Conflict resolution (#4)

* [+] Conflict resolution

* [F] Fix conflict resolution branches being synched
This commit is contained in:
2026-05-08 01:16:13 -04:00
committed by GitHub
parent 67dd55a1cf
commit ebeb045c51
13 changed files with 1370 additions and 37 deletions
+11 -1
View File
@@ -204,9 +204,18 @@ Branch conflict handling is intentionally conservative:
- If all endpoints agree on a branch tip, that tip is pushed everywhere. - If all endpoints agree on a branch tip, that tip is pushed everywhere.
- If one branch tip is a descendant of the others, the descendant wins and is pushed everywhere. - If one branch tip is a descendant of the others, the descendant wins and is pushed everywhere.
- If branch tips diverged, that branch is skipped and reported. - If branch tips diverged, `conflict_resolution` controls what happens.
- If `allow_force = true` or `git-sync sync --force` is used, a diverged branch chooses the newest commit timestamp and force-pushes it. - If `allow_force = true` or `git-sync sync --force` is used, a diverged branch chooses the newest commit timestamp and force-pushes it.
Conflict resolution strategies are configured per mirror group:
- `fail`: fail the repository sync when branch tips diverge.
- `auto_rebase`: rebase divergent commits in endpoint order into one branch history, push fast-forward updates normally, and force-push only endpoints whose original tip was rewritten. If rebase hits a file conflict, fail.
- `pull_request`: push temporary `git-sync/conflicts/...` branches and open provider pull requests/merge requests so a person can resolve the divergence.
- `auto_rebase_pull_request`: try `auto_rebase` first, then fall back to pull requests if rebase hits a conflict.
When a previously opened conflict pull request is merged, the next sync sees the merged branch as the winning tip, pushes it to the other endpoints, and closes stale `git-sync/conflicts/...` pull requests for that branch.
Branch deletion is propagated only when it is safe to infer intent. If a branch existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous tip, `git-sync` deletes it from the remaining endpoints instead of recreating it. If the branch was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped. Branch deletion is propagated only when it is safe to infer intent. If a branch existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous tip, `git-sync` deletes it from the remaining endpoints instead of recreating it. If the branch was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped.
Tags are fetched into provider-specific cache refs and pushed only when the tag object agrees across providers or exists on one side. Divergent tags are skipped and reported. Tag deletion is not propagated. Tags are fetched into provider-specific cache refs and pushed only when the tag object agrees across providers or exists on one side. Divergent tags are skipped and reported. Tag deletion is not propagated.
@@ -231,6 +240,7 @@ name = "personal"
create_missing = true create_missing = true
visibility = "private" visibility = "private"
allow_force = false allow_force = false
conflict_resolution = "auto_rebase_pull_request"
[[mirrors.endpoints]] [[mirrors.endpoints]]
site = "github" site = "github"
+12
View File
@@ -55,6 +55,18 @@ pub struct MirrorConfig {
pub visibility: Visibility, pub visibility: Visibility,
#[serde(default)] #[serde(default)]
pub allow_force: bool, pub allow_force: bool,
#[serde(default)]
pub conflict_resolution: ConflictResolutionStrategy,
}
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum ConflictResolutionStrategy {
#[default]
Fail,
AutoRebase,
PullRequest,
AutoRebasePullRequest,
} }
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
+205
View File
@@ -44,6 +44,21 @@ pub struct BranchDeletion {
pub target_remotes: Vec<String>, pub target_remotes: Vec<String>,
} }
#[derive(Clone, Debug)]
pub struct BranchUpdate {
pub branch: String,
pub sha: String,
pub target_remote: String,
pub force: bool,
}
#[derive(Clone, Debug)]
pub struct BranchRebaseDecision {
pub branch: String,
pub sha: String,
pub updates: Vec<BranchUpdate>,
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct TagDecision { pub struct TagDecision {
pub tag: String, pub tag: String,
@@ -299,6 +314,91 @@ impl GitMirror {
Ok(()) Ok(())
} }
pub fn push_branch_updates(
&self,
remotes: &[RemoteSpec],
updates: &[BranchUpdate],
) -> Result<()> {
for update in updates {
let remote = remotes
.iter()
.find(|remote| remote.name == update.target_remote)
.with_context(|| format!("unknown remote '{}'", update.target_remote))?;
self.push_branch_update(remote, update)?;
}
Ok(())
}
pub fn remote_branch_names_with_prefix(
&self,
remote: &str,
prefix: &str,
) -> Result<Vec<String>> {
Ok(self
.remote_branches(remote)?
.into_iter()
.filter_map(|(branch, _)| branch.starts_with(prefix).then_some(branch))
.collect())
}
pub fn auto_rebase_branch_conflict(
&self,
remotes: &[RemoteSpec],
branch: &str,
tips: &[(String, String)],
) -> Result<BranchRebaseDecision> {
let mut ordered_tips = Vec::new();
for remote in remotes {
let Some((_, sha)) = tips.iter().find(|(name, _)| name == &remote.name) else {
continue;
};
if !ordered_tips
.iter()
.any(|(_, existing): &(String, String)| existing == sha)
{
ordered_tips.push((remote.name.clone(), sha.clone()));
}
}
if ordered_tips.len() < 2 {
bail!("branch {branch} does not have enough unique tips to auto-rebase");
}
let mut truth = ordered_tips[0].1.clone();
for (remote, sha) in ordered_tips.iter().skip(1) {
let base = self.merge_base(&truth, sha)?;
crate::logln!(
" {} branch {} rebasing {}@{} onto {}",
style("auto-rebase").cyan().bold(),
style(branch).cyan(),
remote,
short_sha(sha),
short_sha(&truth)
);
truth = self
.rebase_tip_onto(&truth, &base, sha)
.with_context(|| format!("auto-rebase failed for branch {branch}"))?;
}
let mut updates = Vec::new();
for (remote, sha) in tips {
if sha == &truth {
continue;
}
updates.push(BranchUpdate {
branch: branch.to_string(),
sha: truth.clone(),
target_remote: remote.clone(),
force: !self.is_ancestor(sha, &truth)?,
});
}
Ok(BranchRebaseDecision {
branch: branch.to_string(),
sha: truth,
updates,
})
}
pub fn push_tags(&self, remotes: &[RemoteSpec], tags: &[TagDecision]) -> Result<()> { pub fn push_tags(&self, remotes: &[RemoteSpec], tags: &[TagDecision]) -> Result<()> {
for remote in remotes { for remote in remotes {
for tag in tags { for tag in tags {
@@ -343,6 +443,23 @@ impl GitMirror {
Ok(()) Ok(())
} }
fn push_branch_update(&self, remote: &RemoteSpec, update: &BranchUpdate) -> Result<()> {
let refspec = if update.force {
format!("+{}:refs/heads/{}", update.sha, update.branch)
} else {
format!("{}:refs/heads/{}", update.sha, update.branch)
};
let label = if update.force { "force-push" } else { "push" };
crate::logln!(
" {} {} {} {}",
style(label).green().bold(),
style("branch").dim(),
style(&update.branch).cyan(),
style(format!("-> {}", remote.display)).dim()
);
self.run(["push", &remote.name, &refspec])
}
fn remote_url(&self, name: &str) -> Result<Option<String>> { fn remote_url(&self, name: &str) -> Result<Option<String>> {
let output = self let output = self
.command() .command()
@@ -432,6 +549,46 @@ impl GitMirror {
.context("no commits found while choosing force winner") .context("no commits found while choosing force winner")
} }
fn merge_base(&self, left: &str, right: &str) -> Result<String> {
Ok(self.output(["merge-base", left, right])?.trim().to_string())
}
fn rebase_tip_onto(&self, onto: &str, base: &str, tip: &str) -> Result<String> {
if self.dry_run {
return Ok(format!("dry-run-rebased-{}", short_sha(tip)));
}
let worktree = tempfile::TempDir::new().context("failed to create temporary worktree")?;
let worktree_path = worktree.path().to_path_buf();
self.run([
"worktree",
"add",
"--detach",
worktree_path.to_str().unwrap(),
tip,
])?;
let rebase_result = self.worktree_git(&worktree_path, ["rebase", "--onto", onto, base]);
if let Err(error) = rebase_result {
let _ = self.worktree_git(&worktree_path, ["rebase", "--abort"]);
let _ = self.run([
"worktree",
"remove",
"--force",
worktree_path.to_str().unwrap(),
]);
return Err(error);
}
let rebased = self.worktree_git_output(&worktree_path, ["rev-parse", "HEAD"])?;
self.run([
"worktree",
"remove",
"--force",
worktree_path.to_str().unwrap(),
])?;
Ok(rebased.trim().to_string())
}
fn is_ancestor(&self, ancestor: &str, descendant: &str) -> Result<bool> { fn is_ancestor(&self, ancestor: &str, descendant: &str) -> Result<bool> {
let status = self let status = self
.command() .command()
@@ -484,6 +641,54 @@ impl GitMirror {
command.arg("--git-dir").arg(&self.path); command.arg("--git-dir").arg(&self.path);
command command
} }
fn worktree_git<const N: usize>(&self, worktree: &Path, args: [&str; N]) -> Result<()> {
run_plain(
"git",
[
"-C",
worktree.to_str().unwrap(),
"-c",
"user.name=git-sync",
"-c",
"user.email=git-sync@example.invalid",
]
.into_iter()
.chain(args),
None,
&self.redactor,
self.dry_run,
)
}
fn worktree_git_output<const N: usize>(
&self,
worktree: &Path,
args: [&str; N],
) -> Result<String> {
let output = Command::new("git")
.arg("-C")
.arg(worktree)
.args(args)
.output()
.with_context(|| "failed to run git")?;
if output.status.success() {
Ok(String::from_utf8_lossy(&output.stdout).to_string())
} else {
Err(GitCommandError::new(
"git",
self.redactor
.redact(&String::from_utf8_lossy(&output.stdout)),
self.redactor
.redact(&String::from_utf8_lossy(&output.stderr)),
)
.into())
}
}
}
fn short_sha(sha: &str) -> &str {
sha.get(..12).unwrap_or(sha)
} }
pub fn ls_remote_refs(remote: &RemoteSpec, redactor: &Redactor) -> Result<RemoteRefSnapshot> { pub fn ls_remote_refs(remote: &RemoteSpec, redactor: &Redactor) -> Result<RemoteRefSnapshot> {
+63 -4
View File
@@ -14,8 +14,8 @@ use tiny_http::{Request, Response, Server, StatusCode};
use url::Url; use url::Url;
use crate::config::{ use crate::config::{
Config, EndpointConfig, MirrorConfig, NamespaceKind, ProviderKind, SiteConfig, TokenConfig, Config, ConflictResolutionStrategy, EndpointConfig, MirrorConfig, NamespaceKind, ProviderKind,
Visibility, WebhookConfig, SiteConfig, TokenConfig, Visibility, WebhookConfig,
}; };
use crate::provider::ProviderClient; use crate::provider::ProviderClient;
use crate::webhook::check_webhook_url_reachable; use crate::webhook::check_webhook_url_reachable;
@@ -98,12 +98,14 @@ enum WizardAction {
fn add_sync_group_styled(config: &mut Config, theme: &ColorfulTheme) -> Result<()> { fn add_sync_group_styled(config: &mut Config, theme: &ColorfulTheme) -> Result<()> {
let endpoints = prompt_sync_group_endpoints_styled(config, theme, &[])?; let endpoints = prompt_sync_group_endpoints_styled(config, theme, &[])?;
let conflict_resolution = prompt_conflict_resolution_styled(theme, None)?;
config.upsert_mirror(MirrorConfig { config.upsert_mirror(MirrorConfig {
name: next_mirror_name(config), name: next_mirror_name(config),
endpoints, endpoints,
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution,
}); });
prompt_webhook_setup_styled(config, theme)?; prompt_webhook_setup_styled(config, theme)?;
@@ -410,8 +412,12 @@ fn edit_sync_group_styled(config: &mut Config, theme: &ColorfulTheme) -> Result<
style(format!("sync group {}", index + 1)).cyan() style(format!("sync group {}", index + 1)).cyan()
); );
let existing = config.mirrors[index].endpoints.clone(); let existing = config.mirrors[index].endpoints.clone();
let existing_conflict_resolution = config.mirrors[index].conflict_resolution.clone();
let endpoints = prompt_sync_group_endpoints_styled(config, theme, &existing)?; let endpoints = prompt_sync_group_endpoints_styled(config, theme, &existing)?;
let conflict_resolution =
prompt_conflict_resolution_styled(theme, Some(&existing_conflict_resolution))?;
config.mirrors[index].endpoints = endpoints; config.mirrors[index].endpoints = endpoints;
config.mirrors[index].conflict_resolution = conflict_resolution;
prompt_webhook_setup_styled(config, theme)?; prompt_webhook_setup_styled(config, theme)?;
println!( println!(
"{} {}", "{} {}",
@@ -688,6 +694,43 @@ fn prompt_namespace_kind_styled(theme: &ColorfulTheme, namespace: &str) -> Resul
}) })
} }
fn prompt_conflict_resolution_styled(
theme: &ColorfulTheme,
existing: Option<&ConflictResolutionStrategy>,
) -> Result<ConflictResolutionStrategy> {
let options = [
"Fail",
"Auto-rebase; fail on file conflict",
"Pull request",
"Auto-rebase; pull request on file conflict (recommended)",
];
let default = existing.map(conflict_resolution_index).unwrap_or(3);
let index = Select::with_theme(theme)
.with_prompt("How should git-sync resolve branch conflicts?")
.items(options)
.default(default)
.interact()?;
Ok(conflict_resolution_from_index(index))
}
fn conflict_resolution_index(strategy: &ConflictResolutionStrategy) -> usize {
match strategy {
ConflictResolutionStrategy::Fail => 0,
ConflictResolutionStrategy::AutoRebase => 1,
ConflictResolutionStrategy::PullRequest => 2,
ConflictResolutionStrategy::AutoRebasePullRequest => 3,
}
}
fn conflict_resolution_from_index(index: usize) -> ConflictResolutionStrategy {
match index {
0 => ConflictResolutionStrategy::Fail,
1 => ConflictResolutionStrategy::AutoRebase,
2 => ConflictResolutionStrategy::PullRequest,
_ => ConflictResolutionStrategy::AutoRebasePullRequest,
}
}
fn print_sync_groups(config: &Config) { fn print_sync_groups(config: &Config) {
println!(); println!();
println!("{}", style("Sync groups").cyan().bold()); println!("{}", style("Sync groups").cyan().bold());
@@ -725,7 +768,7 @@ fn sync_group_summaries(config: &Config) -> Vec<String> {
} }
fn sync_group_summary(config: &Config, mirror: &MirrorConfig) -> String { fn sync_group_summary(config: &Config, mirror: &MirrorConfig) -> String {
mirror let endpoints = mirror
.endpoints .endpoints
.iter() .iter()
.map(|endpoint| { .map(|endpoint| {
@@ -735,7 +778,23 @@ fn sync_group_summary(config: &Config, mirror: &MirrorConfig) -> String {
.unwrap_or_else(|| format!("{}:{}", endpoint.site, endpoint.namespace)) .unwrap_or_else(|| format!("{}:{}", endpoint.site, endpoint.namespace))
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(" <-> ") .join(" <-> ");
format!(
"{} ({})",
endpoints,
conflict_resolution_label(&mirror.conflict_resolution)
)
}
fn conflict_resolution_label(strategy: &ConflictResolutionStrategy) -> &'static str {
match strategy {
ConflictResolutionStrategy::Fail => "conflicts: fail",
ConflictResolutionStrategy::AutoRebase => "conflicts: auto-rebase",
ConflictResolutionStrategy::PullRequest => "conflicts: pull request",
ConflictResolutionStrategy::AutoRebasePullRequest => {
"conflicts: auto-rebase + pull request"
}
}
} }
fn print_pat_instructions( fn print_pat_instructions(
+308
View File
@@ -23,6 +23,19 @@ pub struct EndpointRepo {
pub repo: RemoteRepo, pub repo: RemoteRepo,
} }
#[derive(Clone, Debug)]
pub struct PullRequestRequest {
pub title: String,
pub body: String,
pub head_branch: String,
pub base_branch: String,
}
#[derive(Clone, Debug)]
pub struct PullRequestInfo {
pub url: Option<String>,
}
pub struct ProviderClient<'a> { pub struct ProviderClient<'a> {
site: &'a SiteConfig, site: &'a SiteConfig,
token: String, token: String,
@@ -120,6 +133,33 @@ impl<'a> ProviderClient<'a> {
) )
} }
pub fn open_pull_request(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
request: &PullRequestRequest,
) -> Result<PullRequestInfo> {
dispatch_provider!(self.site.provider,
github => self.github_open_pull_request(endpoint, repo, request),
gitlab => self.gitlab_open_pull_request(endpoint, repo, request),
gitea_like => self.gitea_open_pull_request(endpoint, repo, request),
)
}
pub fn close_pull_requests_by_head_prefix(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
base_branch: &str,
head_prefix: &str,
) -> Result<usize> {
dispatch_provider!(self.site.provider,
github => self.github_close_pull_requests_by_head_prefix(endpoint, repo, base_branch, head_prefix),
gitlab => self.gitlab_close_pull_requests_by_head_prefix(endpoint, repo, base_branch, head_prefix),
gitea_like => self.gitea_close_pull_requests_by_head_prefix(endpoint, repo, base_branch, head_prefix),
)
}
pub fn validate_token(&self) -> Result<()> { pub fn validate_token(&self) -> Result<()> {
let url = format!("{}/user", self.site.api_base()); let url = format!("{}/user", self.site.api_base());
self.get(&url).map(|_| ()) self.get(&url).map(|_| ())
@@ -238,6 +278,74 @@ impl<'a> ProviderClient<'a> {
self.delete_matching_hook(&hooks_url, url) self.delete_matching_hook(&hooks_url, url)
} }
fn github_open_pull_request(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
request: &PullRequestRequest,
) -> Result<PullRequestInfo> {
let url = self.repo_pulls_url(endpoint, &repo.name, "GitHub")?;
if let Some(existing) =
self.github_find_open_pull_request(&url, &request.head_branch, &request.base_branch)?
{
return Ok(PullRequestInfo {
url: existing.url(),
});
}
let body = json!({
"title": request.title,
"body": request.body,
"head": request.head_branch,
"base": request.base_branch,
});
let created: ProviderPullRequest = self.post_json(&url, &body)?;
Ok(PullRequestInfo { url: created.url() })
}
fn github_find_open_pull_request(
&self,
pulls_url: &str,
head_branch: &str,
base_branch: &str,
) -> Result<Option<ProviderPullRequest>> {
let url = format!(
"{pulls_url}?state=open&base={}&per_page=100",
urlencoding(base_branch)
);
let pulls: Vec<ProviderPullRequest> = self.paged_get(&url)?;
Ok(pulls
.into_iter()
.find(|pull| pull.head_ref() == Some(head_branch)))
}
fn github_close_pull_requests_by_head_prefix(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
base_branch: &str,
head_prefix: &str,
) -> Result<usize> {
let pulls_url = self.repo_pulls_url(endpoint, &repo.name, "GitHub")?;
let url = format!(
"{pulls_url}?state=open&base={}&per_page=100",
urlencoding(base_branch)
);
let pulls: Vec<ProviderPullRequest> = self.paged_get(&url)?;
let mut closed = 0;
for pull in pulls.into_iter().filter(|pull| {
pull.head_ref()
.is_some_and(|head| head.starts_with(head_prefix))
}) {
let Some(number) = pull.number else {
continue;
};
let update_url = format!("{pulls_url}/{number}");
self.patch_json::<serde_json::Value>(&update_url, &json!({ "state": "closed" }))?;
closed += 1;
}
Ok(closed)
}
fn gitlab_list_repos(&self, endpoint: &EndpointConfig) -> Result<Vec<RemoteRepo>> { fn gitlab_list_repos(&self, endpoint: &EndpointConfig) -> Result<Vec<RemoteRepo>> {
match endpoint.kind { match endpoint.kind {
NamespaceKind::User => { NamespaceKind::User => {
@@ -342,6 +450,75 @@ impl<'a> ProviderClient<'a> {
self.delete_matching_hook(&hooks_url, url) self.delete_matching_hook(&hooks_url, url)
} }
fn gitlab_open_pull_request(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
request: &PullRequestRequest,
) -> Result<PullRequestInfo> {
let url = self.gitlab_merge_requests_url(endpoint, &repo.name);
if let Some(existing) =
self.gitlab_find_open_merge_request(&url, &request.head_branch, &request.base_branch)?
{
return Ok(PullRequestInfo {
url: existing.url(),
});
}
let body = json!({
"title": request.title,
"description": request.body,
"source_branch": request.head_branch,
"target_branch": request.base_branch,
});
let created: ProviderPullRequest = self.post_json(&url, &body)?;
Ok(PullRequestInfo { url: created.url() })
}
fn gitlab_find_open_merge_request(
&self,
merge_requests_url: &str,
source_branch: &str,
target_branch: &str,
) -> Result<Option<ProviderPullRequest>> {
let url = format!(
"{merge_requests_url}?state=opened&source_branch={}&target_branch={}&per_page=100",
urlencoding(source_branch),
urlencoding(target_branch)
);
let pulls: Vec<ProviderPullRequest> = self.paged_get(&url)?;
Ok(pulls
.into_iter()
.find(|pull| pull.head_ref() == Some(source_branch)))
}
fn gitlab_close_pull_requests_by_head_prefix(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
base_branch: &str,
head_prefix: &str,
) -> Result<usize> {
let merge_requests_url = self.gitlab_merge_requests_url(endpoint, &repo.name);
let url = format!(
"{merge_requests_url}?state=opened&target_branch={}&per_page=100",
urlencoding(base_branch)
);
let pulls: Vec<ProviderPullRequest> = self.paged_get(&url)?;
let mut closed = 0;
for pull in pulls.into_iter().filter(|pull| {
pull.head_ref()
.is_some_and(|head| head.starts_with(head_prefix))
}) {
let Some(number) = pull.iid.or(pull.number) else {
continue;
};
let update_url = format!("{merge_requests_url}/{number}");
self.put_json::<serde_json::Value>(&update_url, &json!({ "state_event": "close" }))?;
closed += 1;
}
Ok(closed)
}
fn gitea_list_repos(&self, endpoint: &EndpointConfig) -> Result<Vec<RemoteRepo>> { fn gitea_list_repos(&self, endpoint: &EndpointConfig) -> Result<Vec<RemoteRepo>> {
match endpoint.kind { match endpoint.kind {
NamespaceKind::User => { NamespaceKind::User => {
@@ -428,6 +605,75 @@ impl<'a> ProviderClient<'a> {
self.delete_matching_hook(&hooks_url, url) self.delete_matching_hook(&hooks_url, url)
} }
fn gitea_open_pull_request(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
request: &PullRequestRequest,
) -> Result<PullRequestInfo> {
let url = self.repo_pulls_url(endpoint, &repo.name, "Gitea/Forgejo")?;
if let Some(existing) =
self.gitea_find_open_pull_request(&url, &request.head_branch, &request.base_branch)?
{
return Ok(PullRequestInfo {
url: existing.url(),
});
}
let body = json!({
"title": request.title,
"body": request.body,
"head": request.head_branch,
"base": request.base_branch,
});
let created: ProviderPullRequest = self.post_json(&url, &body)?;
Ok(PullRequestInfo { url: created.url() })
}
fn gitea_find_open_pull_request(
&self,
pulls_url: &str,
head_branch: &str,
base_branch: &str,
) -> Result<Option<ProviderPullRequest>> {
let url = format!(
"{pulls_url}?state=open&base={}&head={}&limit=50",
urlencoding(base_branch),
urlencoding(head_branch)
);
let pulls: Vec<ProviderPullRequest> = self.paged_get(&url)?;
Ok(pulls
.into_iter()
.find(|pull| pull.head_ref() == Some(head_branch)))
}
fn gitea_close_pull_requests_by_head_prefix(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
base_branch: &str,
head_prefix: &str,
) -> Result<usize> {
let pulls_url = self.repo_pulls_url(endpoint, &repo.name, "Gitea/Forgejo")?;
let url = format!(
"{pulls_url}?state=open&base={}&limit=50",
urlencoding(base_branch)
);
let pulls: Vec<ProviderPullRequest> = self.paged_get(&url)?;
let mut closed = 0;
for pull in pulls.into_iter().filter(|pull| {
pull.head_ref()
.is_some_and(|head| head.starts_with(head_prefix))
}) {
let Some(number) = pull.number.or(pull.index) else {
continue;
};
let update_url = format!("{pulls_url}/{number}");
self.patch_json::<serde_json::Value>(&update_url, &json!({ "state": "closed" }))?;
closed += 1;
}
Ok(closed)
}
fn repo_hooks_url( fn repo_hooks_url(
&self, &self,
endpoint: &EndpointConfig, endpoint: &EndpointConfig,
@@ -444,6 +690,22 @@ impl<'a> ProviderClient<'a> {
)) ))
} }
fn repo_pulls_url(
&self,
endpoint: &EndpointConfig,
repo_name: &str,
provider: &str,
) -> Result<String> {
if matches!(endpoint.kind, NamespaceKind::Group) {
bail!("{provider} endpoints use kind 'user' or 'org'");
}
Ok(format!(
"{}/repos/{}/{repo_name}/pulls",
self.site.api_base(),
endpoint.namespace
))
}
fn gitlab_hooks_url(&self, endpoint: &EndpointConfig, repo_name: &str) -> String { fn gitlab_hooks_url(&self, endpoint: &EndpointConfig, repo_name: &str) -> String {
let project = format!("{}/{repo_name}", endpoint.namespace); let project = format!("{}/{repo_name}", endpoint.namespace);
format!( format!(
@@ -453,6 +715,15 @@ impl<'a> ProviderClient<'a> {
) )
} }
fn gitlab_merge_requests_url(&self, endpoint: &EndpointConfig, repo_name: &str) -> String {
let project = format!("{}/{repo_name}", endpoint.namespace);
format!(
"{}/projects/{}/merge_requests",
self.site.api_base(),
urlencoding(&project)
)
}
fn find_existing_hook(&self, hooks_url: &str, target_url: &str) -> Result<Option<RepoHook>> { fn find_existing_hook(&self, hooks_url: &str, target_url: &str) -> Result<Option<RepoHook>> {
let hooks: Vec<RepoHook> = self.paged_get(hooks_url)?; let hooks: Vec<RepoHook> = self.paged_get(hooks_url)?;
Ok(hooks Ok(hooks
@@ -730,6 +1001,43 @@ struct RepoHook {
config: HashMap<String, String>, config: HashMap<String, String>,
} }
#[derive(Deserialize)]
struct ProviderPullRequest {
#[serde(default)]
number: Option<u64>,
#[serde(default)]
iid: Option<u64>,
#[serde(default)]
index: Option<u64>,
#[serde(default)]
html_url: Option<String>,
#[serde(default)]
web_url: Option<String>,
#[serde(default)]
head: Option<PullRequestHead>,
#[serde(default)]
source_branch: Option<String>,
}
impl ProviderPullRequest {
fn url(&self) -> Option<String> {
self.html_url.clone().or_else(|| self.web_url.clone())
}
fn head_ref(&self) -> Option<&str> {
self.head
.as_ref()
.map(|head| head.reference.as_str())
.or(self.source_branch.as_deref())
}
}
#[derive(Deserialize)]
struct PullRequestHead {
#[serde(rename = "ref")]
reference: String,
}
impl RepoHook { impl RepoHook {
fn url(&self) -> Option<&str> { fn url(&self) -> Option<&str> {
self.url self.url
+425 -30
View File
@@ -8,13 +8,16 @@ use anyhow::{Context, Result, bail};
use console::style; use console::style;
use regex::Regex; use regex::Regex;
use crate::config::{Config, EndpointConfig, MirrorConfig, default_work_dir, validate_config}; use crate::config::{
Config, ConflictResolutionStrategy, EndpointConfig, MirrorConfig, default_work_dir,
validate_config,
};
use crate::git::{ use crate::git::{
BranchDeletion, GitMirror, Redactor, RemoteRefSnapshot, RemoteSpec, BranchConflict, BranchDeletion, BranchUpdate, GitMirror, Redactor, RemoteRefSnapshot,
is_disabled_repository_error, ls_remote_refs, safe_remote_name, RemoteSpec, is_disabled_repository_error, ls_remote_refs, safe_remote_name,
}; };
use crate::logging; use crate::logging;
use crate::provider::{EndpointRepo, ProviderClient, repos_by_name}; use crate::provider::{EndpointRepo, ProviderClient, PullRequestRequest, repos_by_name};
use crate::webhook; use crate::webhook;
mod output; mod output;
@@ -32,6 +35,7 @@ use self::state::{
}; };
pub const DEFAULT_JOBS: usize = 4; pub const DEFAULT_JOBS: usize = 4;
const CONFLICT_BRANCH_ROOT: &str = "git-sync/conflicts/";
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct SyncOptions { pub struct SyncOptions {
@@ -601,6 +605,7 @@ fn sync_repo(
context, context,
&mirror_repo, &mirror_repo,
&remotes, &remotes,
repos,
detailed_repo_ref_state(ref_state.repo(&context.mirror.name, repo_name)) detailed_repo_ref_state(ref_state.repo(&context.mirror.name, repo_name))
.or(cached_ref_state.as_ref()), .or(cached_ref_state.as_ref()),
&initial_ref_state, &initial_ref_state,
@@ -737,49 +742,63 @@ fn push_repo_refs(
context: &RepoSyncContext<'_>, context: &RepoSyncContext<'_>,
mirror_repo: &GitMirror, mirror_repo: &GitMirror,
remotes: &[RemoteSpec], remotes: &[RemoteSpec],
repos: &[EndpointRepo],
previous_refs: Option<&BTreeMap<String, RemoteRefState>>, previous_refs: Option<&BTreeMap<String, RemoteRefState>>,
current_refs: &BTreeMap<String, RemoteRefState>, current_refs: &BTreeMap<String, RemoteRefState>,
) -> Result<RepoRefSyncResult> { ) -> Result<RepoRefSyncResult> {
let (branch_deletions, deletion_conflicts, blocked_branches) = let (branch_deletions, deletion_conflicts, blocked_branches) =
branch_deletion_decisions(remotes, previous_refs, current_refs); branch_deletion_decisions(remotes, previous_refs, current_refs);
let had_deletion_conflicts = !deletion_conflicts.is_empty(); let had_deletion_conflicts = !deletion_conflicts.is_empty();
for conflict in deletion_conflicts { for conflict in &deletion_conflicts {
crate::logln!( crate::logln!(
" {} branch {} was deleted on {} but changed on {} ({})", " {} branch {} was deleted on {} but changed on {} ({})",
style("conflict").yellow().bold(), style("conflict").yellow().bold(),
style(conflict.branch).cyan(), style(&conflict.branch).cyan(),
conflict.deleted_remotes.join("+"), conflict.deleted_remotes.join("+"),
conflict.changed_remotes.join("+"), conflict.changed_remotes.join("+"),
style("skipped").dim() style("skipped").dim()
); );
} }
if had_deletion_conflicts {
fail_on_unresolved_conflict(context, "branch deletion conflict")?;
}
let (branches, conflicts) = mirror_repo.branch_decisions(remotes, context.allow_force)?; let (branches, conflicts) = mirror_repo.branch_decisions(remotes, context.allow_force)?;
let branches_to_push = branches let branches_to_push = branches
.into_iter() .into_iter()
.filter(|branch| !is_internal_conflict_branch(&branch.branch))
.filter(|branch| !blocked_branches.contains(&branch.branch)) .filter(|branch| !blocked_branches.contains(&branch.branch))
.filter(|branch| !branch.target_remotes.is_empty()) .filter(|branch| !branch.target_remotes.is_empty())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut had_branch_conflicts = false; let mut unresolved_branch_conflicts = Vec::new();
let mut rebased_branch_updates = Vec::new();
for conflict in conflicts { for conflict in conflicts {
if is_internal_conflict_branch(&conflict.branch) {
continue;
}
if blocked_branches.contains(&conflict.branch) { if blocked_branches.contains(&conflict.branch) {
continue; continue;
} }
had_branch_conflicts = true; match resolve_branch_conflict(context, mirror_repo, remotes, conflict)? {
let details = conflict BranchConflictResolution::Rebased(updates) => rebased_branch_updates.extend(updates),
.tips BranchConflictResolution::PullRequest(conflict) => {
.iter() unresolved_branch_conflicts.push(conflict)
.map(|(remote, sha)| format!("{remote}@{}", short_sha(sha)))
.collect::<Vec<_>>()
.join(", ");
crate::logln!(
" {} branch {} diverged across {} ({})",
style("conflict").yellow().bold(),
style(conflict.branch).cyan(),
details,
style("skipped").dim()
);
} }
}
}
let had_branch_conflicts = !unresolved_branch_conflicts.is_empty();
let unresolved_branch_names = unresolved_branch_conflicts
.iter()
.map(|conflict| conflict.branch.clone())
.collect::<BTreeSet<_>>();
let unresolved_or_blocked_branches = unresolved_branch_names
.union(&blocked_branches)
.cloned()
.collect::<BTreeSet<_>>();
let stale_conflict_branches = conflict_pr_base_branches(current_refs)
.difference(&unresolved_or_blocked_branches)
.cloned()
.collect::<BTreeSet<_>>();
let (tags, tag_conflicts) = mirror_repo.tag_decisions(remotes)?; let (tags, tag_conflicts) = mirror_repo.tag_decisions(remotes)?;
let had_tag_conflicts = !tag_conflicts.is_empty(); let had_tag_conflicts = !tag_conflicts.is_empty();
@@ -802,22 +821,35 @@ fn push_repo_refs(
style("skipped").dim() style("skipped").dim()
); );
} }
if had_tag_conflicts {
fail_on_unresolved_conflict(context, "tag conflict")?;
}
if branches_to_push.is_empty() && tags_to_push.is_empty() { let pushed_branch_names = branch_names(&branches_to_push);
let rebased_branch_names = branch_names_from_updates(&rebased_branch_updates);
let mut cleanup_branches = stale_conflict_branches;
cleanup_branches.retain(|branch| {
!pushed_branch_names.contains(branch) && !rebased_branch_names.contains(branch)
});
if branches_to_push.is_empty()
&& rebased_branch_updates.is_empty()
&& tags_to_push.is_empty()
&& unresolved_branch_conflicts.is_empty()
{
if !branch_deletions.is_empty() { if !branch_deletions.is_empty() {
print_branch_deletions(&branch_deletions); print_branch_deletions(&branch_deletions);
mirror_repo.delete_branches(remotes, &branch_deletions)?; mirror_repo.delete_branches(remotes, &branch_deletions)?;
}
if !cleanup_branches.is_empty() {
close_resolved_pull_requests(context, mirror_repo, remotes, repos, &cleanup_branches)?;
}
if !branch_deletions.is_empty() || !cleanup_branches.is_empty() {
return Ok(RepoRefSyncResult { return Ok(RepoRefSyncResult {
pushed: true, pushed: true,
had_conflicts: had_deletion_conflicts, had_conflicts: had_deletion_conflicts,
}); });
} }
if had_branch_conflicts || had_tag_conflicts || had_deletion_conflicts {
return Ok(RepoRefSyncResult {
pushed: false,
had_conflicts: true,
});
}
crate::logln!( crate::logln!(
" {} branches and tags already match all endpoints", " {} branches and tags already match all endpoints",
style("up-to-date").green().bold() style("up-to-date").green().bold()
@@ -834,17 +866,375 @@ fn push_repo_refs(
if !branches_to_push.is_empty() { if !branches_to_push.is_empty() {
print_branch_decisions(&branches_to_push); print_branch_decisions(&branches_to_push);
mirror_repo.push_branches(remotes, &branches_to_push, context.allow_force)?; mirror_repo.push_branches(remotes, &branches_to_push, context.allow_force)?;
close_resolved_pull_requests(context, mirror_repo, remotes, repos, &pushed_branch_names)?;
}
if !rebased_branch_updates.is_empty() {
mirror_repo.push_branch_updates(remotes, &rebased_branch_updates)?;
close_resolved_pull_requests(context, mirror_repo, remotes, repos, &rebased_branch_names)?;
} }
if !tags_to_push.is_empty() { if !tags_to_push.is_empty() {
print_tag_decisions(&tags_to_push); print_tag_decisions(&tags_to_push);
mirror_repo.push_tags(remotes, &tags_to_push)?; mirror_repo.push_tags(remotes, &tags_to_push)?;
} }
if !unresolved_branch_conflicts.is_empty() {
open_conflict_pull_requests(
context,
mirror_repo,
remotes,
repos,
&unresolved_branch_conflicts,
)?;
}
if !cleanup_branches.is_empty() {
close_resolved_pull_requests(context, mirror_repo, remotes, repos, &cleanup_branches)?;
}
Ok(RepoRefSyncResult { Ok(RepoRefSyncResult {
pushed: true, pushed: !branches_to_push.is_empty()
|| !rebased_branch_updates.is_empty()
|| !tags_to_push.is_empty()
|| !branch_deletions.is_empty()
|| !cleanup_branches.is_empty(),
had_conflicts: had_branch_conflicts || had_tag_conflicts || had_deletion_conflicts, had_conflicts: had_branch_conflicts || had_tag_conflicts || had_deletion_conflicts,
}) })
} }
/// Outcome of applying the configured conflict-resolution strategy to one
/// diverged branch.
enum BranchConflictResolution {
    /// Auto-rebase succeeded: these per-remote updates realign every endpoint
    /// onto the rebased tip.
    Rebased(Vec<BranchUpdate>),
    /// Rebase was not attempted (or failed under the fallback strategy):
    /// escalate the divergence via provider pull requests.
    PullRequest(BranchConflict),
}
/// Applies the mirror group's conflict-resolution strategy to a single
/// diverged branch.
///
/// Returns `Rebased` with the per-remote pushes when auto-rebase linearizes
/// the tips, `PullRequest` when the divergence should be handed to a human,
/// and an error under the `fail` strategy (or when a mandatory rebase fails).
fn resolve_branch_conflict(
    context: &RepoSyncContext<'_>,
    mirror_repo: &GitMirror,
    remotes: &[RemoteSpec],
    conflict: BranchConflict,
) -> Result<BranchConflictResolution> {
    log_branch_conflict(&conflict, context.mirror.conflict_resolution.clone());
    match &context.mirror.conflict_resolution {
        ConflictResolutionStrategy::Fail => {
            bail!("branch {} diverged across endpoints", conflict.branch)
        }
        ConflictResolutionStrategy::PullRequest => {
            Ok(BranchConflictResolution::PullRequest(conflict))
        }
        ConflictResolutionStrategy::AutoRebase => {
            // Rebase is mandatory here: a file conflict propagates as an error.
            let rebased = mirror_repo.auto_rebase_branch_conflict(
                remotes,
                &conflict.branch,
                &conflict.tips,
            )?;
            log_rebase_decision(&rebased.branch, &rebased.sha, &rebased.updates);
            Ok(BranchConflictResolution::Rebased(rebased.updates))
        }
        ConflictResolutionStrategy::AutoRebasePullRequest => {
            let attempt =
                mirror_repo.auto_rebase_branch_conflict(remotes, &conflict.branch, &conflict.tips);
            match attempt {
                Ok(rebased) => {
                    log_rebase_decision(&rebased.branch, &rebased.sha, &rebased.updates);
                    Ok(BranchConflictResolution::Rebased(rebased.updates))
                }
                Err(error) => {
                    // The replay hit a genuine conflict: fall back to opening
                    // provider pull requests instead of failing the sync.
                    crate::logln!(
                        " {} branch {} auto-rebase failed; opening pull requests ({})",
                        style("fallback").yellow().bold(),
                        style(&conflict.branch).cyan(),
                        style(error.to_string()).dim()
                    );
                    Ok(BranchConflictResolution::PullRequest(conflict))
                }
            }
        }
    }
}
/// Errors out when the group's strategy is `fail` and an unresolvable
/// conflict (identified by `label`, e.g. "tag conflict") was detected;
/// under every other strategy this is a no-op.
fn fail_on_unresolved_conflict(context: &RepoSyncContext<'_>, label: &str) -> Result<()> {
    match &context.mirror.conflict_resolution {
        ConflictResolutionStrategy::Fail => bail!("{label} detected"),
        _ => Ok(()),
    }
}
/// Emits a one-line "conflict" log entry listing every remote tip of a
/// diverged branch plus the resolution action about to be taken.
fn log_branch_conflict(conflict: &BranchConflict, strategy: ConflictResolutionStrategy) {
    // Render each tip as "<remote>@<short-sha>".
    let mut tip_list = Vec::with_capacity(conflict.tips.len());
    for (remote, sha) in &conflict.tips {
        tip_list.push(format!("{remote}@{}", short_sha(sha)));
    }
    let action = match strategy {
        ConflictResolutionStrategy::Fail => "failing",
        ConflictResolutionStrategy::AutoRebase => "auto-rebase",
        ConflictResolutionStrategy::PullRequest => "pull-request",
        ConflictResolutionStrategy::AutoRebasePullRequest => "auto-rebase/pull-request",
    };
    crate::logln!(
        " {} branch {} diverged across {} ({})",
        style("conflict").yellow().bold(),
        style(&conflict.branch).cyan(),
        tip_list.join(", "),
        style(action).dim()
    );
}
/// Logs the outcome of a successful auto-rebase: the winning sha and how many
/// endpoint pushes it produced.
fn log_rebase_decision(branch: &str, sha: &str, updates: &[BranchUpdate]) {
    let push_count = updates.len();
    let plural = if push_count == 1 { "" } else { "es" };
    crate::logln!(
        " {} branch {} resolved at {} ({} push{})",
        style("auto-rebase").green().bold(),
        style(branch).cyan(),
        style(format!("@{}", short_sha(sha))).dim(),
        push_count,
        plural
    );
}
/// For each diverged branch, pushes a temporary `git-sync/conflicts/...`
/// branch carrying each competing tip to every other endpoint and opens a
/// provider pull request per (target, source) pair so a human can resolve
/// the divergence.
///
/// Fix: the dry-run guard now runs *before* `push_branch_updates`. Previously
/// the temporary conflict branch was pushed to the remote even in dry-run
/// mode, and only the provider PR call was skipped. The plan is still logged
/// in dry-run mode.
///
/// # Errors
/// Propagates git push failures, provider-client construction failures, and
/// pull-request creation failures.
fn open_conflict_pull_requests(
    context: &RepoSyncContext<'_>,
    mirror_repo: &GitMirror,
    remotes: &[RemoteSpec],
    repos: &[EndpointRepo],
    conflicts: &[BranchConflict],
) -> Result<()> {
    let repos_by_remote = endpoint_repos_by_remote_name(context, repos)?;
    for conflict in conflicts {
        for (target_remote, target_sha) in &conflict.tips {
            // Skip endpoints that are not part of this sync's remote set.
            if !remotes.iter().any(|remote| &remote.name == target_remote) {
                continue;
            }
            let Some(target_repo) = repos_by_remote.get(target_remote) else {
                continue;
            };
            // Every other tip becomes a PR source against this target.
            for (source_remote, source_sha) in
                conflict.tips.iter().filter(|(_, sha)| sha != target_sha)
            {
                let head_branch = conflict_pr_branch(&conflict.branch, source_remote, source_sha);
                crate::logln!(
                    " {} branch {} {} -> {}",
                    style("pull request").cyan().bold(),
                    style(&conflict.branch).cyan(),
                    source_remote,
                    target_remote
                );
                // Dry run: report the plan only — do not push the temporary
                // conflict branch and do not talk to the provider.
                if context.dry_run {
                    continue;
                }
                let update = BranchUpdate {
                    branch: head_branch.clone(),
                    sha: source_sha.clone(),
                    target_remote: target_remote.clone(),
                    force: false,
                };
                mirror_repo.push_branch_updates(remotes, &[update])?;
                let title = format!(
                    "Resolve git-sync conflict: {} from {}",
                    conflict.branch, source_remote
                );
                let body = format!(
                    "git-sync detected divergent branch tips and opened this pull request to merge {}@{} into {}@{}.",
                    source_remote,
                    short_sha(source_sha),
                    target_remote,
                    short_sha(target_sha)
                );
                // NOTE(review): `site(...)` is unwrapped here (as in
                // close_resolved_pull_requests); presumably config validation
                // guarantees the site exists — confirm.
                let site = context.config.site(&target_repo.endpoint.site).unwrap();
                let client = ProviderClient::new(site)?;
                let pr = client.open_pull_request(
                    &target_repo.endpoint,
                    &target_repo.repo,
                    &PullRequestRequest {
                        title,
                        body,
                        head_branch,
                        base_branch: conflict.branch.clone(),
                    },
                )?;
                if let Some(url) = pr.url {
                    crate::logln!(
                        " {} {}",
                        style("opened").green().bold(),
                        style(url).dim()
                    );
                }
            }
        }
    }
    Ok(())
}
/// After a branch's conflict is resolved, closes stale git-sync conflict pull
/// requests for that branch on every endpoint and deletes the matching
/// temporary `git-sync/conflicts/...` branches.
///
/// Entirely skipped in dry-run mode or when `branches` is empty.
fn close_resolved_pull_requests(
    context: &RepoSyncContext<'_>,
    mirror_repo: &GitMirror,
    remotes: &[RemoteSpec],
    repos: &[EndpointRepo],
    branches: &BTreeSet<String>,
) -> Result<()> {
    if branches.is_empty() || context.dry_run {
        return Ok(());
    }
    let repos_by_remote = endpoint_repos_by_remote_name(context, repos)?;
    for remote in remotes {
        let Some(endpoint_repo) = repos_by_remote.get(&remote.name) else {
            continue;
        };
        // NOTE(review): site lookup is unwrapped, matching
        // open_conflict_pull_requests — assumed validated upstream.
        let site = context.config.site(&endpoint_repo.endpoint.site).unwrap();
        let client = ProviderClient::new(site)?;
        for branch in branches {
            let prefix = conflict_pr_branch_prefix(branch);
            let closed_count = client.close_pull_requests_by_head_prefix(
                &endpoint_repo.endpoint,
                &endpoint_repo.repo,
                branch,
                &prefix,
            )?;
            if closed_count > 0 {
                let plural = if closed_count == 1 { "" } else { "s" };
                crate::logln!(
                    " {} {} stale pull request{} for branch {} on {}",
                    style("close").green().bold(),
                    closed_count,
                    plural,
                    style(branch).cyan(),
                    style(&remote.display).dim()
                );
            }
            // Also drop the temporary conflict branches themselves.
            delete_conflict_branches(mirror_repo, remotes, remote, &prefix)?;
        }
    }
    Ok(())
}
/// Deletes every branch under `prefix` (the conflict-branch namespace for one
/// base branch) on a single remote; a no-op when none exist there.
fn delete_conflict_branches(
    mirror_repo: &GitMirror,
    remotes: &[RemoteSpec],
    remote: &RemoteSpec,
    prefix: &str,
) -> Result<()> {
    let mut deletions = Vec::new();
    for branch in mirror_repo.remote_branch_names_with_prefix(&remote.name, prefix)? {
        deletions.push(BranchDeletion {
            branch,
            // These deletions are housekeeping, not propagation of a
            // user-initiated delete, so no originating remotes are recorded.
            deleted_remotes: Vec::new(),
            target_remotes: vec![remote.name.clone()],
        });
    }
    if deletions.is_empty() {
        return Ok(());
    }
    mirror_repo.delete_branches(remotes, &deletions)
}
/// Indexes the discovered endpoint repos by their git remote name, keeping
/// only repos whose endpoint belongs to this mirror group.
fn endpoint_repos_by_remote_name<'a>(
    context: &RepoSyncContext<'_>,
    repos: &'a [EndpointRepo],
) -> Result<HashMap<String, &'a EndpointRepo>> {
    let by_name = repos
        .iter()
        .filter(|repo| {
            context
                .mirror
                .endpoints
                .iter()
                .any(|endpoint| endpoint == &repo.endpoint)
        })
        .map(|repo| (remote_name_for_endpoint_repo(repo), repo))
        .collect();
    Ok(by_name)
}
/// Derives the git remote name used for an endpoint repo:
/// `<site>_<namespace>`, sanitized via `safe_remote_name`.
fn remote_name_for_endpoint_repo(endpoint_repo: &EndpointRepo) -> String {
    let raw = format!(
        "{}_{}",
        endpoint_repo.endpoint.site, endpoint_repo.endpoint.namespace
    );
    safe_remote_name(&raw)
}
/// Collects the branch names from a list of push decisions into a sorted set.
fn branch_names(branches: &[crate::git::BranchDecision]) -> BTreeSet<String> {
    let mut names = BTreeSet::new();
    for decision in branches {
        names.insert(decision.branch.clone());
    }
    names
}
/// Collects the branch names touched by a list of branch updates.
fn branch_names_from_updates(updates: &[BranchUpdate]) -> BTreeSet<String> {
    let mut names = BTreeSet::new();
    for update in updates {
        names.insert(update.branch.clone());
    }
    names
}
/// Scans every remote's branch list for internal `git-sync/conflicts/...`
/// branches and returns the decoded set of base branches they were opened for.
fn conflict_pr_base_branches(refs: &BTreeMap<String, RemoteRefState>) -> BTreeSet<String> {
    let mut bases = BTreeSet::new();
    for remote in refs.values() {
        for branch in remote.branches.keys() {
            if let Some(base) = conflict_pr_base_branch(branch) {
                bases.insert(base);
            }
        }
    }
    bases
}
/// Builds the temporary conflict-branch name for one diverged tip:
/// `<prefix>from-<sanitized-remote>-<short-sha>`.
fn conflict_pr_branch(branch: &str, source_remote: &str, source_sha: &str) -> String {
    let prefix = conflict_pr_branch_prefix(branch);
    let remote = safe_ref_component(source_remote);
    format!("{prefix}from-{remote}-{}", short_sha(source_sha))
}
/// Namespace prefix for all conflict branches of `branch`:
/// `<CONFLICT_BRANCH_ROOT><hex(branch)>/`. Hex-encoding the base name keeps
/// the mapping reversible and avoids slug collisions (e.g. `a/b` vs `a-b`).
fn conflict_pr_branch_prefix(branch: &str) -> String {
    let encoded = hex_component(branch);
    format!("{CONFLICT_BRANCH_ROOT}{encoded}/")
}
/// True for branches inside git-sync's own temporary conflict namespace;
/// these are machinery, not user branches, and are excluded from
/// deletion-propagation tracking.
fn is_internal_conflict_branch(branch: &str) -> bool {
    branch.strip_prefix(CONFLICT_BRANCH_ROOT).is_some()
}
/// Inverse of `conflict_pr_branch_prefix`: decodes the base branch name out
/// of a conflict-branch ref, or `None` when `branch` is not in the conflict
/// namespace or the hex segment is malformed.
fn conflict_pr_base_branch(branch: &str) -> Option<String> {
    let encoded = branch
        .strip_prefix(CONFLICT_BRANCH_ROOT)?
        .split_once('/')?
        .0;
    decode_hex_component(encoded)
}
/// Lowercase-hex-encodes a string's UTF-8 bytes (two hex digits per byte).
fn hex_component(value: &str) -> String {
    value.bytes().map(|byte| format!("{byte:02x}")).collect()
}

/// Decodes a string produced by `hex_component`. Returns `None` on odd
/// length, non-(lowercase-)hex digits, or bytes that are not valid UTF-8.
fn decode_hex_component(value: &str) -> Option<String> {
    let digits = value.as_bytes();
    if digits.len() % 2 != 0 {
        return None;
    }
    let bytes = digits
        .chunks_exact(2)
        .map(|pair| Some((hex_value(pair[0])? << 4) | hex_value(pair[1])?))
        .collect::<Option<Vec<u8>>>()?;
    String::from_utf8(bytes).ok()
}

/// Value of a single lowercase hex digit; `None` for anything else
/// (uppercase digits are intentionally rejected — the encoder only emits
/// lowercase).
fn hex_value(byte: u8) -> Option<u8> {
    match byte {
        b'0'..=b'9' => Some(byte - b'0'),
        b'a'..=b'f' => Some(10 + byte - b'a'),
        _ => None,
    }
}
/// Sanitizes a string for use inside a git ref component: keeps ASCII
/// alphanumerics plus `-`, `_`, `.`; every other character becomes `-`, and
/// leading/trailing dashes are trimmed from the result.
fn safe_ref_component(value: &str) -> String {
    let mapped: String = value
        .chars()
        .map(|ch| {
            if ch.is_ascii_alphanumeric() || matches!(ch, '-' | '_' | '.') {
                ch
            } else {
                '-'
            }
        })
        .collect();
    mapped.trim_matches('-').to_string()
}
struct BranchDeletionConflict { struct BranchDeletionConflict {
branch: String, branch: String,
deleted_remotes: Vec<String>, deleted_remotes: Vec<String>,
@@ -869,7 +1259,12 @@ fn branch_deletion_decisions(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut branches = BTreeSet::new(); let mut branches = BTreeSet::new();
for refs in previous_refs.values() { for refs in previous_refs.values() {
branches.extend(refs.branches.keys().cloned()); branches.extend(
refs.branches
.keys()
.filter(|branch| !is_internal_conflict_branch(branch))
.cloned(),
);
} }
let mut deletions = Vec::new(); let mut deletions = Vec::new();
+7
View File
@@ -22,6 +22,7 @@ fn parses_token_forms() {
create_missing = true create_missing = true
visibility = "private" visibility = "private"
allow_force = false allow_force = false
conflict_resolution = "auto_rebase_pull_request"
[[mirrors.endpoints]] [[mirrors.endpoints]]
site = "github" site = "github"
@@ -38,6 +39,10 @@ fn parses_token_forms() {
assert_eq!(config.sites.len(), 1); assert_eq!(config.sites.len(), 1);
assert_eq!(config.mirrors[0].endpoints.len(), 2); assert_eq!(config.mirrors[0].endpoints.len(), 2);
assert_eq!(
config.mirrors[0].conflict_resolution,
ConflictResolutionStrategy::AutoRebasePullRequest
);
let webhook = config.webhook.unwrap(); let webhook = config.webhook.unwrap();
assert!(webhook.install); assert!(webhook.install);
assert_eq!(webhook.url, "https://mirror.example.test/webhook"); assert_eq!(webhook.url, "https://mirror.example.test/webhook");
@@ -62,6 +67,7 @@ fn validation_rejects_unknown_sites_and_single_endpoint_groups() {
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution: ConflictResolutionStrategy::Fail,
}], }],
webhook: None, webhook: None,
}; };
@@ -87,6 +93,7 @@ fn validation_rejects_unknown_sites_and_single_endpoint_groups() {
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution: ConflictResolutionStrategy::Fail,
}], }],
webhook: None, webhook: None,
}; };
+100
View File
@@ -180,6 +180,83 @@ fn branch_decisions_force_selects_newest_divergent_tip() {
assert_eq!(main.target_remotes, vec!["a".to_string()]); assert_eq!(main.target_remotes, vec!["a".to_string()]);
} }
/// End-to-end check: two remotes with divergent `main` tips (non-overlapping
/// file edits) are linearized by `auto_rebase_branch_conflict` into one new
/// history. The resolved tip must be a fresh sha (neither original tip),
/// remote `a` gets a plain push while remote `b` — whose tip was rewritten —
/// is marked `force`, and pushing the updates converges both remotes.
#[test]
fn auto_rebase_branch_conflict_replays_later_tip_and_marks_force_targets() {
    let fixture = GitFixture::new();
    // Shared base commit on both remotes.
    let base = fixture.commit("base", "base", 1_700_000_000);
    fixture.push_head(&fixture.remote_a, "main");
    fixture.push_head(&fixture.remote_b, "main");
    // Divergent but non-conflicting edits: `a` adds a.txt, `b` adds b.txt.
    let a_tip = fixture.commit_file("a", "a.txt", "a\n", 1_700_000_100);
    fixture.push_head(&fixture.remote_a, "main");
    fixture.reset_hard(&base);
    let b_tip = fixture.commit_file("b", "b.txt", "b\n", 1_700_000_200);
    fixture.push_head(&fixture.remote_b, "main");
    let mirror = fixture.mirror();
    fixture.fetch_all(&mirror);
    let (_, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
    let decision = mirror
        .auto_rebase_branch_conflict(&fixture.remotes(), "main", &conflicts[0].tips)
        .unwrap();
    assert_eq!(decision.branch, "main");
    // The rebased tip is a rewritten commit, not either original tip.
    assert_ne!(decision.sha, a_tip);
    assert_ne!(decision.sha, b_tip);
    assert_eq!(decision.updates.len(), 2);
    let a_update = decision
        .updates
        .iter()
        .find(|update| update.target_remote == "a")
        .unwrap();
    // Remote `a` can fast-forward to the result: no force needed.
    assert!(!a_update.force);
    let b_update = decision
        .updates
        .iter()
        .find(|update| update.target_remote == "b")
        .unwrap();
    // Remote `b`'s commit was replayed, so its ref must be force-updated.
    assert!(b_update.force);
    mirror
        .push_branch_updates(&fixture.remotes(), &decision.updates)
        .unwrap();
    // Both remotes now agree on the rebased tip.
    assert_eq!(
        fixture.remote_ref(&fixture.remote_a, "refs/heads/main"),
        decision.sha
    );
    assert_eq!(
        fixture.remote_ref(&fixture.remote_b, "refs/heads/main"),
        decision.sha
    );
}

/// When both divergent tips edit the same file with different contents, the
/// replay hits a genuine merge conflict and `auto_rebase_branch_conflict`
/// must fail with a descriptive error instead of silently picking a side.
#[test]
fn auto_rebase_branch_conflict_fails_on_file_conflict() {
    let fixture = GitFixture::new();
    fixture.commit_file("base", "file.txt", "base\n", 1_700_000_000);
    let base = fixture.head();
    fixture.push_head(&fixture.remote_a, "main");
    fixture.push_head(&fixture.remote_b, "main");
    // Both sides rewrite the same file — an irreconcilable divergence.
    fixture.commit_file("a", "file.txt", "a\n", 1_700_000_100);
    fixture.push_head(&fixture.remote_a, "main");
    fixture.reset_hard(&base);
    fixture.commit_file("b", "file.txt", "b\n", 1_700_000_200);
    fixture.push_head(&fixture.remote_b, "main");
    let mirror = fixture.mirror();
    fixture.fetch_all(&mirror);
    let (_, conflicts) = mirror.branch_decisions(&fixture.remotes(), false).unwrap();
    let error = mirror
        .auto_rebase_branch_conflict(&fixture.remotes(), "main", &conflicts[0].tips)
        .unwrap_err()
        .to_string();
    assert!(error.contains("auto-rebase failed"));
}
#[test] #[test]
fn push_branches_creates_missing_branch_on_other_remotes() { fn push_branches_creates_missing_branch_on_other_remotes() {
let fixture = GitFixture::new(); let fixture = GitFixture::new();
@@ -369,6 +446,29 @@ impl GitFixture {
self.head() self.head()
} }
    /// Writes `contents` to `file_name` in the fixture work tree, stages it,
    /// and commits with pinned author/committer timestamps so commit shas are
    /// deterministic across test runs. Returns the new HEAD sha.
    fn commit_file(
        &self,
        message: &str,
        file_name: &str,
        contents: &str,
        timestamp: i64,
    ) -> String {
        let path = self.work.join(file_name);
        fs::write(path, contents).unwrap();
        git(Some(&self.work), ["add", file_name]);
        // Git date format "@<epoch> <tz>"; pinning both dates makes the hash
        // depend only on tree + message + parents.
        let date = format!("@{timestamp} +0000");
        let output = Command::new("git")
            .current_dir(&self.work)
            .env("GIT_AUTHOR_DATE", &date)
            .env("GIT_COMMITTER_DATE", &date)
            .args(["commit", "-m", message])
            .output()
            .unwrap();
        assert_success(&output, "git commit");
        self.head()
    }
fn head(&self) -> String { fn head(&self) -> String {
git_output(Some(&self.work), ["rev-parse", "HEAD"]) git_output(Some(&self.work), ["rev-parse", "HEAD"])
} }
+15 -1
View File
@@ -11,6 +11,7 @@ fn wizard_builds_sync_group_from_profile_urls() {
"gt-token", "gt-token",
"", "",
"n", "n",
"",
"n", "n",
"4", "4",
] ]
@@ -44,6 +45,10 @@ fn wizard_builds_sync_group_from_profile_urls() {
assert!(config.mirrors[0].create_missing); assert!(config.mirrors[0].create_missing);
assert_eq!(config.mirrors[0].visibility, Visibility::Private); assert_eq!(config.mirrors[0].visibility, Visibility::Private);
assert!(!config.mirrors[0].allow_force); assert!(!config.mirrors[0].allow_force);
assert_eq!(
config.mirrors[0].conflict_resolution,
ConflictResolutionStrategy::AutoRebasePullRequest
);
let output = String::from_utf8(output).unwrap(); let output = String::from_utf8(output).unwrap();
assert!(output.contains("1. github.com/hykilpikonna <-> gitea.example.test/azalea")); assert!(output.contains("1. github.com/hykilpikonna <-> gitea.example.test/azalea"));
@@ -67,6 +72,7 @@ fn wizard_can_build_three_way_sync() {
"gt-token", "gt-token",
"", "",
"n", "n",
"",
"n", "n",
"4", "4",
] ]
@@ -92,6 +98,7 @@ fn wizard_can_enable_webhooks() {
"gt-token", "gt-token",
"", "",
"n", "n",
"",
"y", "y",
"https://mirror.example.test/webhook", "https://mirror.example.test/webhook",
"y", "y",
@@ -142,6 +149,7 @@ fn wizard_reuses_existing_credentials_for_same_instance() {
"https://github.com/bob", "https://github.com/bob",
"", "",
"n", "n",
"",
"n", "n",
"4", "4",
] ]
@@ -195,6 +203,7 @@ fn wizard_starts_existing_config_at_sync_group_menu() {
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution: ConflictResolutionStrategy::Fail,
}], }],
webhook: None, webhook: None,
}; };
@@ -256,6 +265,7 @@ fn wizard_edits_existing_sync_group_from_menu() {
create_missing: false, create_missing: false,
visibility: Visibility::Public, visibility: Visibility::Public,
allow_force: true, allow_force: true,
conflict_resolution: ConflictResolutionStrategy::Fail,
}], }],
webhook: None, webhook: None,
}; };
@@ -267,6 +277,7 @@ fn wizard_edits_existing_sync_group_from_menu() {
"https://gitlab.com/bob", "https://gitlab.com/bob",
"", "",
"n", "n",
"",
"n", "n",
"4", "4",
] ]
@@ -333,10 +344,11 @@ fn wizard_prefills_existing_sync_group_when_editing() {
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution: ConflictResolutionStrategy::Fail,
}], }],
webhook: None, webhook: None,
}; };
let input = ["2", "1", "", "", "", "", "n", "n", "4"].join("\n") + "\n"; let input = ["2", "1", "", "", "", "", "n", "", "n", "4"].join("\n") + "\n";
let mut reader = Cursor::new(input.as_bytes()); let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new(); let mut output = Vec::new();
@@ -393,6 +405,7 @@ fn wizard_deletes_existing_sync_group_from_menu() {
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution: ConflictResolutionStrategy::Fail,
}], }],
webhook: None, webhook: None,
}; };
@@ -448,6 +461,7 @@ fn wizard_can_go_back_from_delete_menu() {
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution: ConflictResolutionStrategy::Fail,
}], }],
webhook: None, webhook: None,
}; };
+61
View File
@@ -47,12 +47,14 @@ where
W: Write, W: Write,
{ {
let endpoints = prompt_sync_group_endpoints(reader, writer, config, &[])?; let endpoints = prompt_sync_group_endpoints(reader, writer, config, &[])?;
let conflict_resolution = prompt_conflict_resolution(reader, writer, None)?;
config.upsert_mirror(MirrorConfig { config.upsert_mirror(MirrorConfig {
name: next_mirror_name(config), name: next_mirror_name(config),
endpoints, endpoints,
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution,
}); });
prompt_webhook_setup(reader, writer, config)?; prompt_webhook_setup(reader, writer, config)?;
Ok(()) Ok(())
@@ -249,8 +251,16 @@ where
match value.parse::<usize>() { match value.parse::<usize>() {
Ok(index) if (1..=config.mirrors.len()).contains(&index) => { Ok(index) if (1..=config.mirrors.len()).contains(&index) => {
let existing = config.mirrors[index - 1].endpoints.clone(); let existing = config.mirrors[index - 1].endpoints.clone();
let existing_conflict_resolution =
config.mirrors[index - 1].conflict_resolution.clone();
let endpoints = prompt_sync_group_endpoints(reader, writer, config, &existing)?; let endpoints = prompt_sync_group_endpoints(reader, writer, config, &existing)?;
let conflict_resolution = prompt_conflict_resolution(
reader,
writer,
Some(&existing_conflict_resolution),
)?;
config.mirrors[index - 1].endpoints = endpoints; config.mirrors[index - 1].endpoints = endpoints;
config.mirrors[index - 1].conflict_resolution = conflict_resolution;
prompt_webhook_setup(reader, writer, config)?; prompt_webhook_setup(reader, writer, config)?;
writeln!(writer, "updated sync group {index}")?; writeln!(writer, "updated sync group {index}")?;
return Ok(true); return Ok(true);
@@ -408,6 +418,57 @@ where
} }
} }
/// Interactively asks which `ConflictResolutionStrategy` a sync group should
/// use, re-prompting until the answer parses.
///
/// `existing` supplies the default shown when editing a group; creating a new
/// group defaults to the recommended "auto-rebase + pull-request". Accepts
/// either the menu number or common spellings of each strategy name.
fn prompt_conflict_resolution<R, W>(
    reader: &mut R,
    writer: &mut W,
    existing: Option<&ConflictResolutionStrategy>,
) -> Result<ConflictResolutionStrategy>
where
    R: BufRead,
    W: Write,
{
    let default = existing
        .map(conflict_resolution_value)
        .unwrap_or("auto-rebase + pull-request");
    loop {
        writeln!(writer, "How should git-sync resolve branch conflicts?")?;
        writeln!(writer, " 1. fail")?;
        writeln!(writer, " 2. auto-rebase and fail on file conflict")?;
        writeln!(writer, " 3. pull-request")?;
        writeln!(writer, " 4. auto-rebase + pull-request (recommended)")?;
        let value = prompt_with_default(reader, writer, "Conflict resolution", default)?;
        // Case-insensitive match on number or name; anything else re-prompts.
        match value.trim().to_ascii_lowercase().as_str() {
            "1" | "fail" => return Ok(ConflictResolutionStrategy::Fail),
            "2" | "auto-rebase" | "auto_rebase" | "rebase" => {
                return Ok(ConflictResolutionStrategy::AutoRebase);
            }
            "3" | "pull-request" | "pull_request" | "pr" => {
                return Ok(ConflictResolutionStrategy::PullRequest);
            }
            "4"
            | "auto-rebase + pull-request"
            | "auto-rebase+pull-request"
            | "auto_rebase_pull_request"
            | "auto-rebase-pull-request" => {
                return Ok(ConflictResolutionStrategy::AutoRebasePullRequest);
            }
            _ => writeln!(
                writer,
                "Enter 1, 2, 3, 4, fail, auto-rebase, pull-request, or auto-rebase + pull-request."
            )?,
        }
    }
}
fn conflict_resolution_value(strategy: &ConflictResolutionStrategy) -> &'static str {
match strategy {
ConflictResolutionStrategy::Fail => "fail",
ConflictResolutionStrategy::AutoRebase => "auto-rebase",
ConflictResolutionStrategy::PullRequest => "pull-request",
ConflictResolutionStrategy::AutoRebasePullRequest => "auto-rebase + pull-request",
}
}
fn write_sync_groups<W>(config: &Config, writer: &mut W) -> Result<()> fn write_sync_groups<W>(config: &Config, writer: &mut W) -> Result<()>
where where
W: Write, W: Write,
+114
View File
@@ -255,6 +255,120 @@ fn uninstall_webhook_deletes_matching_github_hook() {
handle.join().unwrap(); handle.join().unwrap();
} }
/// `open_pull_request` against a mock GitHub-style API: when no open PR
/// exists for the base branch (first GET returns `[]`), a POST creates one
/// and the returned `html_url` is surfaced to the caller.
#[test]
fn open_pull_request_posts_github_pull_when_missing() {
    let (api_url, handle) = request_server(
        vec![
            ("200 OK", "[]"),
            (
                "201 Created",
                r#"{"number":7,"html_url":"https://github.example.test/pull/7"}"#,
            ),
        ],
        |index, request| match index {
            // First request: list open PRs with the base branch as filter.
            0 => assert!(
                request
                    .starts_with("GET /repos/alice/repo/pulls?state=open&base=main&per_page=100 "),
                "request was {request}"
            ),
            // Second request: create the PR; body carries title/head/base.
            1 => {
                assert!(
                    request.starts_with("POST /repos/alice/repo/pulls "),
                    "request was {request}"
                );
                assert!(request.contains("Resolve conflict"));
                assert!(request.contains("git-sync/conflicts/main/from-b-abc123"));
                assert!(request.contains("main"));
            }
            _ => unreachable!(),
        },
    );
    let site = SiteConfig {
        api_url: Some(api_url),
        ..site(ProviderKind::Github, None)
    };
    let client = ProviderClient::new(&site).unwrap();
    let pr = client
        .open_pull_request(
            &EndpointConfig {
                site: "github".to_string(),
                kind: NamespaceKind::User,
                namespace: "alice".to_string(),
            },
            &RemoteRepo {
                name: "repo".to_string(),
                clone_url: "https://github.com/alice/repo.git".to_string(),
                private: true,
                description: None,
            },
            &PullRequestRequest {
                title: "Resolve conflict".to_string(),
                body: "Body".to_string(),
                head_branch: "git-sync/conflicts/main/from-b-abc123".to_string(),
                base_branch: "main".to_string(),
            },
        )
        .unwrap();
    assert_eq!(pr.url.unwrap(), "https://github.example.test/pull/7");
    handle.join().unwrap();
}

/// `close_pull_requests_by_head_prefix`: only open PRs whose head ref starts
/// with the conflict prefix are PATCHed to `closed`; the unrelated PR
/// (number 8, head `feature`) is left alone and the closed count is returned.
#[test]
fn close_pull_requests_by_head_prefix_closes_matching_github_pulls() {
    let (api_url, handle) = request_server(
        vec![
            (
                "200 OK",
                r#"[{"number":7,"head":{"ref":"git-sync/conflicts/main/from-b-abc123"}},{"number":8,"head":{"ref":"feature"}}]"#,
            ),
            ("200 OK", r#"{"number":7}"#),
        ],
        |index, request| match index {
            0 => assert!(
                request
                    .starts_with("GET /repos/alice/repo/pulls?state=open&base=main&per_page=100 "),
                "request was {request}"
            ),
            // Only PR #7 matches the prefix, so exactly one PATCH is issued.
            1 => {
                assert!(
                    request.starts_with("PATCH /repos/alice/repo/pulls/7 "),
                    "request was {request}"
                );
                assert!(request.contains("closed"));
            }
            _ => unreachable!(),
        },
    );
    let site = SiteConfig {
        api_url: Some(api_url),
        ..site(ProviderKind::Github, None)
    };
    let client = ProviderClient::new(&site).unwrap();
    let closed = client
        .close_pull_requests_by_head_prefix(
            &EndpointConfig {
                site: "github".to_string(),
                kind: NamespaceKind::User,
                namespace: "alice".to_string(),
            },
            &RemoteRepo {
                name: "repo".to_string(),
                clone_url: "https://github.com/alice/repo.git".to_string(),
                private: true,
                description: None,
            },
            "main",
            "git-sync/conflicts/main/",
        )
        .unwrap();
    assert_eq!(closed, 1);
    handle.join().unwrap();
}
fn site(provider: ProviderKind, git_username: Option<String>) -> SiteConfig { fn site(provider: ProviderKind, git_username: Option<String>) -> SiteConfig {
SiteConfig { SiteConfig {
name: "site".to_string(), name: "site".to_string(),
+46
View File
@@ -146,6 +146,52 @@ fn branch_deletion_decisions_conflict_when_branch_changed_elsewhere() {
assert_eq!(conflicts[0].changed_remotes, vec!["gitea".to_string()]); assert_eq!(conflicts[0].changed_remotes, vec!["gitea".to_string()]);
} }
/// Branches inside the internal `git-sync/conflicts/...` namespace must never
/// participate in deletion propagation: here one remote dropped the conflict
/// branch while the other kept it, yet no deletion, conflict, or block is
/// reported.
#[test]
fn branch_deletion_decisions_ignore_internal_conflict_branches() {
    let remotes = test_remotes();
    let conflict_branch = conflict_pr_branch("main", "gitea", "abc123");
    let mut previous = BTreeMap::new();
    previous.insert(
        "github".to_string(),
        remote_ref_state("a", &[(conflict_branch.as_str(), "111")]),
    );
    previous.insert(
        "gitea".to_string(),
        remote_ref_state("b", &[(conflict_branch.as_str(), "111")]),
    );
    let mut current = BTreeMap::new();
    // "github" deleted the conflict branch; "gitea" still has it.
    current.insert("github".to_string(), remote_ref_state("c", &[]));
    current.insert(
        "gitea".to_string(),
        remote_ref_state("d", &[(conflict_branch.as_str(), "111")]),
    );
    let (deletions, conflicts, blocked) =
        branch_deletion_decisions(&remotes, Some(&previous), &current);
    assert!(deletions.is_empty());
    assert!(conflicts.is_empty());
    assert!(blocked.is_empty());
}

/// The conflict-branch prefix hex-encodes the base branch name, so two base
/// branches that would slugify identically ("release/foo" vs "release-foo")
/// still get distinct prefixes, and the base name decodes back out exactly.
#[test]
fn conflict_branch_prefixes_are_reversible_not_slug_collisions() {
    let slash_branch = conflict_pr_branch_prefix("release/foo");
    let dash_branch = conflict_pr_branch_prefix("release-foo");
    assert_ne!(slash_branch, dash_branch);
    assert!(slash_branch.starts_with(CONFLICT_BRANCH_ROOT));
    assert!(dash_branch.starts_with(CONFLICT_BRANCH_ROOT));
    assert_eq!(
        conflict_pr_base_branch(&format!("{slash_branch}from-gitea-abc123")),
        Some("release/foo".to_string())
    );
    assert_eq!(
        conflict_pr_base_branch(&format!("{dash_branch}from-gitea-abc123")),
        Some("release-foo".to_string())
    );
}
fn remote_ref_state(hash: &str, branches: &[(&str, &str)]) -> RemoteRefState { fn remote_ref_state(hash: &str, branches: &[(&str, &str)]) -> RemoteRefState {
RemoteRefState { RemoteRefState {
hash: hash.to_string(), hash: hash.to_string(),
+3 -1
View File
@@ -1,6 +1,7 @@
use super::*; use super::*;
use crate::config::{ use crate::config::{
EndpointConfig, MirrorConfig, NamespaceKind, SiteConfig, TokenConfig, Visibility, ConflictResolutionStrategy, EndpointConfig, MirrorConfig, NamespaceKind, SiteConfig,
TokenConfig, Visibility,
}; };
use std::io::{Read, Write}; use std::io::{Read, Write};
use std::net::TcpListener; use std::net::TcpListener;
@@ -111,6 +112,7 @@ fn matches_jobs_by_provider_and_namespace() {
create_missing: true, create_missing: true,
visibility: Visibility::Private, visibility: Visibility::Private,
allow_force: false, allow_force: false,
conflict_resolution: ConflictResolutionStrategy::Fail,
}], }],
webhook: None, webhook: None,
}; };