Compare commits

..

47 Commits

Author SHA1 Message Date
azalea 81edd9b8bf [F] Fix logging 2026-05-10 10:45:33 +00:00
azalea 09cede6658 [F] Fix log sync 2026-05-10 06:14:32 +00:00
azalea 009ba9b247 [U] info 2026-05-10 05:46:50 +00:00
azalea fbf5534fe3 [F] Visibility sync 2026-05-10 01:36:06 +00:00
azalea b41f530d1e [O] Parallel everything (#5) 2026-05-10 09:28:38 +08:00
azalea 915a63a955 [F] Fix gitlab project listing 2026-05-10 01:07:47 +00:00
azalea b0469d80a7 [F] Fix webhook uninstall 2026-05-09 23:53:43 +00:00
azalea 260f42b973 [F] Webhook issues 2026-05-09 23:44:18 +00:00
azalea 44b1865b15 [F] Fix webhook uninstall missing url 2026-05-09 22:47:02 +00:00
azalea de88150445 [O] Webhook install respect filters 2026-05-09 22:38:50 +00:00
azalea f94a0f11b5 [F] Fix gitea pagination 2026-05-09 22:24:13 +00:00
azalea 0ee43ea58f [O] Test actual PR 2026-05-09 19:37:08 +00:00
azalea 10c55062eb [-] Remove legacy features 2026-05-09 17:41:20 +00:00
azalea f3c0b90a0d [U] Update deps 2026-05-09 17:07:45 +00:00
azalea 513bda3696 [O] UX 2026-05-09 08:22:31 +00:00
azalea 018f1f12d5 [O] UX 2026-05-08 16:24:23 +00:00
azalea 3d73f20c1f [+] End-to-end testing 2026-05-08 15:32:33 +00:00
azalea e43e555b37 [+] Track repo deletions 2026-05-08 07:01:32 +00:00
azalea bc6509ad59 [U] Readme 2026-05-08 06:59:17 +00:00
azalea f28f96f27c [U] Readme 2026-05-08 06:57:55 +00:00
azalea 527e69dc4a [O] Readme 2026-05-08 06:44:26 +00:00
azalea 566c3c1b59 [F] Fix build 2026-05-08 06:33:42 +00:00
azalea b7f3404f99 [+] Docker 2026-05-08 06:29:13 +00:00
azalea d19b061f7c [O] Rewrite readme 2026-05-08 05:58:02 +00:00
azalea 9a9cbba767 [M] Rebrand 2026-05-08 05:34:08 +00:00
azalea ebeb045c51 [+] Conflict resolution (#4)
* [+] Conflict resolution

* [F] Fix conflict resolution branches being synched
2026-05-08 13:16:13 +08:00
azalea 67dd55a1cf [O] Rework webhook 2026-05-08 04:13:45 +00:00
azalea 0566e97c6a [O] Better webhook error handling 2026-05-08 01:03:30 +00:00
azalea 60b6caf1a6 [O] Better interactive config 2026-05-08 00:52:37 +00:00
azalea 9c651728e4 [+] Github Actions builder 2026-05-07 18:48:04 +00:00
azalea 7fc3ab32ad [M] Move tests 2026-05-07 18:27:38 +00:00
azalea 17e3961267 [O] Cleanup codebase 2026-05-07 18:08:33 +00:00
azalea 19658c4ba9 [O] Clean up codebase 2026-05-07 16:41:18 +00:00
azalea 4545a5e515 Revert "[O] Use libraries"
This reverts commit b225c4bbd8.
2026-05-07 16:27:28 +00:00
azalea b225c4bbd8 [O] Use libraries 2026-05-07 15:03:47 +00:00
azalea c013ce1858 [+] Webhook mode 2026-05-07 04:55:49 +00:00
azalea 7b65d919d6 [+] Forgejo (#3)
* [+] Forgejo, tangled

* [-] Tangled
2026-05-07 12:45:01 +08:00
azalea 39ba96051c [F] Fix branch deletion awareness 2026-05-07 02:58:43 +00:00
azalea b70eaee2aa [F] Fix heuristic 2026-05-07 02:22:34 +00:00
azalea 92bcee49ea [O] Better multithreaded logging 2026-05-06 20:19:13 +00:00
azalea bb991d94f0 [+] Multithreading 2026-05-06 20:05:59 +00:00
azalea 61450e3a97 [O] Optimize sync with heuristic 2026-05-06 18:32:16 +00:00
azalea 18cf024b6e [+] Retry 2026-05-06 17:42:20 +00:00
azalea 3c0b3fc1e8 [O] Better CLI wizard (#2) 2026-05-04 06:49:25 +08:00
azalea 0cdabb09e6 [O] Better CLI wizard (#1) 2026-05-04 01:54:43 +08:00
azalea fd7e0db96d Initial commit 2026-05-03 17:27:50 +00:00
azalea 947e71d1fd [+] Create repo by codex 2026-05-03 17:24:24 +00:00
20 changed files with 260 additions and 2020 deletions
+13 -46
View File
@@ -1,7 +1,3 @@
<p align="center">
<img src="./docs/refray.png" alt="refray logo" width="70%"/>
</p>
# refray
A tool to keep your repos in sync across all git platforms, while being able to work from everywhere all at once.
@@ -12,46 +8,17 @@ Created becasue github is so unusable and [unreliable](https://red-squares.cian.
- **read-write mirrors**: Make changes from any provider, and the changes will sync to the others
- **webhook support**: Sync right after push, reduce potential divergence window
- **conflict handling**: Rebase or open pull requests when two platforms diverge
- **tracks deletions**: Branches/repo deletions sync across platforms (with backup)
- **tracks deletions**: Delete branches/repos across platforms when they are deleted from one platform
- **selective sync**: Sync subset of repos by regex white/black list, or by private/public visibility
- **multithreaded**: Process multiple repos simultaneously!
Supported platforms: GitHub, GitLab, Gitea, Forgejo
> [!NOTE]
> My cat made this codebase, meow
![demo](./docs/demo.webp)
<!--
The demo was rendered from an asciinema cast with capped idle pauses, Sarasa Mono SC, a One Half Dark palette with lighter dark-gray ANSI slots, and a larger font:
```sh
agg --idle-time-limit 1 \
--theme '282C34,DCDFE4,5C6370,E06C75,98C379,E5C07B,61AFEF,C678DD,56B6C2,DCDFE4,7F848E,E06C75,98C379,E5C07B,61AFEF,C678DD,56B6C2,DCDFE4' \
--text-font-family 'Sarasa Mono SC' \
--font-size 24 \
--cols 160 \
--rows 42 \
../out.cast \
demo.gif
ffmpeg -i demo.gif \
-loop 0 \
-c:v libwebp_anim \
-lossless 1 \
-compression_level 6 \
-q:v 100 \
docs/demo.webp
```
--->
> Meow
## Install
### Option 1. Install with Cargo
### Option 1. Install from source
1. Install rust cargo if you don't have it: https://rustup.rs
2. `cargo install refray`
@@ -102,8 +69,8 @@ token = { value = "gitea_pat_..." }
[[mirrors]]
name = "personal"
sync_visibility = "all"
repo_whitelist = "^important-"
repo_blacklist = "-archive$"
repo_whitelist = ["^important-"]
repo_blacklist = ["-archive$"]
create_missing = true
visibility = "private"
conflict_resolution = "auto_rebase_pull_request"
@@ -206,17 +173,13 @@ To move installed hooks to a new public URL, use `webhook update`. It removes ho
refray webhook update https://new.example.com/webhook
```
## Issues and Pull Requests
Issues and pull requests are not mirrored.
<!-- ## Sync Semantics
## Sync Semantics
Each mirror group is treated as a set of equivalent namespaces. Repositories are matched by repository name across all endpoints.
Set `sync_visibility = "all"`, `"private"`, or `"public"` on a mirror group to choose which repository visibility is included in that group. When `refray` creates a missing repository, it mirrors the visibility of the existing repository it is syncing from; `visibility` is only a fallback when no source visibility is available.
Set `repo_whitelist = "..."` and/or `repo_blacklist = "..."` on a mirror group to filter repository names with regular expressions. Omit `repo_whitelist` to include all repository names, and blacklist matches are excluded after whitelist matches. These name filters are independent from `sync_visibility`; both must match for a repository to be synced.
Set `repo_whitelist = ["..."]` and/or `repo_blacklist = ["..."]` on a mirror group to filter repository names with regular expressions. An empty whitelist includes all repository names, and blacklist matches are excluded after whitelist matches. These name filters are independent from `sync_visibility`; both must match for a repository to be synced.
For every repository name found in any endpoint, `refray` will:
@@ -240,7 +203,7 @@ Conflict resolution strategies are configured per mirror group:
When a previously opened conflict pull request is merged, the next sync sees the merged branch as the winning tip, pushes it to the other endpoints, and closes stale `refray/conflicts/...` pull requests for that branch.
Repository and branch deletion are propagated only when it is safe to infer intent, and `refray` writes local backup refs and bundle files under the work-dir `backups/` directory before propagating those deletions. If a repository existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous synced refs, `refray` deletes it from the remaining endpoints instead of recreating it when `delete_missing = true`. If `delete_missing = false`, that missing repository is not treated as a deletion and normal missing-repository handling applies. If the repository was deleted everywhere, `refray` removes its saved sync state after creating a local backup from the mirror cache. If the repository was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped.
Repository and branch deletion are propagated only when it is safe to infer intent. If a repository existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous synced refs, `refray` deletes it from the remaining endpoints instead of recreating it. If the repository was deleted everywhere, `refray` removes its saved sync state. If the repository was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped.
Branch deletion follows the same rule at branch scope: if a branch existed on every endpoint in the previous successful sync, then disappears from one endpoint while the remaining endpoints still have the previous tip, `refray` deletes it from the remaining endpoints instead of recreating it. If the branch was deleted on one endpoint but changed elsewhere, it is treated as a conflict and skipped.
@@ -276,4 +239,8 @@ REFRAY_E2E_ALLOW_DESTRUCTIVE=1 \
cargo test --test sequential -- --ignored --test-threads=1 --nocapture
```
By default cleanup only deletes repositories named `refray-e2e-*`. To start by deleting every owned repository visible to the configured accounts, set `REFRAY_E2E_CLEAR_ALL_REPOS=DELETE_ALL_OWNED_REPOS`. Provider skips (`REFRAY_E2E_SKIP_GITHUB`, `REFRAY_E2E_SKIP_GITLAB`, `REFRAY_E2E_SKIP_GITEA`, `REFRAY_E2E_SKIP_FORGEJO`) and `REFRAY_E2E_ALLOW_PARTIAL=1` are available for local debugging, but the full support check should run with all four providers. -->
By default cleanup only deletes repositories named `refray-e2e-*`. To start by deleting every owned repository visible to the configured accounts, set `REFRAY_E2E_CLEAR_ALL_REPOS=DELETE_ALL_OWNED_REPOS`. Provider skips (`REFRAY_E2E_SKIP_GITHUB`, `REFRAY_E2E_SKIP_GITLAB`, `REFRAY_E2E_SKIP_GITEA`, `REFRAY_E2E_SKIP_FORGEJO`) and `REFRAY_E2E_ALLOW_PARTIAL=1` are available for local debugging, but the full support check should run with all four providers.
## Issues and Pull Requests
Issues and pull requests are not mirrored.
BIN
View File
Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.6 MiB

BIN
View File
Binary file not shown.

Before

Width:  |  Height:  |  Size: 118 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 433 KiB

+23 -31
View File
@@ -56,14 +56,12 @@ pub struct MirrorConfig {
pub endpoints: Vec<EndpointConfig>,
#[serde(default)]
pub sync_visibility: SyncVisibility,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub repo_whitelist: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub repo_blacklist: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub repo_whitelist: Vec<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub repo_blacklist: Vec<String>,
#[serde(default = "default_true")]
pub create_missing: bool,
#[serde(default = "default_true")]
pub delete_missing: bool,
#[serde(default)]
pub visibility: Visibility,
#[serde(default)]
@@ -137,8 +135,8 @@ pub enum SyncVisibility {
#[derive(Clone, Debug)]
pub struct RepoNameFilter {
whitelist: Option<Regex>,
blacklist: Option<Regex>,
whitelist: Vec<Regex>,
blacklist: Vec<Regex>,
}
impl SyncVisibility {
@@ -154,41 +152,35 @@ impl SyncVisibility {
impl MirrorConfig {
pub fn repo_filter(&self) -> Result<RepoNameFilter> {
Ok(RepoNameFilter {
whitelist: compile_repo_pattern(&self.name, "repo_whitelist", &self.repo_whitelist)?,
blacklist: compile_repo_pattern(&self.name, "repo_blacklist", &self.repo_blacklist)?,
whitelist: compile_repo_patterns(&self.name, "repo_whitelist", &self.repo_whitelist)?,
blacklist: compile_repo_patterns(&self.name, "repo_blacklist", &self.repo_blacklist)?,
})
}
}
impl RepoNameFilter {
pub fn matches(&self, repo_name: &str) -> bool {
let whitelisted = self
.whitelist
.as_ref()
.is_none_or(|pattern| pattern.is_match(repo_name));
let whitelisted = self.whitelist.is_empty()
|| self
.whitelist
.iter()
.any(|pattern| pattern.is_match(repo_name));
let blacklisted = self
.blacklist
.as_ref()
.is_some_and(|pattern| pattern.is_match(repo_name));
.iter()
.any(|pattern| pattern.is_match(repo_name));
whitelisted && !blacklisted
}
}
fn compile_repo_pattern(
mirror: &str,
field: &str,
pattern: &Option<String>,
) -> Result<Option<Regex>> {
let Some(pattern) = pattern
.as_deref()
.map(str::trim)
.filter(|pattern| !pattern.is_empty())
else {
return Ok(None);
};
Regex::new(pattern)
.with_context(|| format!("mirror '{mirror}' has invalid {field} regex '{pattern}'"))
.map(Some)
fn compile_repo_patterns(mirror: &str, field: &str, patterns: &[String]) -> Result<Vec<Regex>> {
patterns
.iter()
.map(|pattern| {
Regex::new(pattern)
.with_context(|| format!("mirror '{mirror}' has invalid {field} regex '{pattern}'"))
})
.collect()
}
fn default_true() -> bool {
-79
View File
@@ -52,13 +52,6 @@ pub struct BranchUpdate {
pub force: bool,
}
#[derive(Clone, Debug)]
pub struct RefBackup {
pub refname: String,
pub sha: String,
pub description: String,
}
#[derive(Clone, Debug)]
pub struct BranchRebaseDecision {
pub branch: String,
@@ -427,63 +420,6 @@ impl GitMirror {
Ok(())
}
pub fn backup_refs(&self, backups: &[RefBackup]) -> Result<Vec<String>> {
let mut refs = Vec::new();
for backup in backups {
crate::logln!(
" {} {}",
style("backup").cyan().bold(),
style(&backup.description).dim()
);
self.run(["update-ref", &backup.refname, &backup.sha])?;
refs.push(backup.refname.clone());
}
Ok(refs)
}
pub fn create_bundle(&self, path: &Path, refs: &[String]) -> Result<bool> {
if refs.is_empty() {
return Ok(false);
}
if self.dry_run {
crate::logln!(
" {} git bundle create {} {}",
style("dry-run").yellow().bold(),
style(path.display()).dim(),
style(refs.join(" ")).dim()
);
return Ok(false);
}
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)
.with_context(|| format!("failed to create {}", parent.display()))?;
}
let output = self
.command()
.arg("bundle")
.arg("create")
.arg(path)
.args(refs)
.output()
.with_context(|| "failed to run git bundle create")?;
if !output.status.success() {
let stdout = self
.redactor
.redact(&String::from_utf8_lossy(&output.stdout));
let stderr = self
.redactor
.redact(&String::from_utf8_lossy(&output.stderr));
return Err(GitCommandError::new("git bundle create", stdout, stderr).into());
}
crate::logln!(
" {} {}",
style("backup bundle").cyan().bold(),
style(path.display()).dim()
);
Ok(true)
}
fn push_branch_update(&self, remote: &RemoteSpec, update: &BranchUpdate) -> Result<()> {
let refspec = if update.force {
format!("+{}:refs/heads/{}", update.sha, update.branch)
@@ -816,13 +752,6 @@ pub fn is_disabled_repository_error(error: &anyhow::Error) -> bool {
.any(|error| is_disabled_repository_stderr(error.stderr()))
}
pub fn is_missing_repository_error(error: &anyhow::Error) -> bool {
error
.chain()
.filter_map(|cause| cause.downcast_ref::<GitCommandError>())
.any(|error| is_missing_repository_stderr(error.stderr()))
}
fn missing_remotes(all_remote_names: &[String], source_remotes: &[String]) -> Vec<String> {
all_remote_names
.iter()
@@ -839,14 +768,6 @@ fn is_disabled_repository_stderr(stderr: &str) -> bool {
|| stderr.contains("dmca takedown")
}
fn is_missing_repository_stderr(stderr: &str) -> bool {
let stderr = stderr.to_ascii_lowercase();
(stderr.contains("repository") && stderr.contains("not found"))
|| stderr.contains("project you were looking for could not be found")
|| stderr.contains("does not appear to be a git repository")
|| stderr.contains("the requested url returned error: 404")
}
impl Redactor {
pub fn new(secrets: Vec<String>) -> Self {
let secrets = secrets
+38 -82
View File
@@ -38,8 +38,8 @@ struct ParsedProfileUrl {
#[derive(Clone, Debug, Default)]
struct RepoFilterInput {
whitelist: Option<String>,
blacklist: Option<String>,
whitelist: Vec<String>,
blacklist: Vec<String>,
}
pub fn run_config_wizard(path: &Path) -> Result<ConfigWizardOutcome> {
@@ -115,9 +115,6 @@ fn add_sync_group_styled(config: &mut Config, theme: &ColorfulTheme) -> Result<(
let endpoints = prompt_sync_group_endpoints_styled(config, theme, &[])?;
let sync_visibility = prompt_sync_visibility_styled(theme, None)?;
let repo_filters = prompt_repo_filters_styled(theme, None)?;
print_deletion_backup_notice_styled();
let create_missing = prompt_create_missing_styled(theme, None)?;
let delete_missing = prompt_delete_missing_styled(theme, None)?;
let conflict_resolution = prompt_conflict_resolution_styled(theme, None)?;
config.upsert_mirror(MirrorConfig {
name: next_mirror_name(config),
@@ -125,8 +122,7 @@ fn add_sync_group_styled(config: &mut Config, theme: &ColorfulTheme) -> Result<(
sync_visibility,
repo_whitelist: repo_filters.whitelist,
repo_blacklist: repo_filters.blacklist,
create_missing,
delete_missing,
create_missing: true,
visibility: Visibility::Private,
conflict_resolution,
});
@@ -451,8 +447,6 @@ fn edit_sync_group_styled(config: &mut Config, theme: &ColorfulTheme) -> Result<
let existing_sync_visibility = config.mirrors[index].sync_visibility.clone();
let existing_repo_whitelist = config.mirrors[index].repo_whitelist.clone();
let existing_repo_blacklist = config.mirrors[index].repo_blacklist.clone();
let existing_create_missing = config.mirrors[index].create_missing;
let existing_delete_missing = config.mirrors[index].delete_missing;
let existing_conflict_resolution = config.mirrors[index].conflict_resolution.clone();
let endpoints = prompt_sync_group_endpoints_styled(config, theme, &existing)?;
let sync_visibility = prompt_sync_visibility_styled(theme, Some(&existing_sync_visibility))?;
@@ -461,17 +455,12 @@ fn edit_sync_group_styled(config: &mut Config, theme: &ColorfulTheme) -> Result<
blacklist: existing_repo_blacklist,
};
let repo_filters = prompt_repo_filters_styled(theme, Some(&existing_repo_filters))?;
print_deletion_backup_notice_styled();
let create_missing = prompt_create_missing_styled(theme, Some(existing_create_missing))?;
let delete_missing = prompt_delete_missing_styled(theme, Some(existing_delete_missing))?;
let conflict_resolution =
prompt_conflict_resolution_styled(theme, Some(&existing_conflict_resolution))?;
config.mirrors[index].endpoints = endpoints;
config.mirrors[index].sync_visibility = sync_visibility;
config.mirrors[index].repo_whitelist = repo_filters.whitelist;
config.mirrors[index].repo_blacklist = repo_filters.blacklist;
config.mirrors[index].create_missing = create_missing;
config.mirrors[index].delete_missing = delete_missing;
config.mirrors[index].conflict_resolution = conflict_resolution;
prompt_webhook_setup_styled(config, theme)?;
println!(
@@ -791,7 +780,7 @@ fn prompt_repo_filters_styled(
existing: Option<&RepoFilterInput>,
) -> Result<RepoFilterInput> {
let existing = existing.cloned().unwrap_or_default();
let has_existing = existing.whitelist.is_some() || existing.blacklist.is_some();
let has_existing = !existing.whitelist.is_empty() || !existing.blacklist.is_empty();
if !Confirm::with_theme(theme)
.with_prompt("Configure repository name whitelist/blacklist?")
.default(has_existing)
@@ -801,69 +790,51 @@ fn prompt_repo_filters_styled(
}
Ok(RepoFilterInput {
whitelist: prompt_repo_pattern_styled(
whitelist: prompt_repo_pattern_list_styled(
theme,
"Whitelist regex (empty means all repo names)",
"Whitelist regexes (comma-separated, empty means all repo names)",
&existing.whitelist,
)?,
blacklist: prompt_repo_pattern_styled(theme, "Blacklist regex", &existing.blacklist)?,
blacklist: prompt_repo_pattern_list_styled(
theme,
"Blacklist regexes (comma-separated)",
&existing.blacklist,
)?,
})
}
fn prompt_repo_pattern_styled(
fn prompt_repo_pattern_list_styled(
theme: &ColorfulTheme,
prompt: &str,
existing: &Option<String>,
) -> Result<Option<String>> {
existing: &[String],
) -> Result<Vec<String>> {
let input = Input::<String>::with_theme(theme)
.with_prompt(prompt)
.allow_empty(true)
.validate_with(|value: &String| validate_repo_pattern(value));
let input = if let Some(existing) = existing {
input.default(existing.clone())
} else {
.validate_with(|value: &String| validate_repo_pattern_list(value));
let input = if existing.is_empty() {
input
} else {
input.default(existing.join(", "))
};
let value = input.interact_text()?;
Ok(parse_repo_pattern(&value))
Ok(parse_repo_pattern_list(&value))
}
fn print_deletion_backup_notice_styled() {
println!();
println!(
"{} {}",
style("Deletion backups").cyan().bold(),
style("refray keeps a local backup before propagating repository or branch deletes").dim()
);
}
fn prompt_create_missing_styled(theme: &ColorfulTheme, existing: Option<bool>) -> Result<bool> {
Confirm::with_theme(theme)
.with_prompt("Create repositories that are missing from an endpoint?")
.default(existing.unwrap_or(true))
.interact()
.map_err(Into::into)
}
fn prompt_delete_missing_styled(theme: &ColorfulTheme, existing: Option<bool>) -> Result<bool> {
Confirm::with_theme(theme)
.with_prompt("When a previously synced repository is deleted from one endpoint, delete it everywhere?")
.default(existing.unwrap_or(true))
.interact()
.map_err(Into::into)
}
fn validate_repo_pattern(value: &str) -> std::result::Result<(), String> {
let Some(pattern) = parse_repo_pattern(value) else {
return Ok(());
};
Regex::new(&pattern).map_err(|error| format!("invalid regex '{pattern}': {error}"))?;
fn validate_repo_pattern_list(value: &str) -> std::result::Result<(), String> {
for pattern in parse_repo_pattern_list(value) {
Regex::new(&pattern).map_err(|error| format!("invalid regex '{pattern}': {error}"))?;
}
Ok(())
}
fn parse_repo_pattern(value: &str) -> Option<String> {
let value = value.trim();
(!value.is_empty()).then(|| value.to_string())
fn parse_repo_pattern_list(value: &str) -> Vec<String> {
value
.split(',')
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToOwned::to_owned)
.collect()
}
fn sync_visibility_index(sync_visibility: &SyncVisibility) -> usize {
@@ -949,11 +920,10 @@ fn sync_group_summary(config: &Config, mirror: &MirrorConfig) -> String {
.collect::<Vec<_>>()
.join(" <-> ");
format!(
"{} ({}, {}, {}, {})",
"{} ({}, {}, {})",
endpoints,
sync_visibility_label(&mirror.sync_visibility),
repo_filter_label(mirror),
repo_lifecycle_label(mirror),
conflict_resolution_label(&mirror.conflict_resolution)
)
}
@@ -967,28 +937,14 @@ fn sync_visibility_label(sync_visibility: &SyncVisibility) -> &'static str {
}
fn repo_filter_label(mirror: &MirrorConfig) -> String {
match (&mirror.repo_whitelist, &mirror.repo_blacklist) {
(None, None) => "repos: all names".to_string(),
(Some(_), None) => "repos: whitelist".to_string(),
(None, Some(_)) => "repos: blacklist".to_string(),
(Some(_), Some(_)) => "repos: whitelist + blacklist".to_string(),
}
}
fn repo_lifecycle_label(mirror: &MirrorConfig) -> String {
format!(
"missing: {}, deletes: {}",
if mirror.create_missing {
"create"
} else {
"skip"
},
if mirror.delete_missing {
"propagate"
} else {
"keep"
match (mirror.repo_whitelist.len(), mirror.repo_blacklist.len()) {
(0, 0) => "repos: all names".to_string(),
(whitelist, 0) => format!("repos: whitelist {whitelist}"),
(0, blacklist) => format!("repos: blacklist {blacklist}"),
(whitelist, blacklist) => {
format!("repos: whitelist {whitelist}, blacklist {blacklist}")
}
)
}
}
fn conflict_resolution_label(strategy: &ConflictResolutionStrategy) -> &'static str {
+23 -169
View File
@@ -1,27 +1,14 @@
use std::cell::RefCell;
use std::fmt;
use std::io::{self, IsTerminal, Write};
use std::io::{self, Write};
use std::sync::{Arc, Mutex, OnceLock};
use console::style;
static OUTPUT: OnceLock<Mutex<OutputState>> = OnceLock::new();
static OUTPUT: OnceLock<Mutex<()>> = OnceLock::new();
thread_local! {
static REPO_LOG: RefCell<Option<ActiveRepoLog>> = const { RefCell::new(None) };
}
#[derive(Default)]
struct OutputState {
status: Option<StatusState>,
}
struct StatusState {
slots: Vec<Option<String>>,
visible: bool,
interactive: bool,
}
#[derive(Clone)]
pub(crate) struct RepoLogContext {
inner: Arc<RepoLog>,
@@ -33,20 +20,9 @@ struct ActiveRepoLog {
}
struct RepoLog {
repo_name: String,
slot: usize,
width: usize,
lines: Mutex<Vec<String>>,
}
pub struct StatusGuard;
impl Drop for StatusGuard {
fn drop(&mut self) {
finish_status_area();
}
}
pub struct RepoLogGuard;
impl Drop for RepoLogGuard {
@@ -55,29 +31,9 @@ impl Drop for RepoLogGuard {
}
}
pub fn start_status_area(slots: usize) -> StatusGuard {
with_output(|output| {
if let Some(status) = output.status.as_mut() {
clear_status(status);
}
output.status = Some(StatusState {
slots: vec![None; slots],
visible: false,
interactive: io::stdout().is_terminal() && slots > 0,
});
if let Some(status) = output.status.as_mut() {
draw_status(status);
}
});
StatusGuard
}
pub fn start_repo_log(repo_name: String, slot: usize, width: usize) -> RepoLogGuard {
pub fn start_repo_log() -> RepoLogGuard {
let context = RepoLogContext {
inner: Arc::new(RepoLog {
repo_name,
slot,
width,
lines: Mutex::new(Vec::new()),
}),
};
@@ -144,31 +100,13 @@ pub fn finish_repo_log() {
std::mem::take(&mut *lines)
};
with_output(|output| {
if let Some(status) = output.status.as_mut() {
clear_status(status);
if repo_log.slot < status.slots.len() {
status.slots[repo_log.slot] = None;
}
}
with_output(|| {
for line in lines {
println!("{line}");
}
if let Some(status) = output.status.as_mut() {
draw_status(status);
}
});
}
pub fn repo_prefix(repo_name: &str, width: usize) -> String {
let mut prefix = repo_name.chars().take(width).collect::<String>();
if repo_name.chars().count() > width && width > 0 {
prefix.pop();
prefix.push('~');
}
format!("{prefix:<width$}")
}
pub fn line(args: fmt::Arguments<'_>) {
let text = args.to_string();
let context = current_repo_log_context();
@@ -177,119 +115,35 @@ pub fn line(args: fmt::Arguments<'_>) {
return;
}
with_output(|output| {
if let Some(status) = output.status.as_mut() {
clear_status(status);
}
with_output(|| {
println!("{text}");
if let Some(status) = output.status.as_mut() {
draw_status(status);
}
});
}
fn capture_repo_line(context: &RepoLogContext, text: &str) {
let mut status_updates = Vec::new();
{
let mut lines = context
.inner
.lines
.lock()
.unwrap_or_else(|poisoned| poisoned.into_inner());
if text.is_empty() {
lines.push(String::new());
return;
}
for line in text.lines() {
lines.push(line.to_string());
if !line.trim().is_empty() {
status_updates.push(line.trim().to_string());
}
}
}
for line in status_updates {
update_status(context, &line);
}
}
fn update_status(context: &RepoLogContext, line: &str) {
let repo_log = &context.inner;
let repo = repo_prefix(&repo_log.repo_name, repo_log.width);
let line = truncate_status(line, 96);
with_output(|output| {
let Some(status) = output.status.as_mut() else {
return;
};
if repo_log.slot >= status.slots.len() {
return;
}
clear_status(status);
status.slots[repo_log.slot] = Some(format!(
"{} {} {}",
style(format!("worker {}", repo_log.slot + 1)).dim(),
style(repo).cyan().bold(),
line
));
draw_status(status);
});
}
fn truncate_status(value: &str, max_chars: usize) -> String {
if value.chars().count() <= max_chars {
return value.to_string();
}
let mut output = value.chars().take(max_chars).collect::<String>();
output.pop();
output.push('~');
output
}
fn finish_status_area() {
with_output(|output| {
if let Some(status) = output.status.as_mut() {
clear_status(status);
}
output.status = None;
});
}
fn with_output(action: impl FnOnce(&mut OutputState)) {
let output = OUTPUT.get_or_init(|| Mutex::new(OutputState::default()));
let mut output = output
let mut lines = context
.inner
.lines
.lock()
.unwrap_or_else(|poisoned| poisoned.into_inner());
action(&mut output);
if text.is_empty() {
lines.push(String::new());
return;
}
for line in text.lines() {
lines.push(line.to_string());
}
}
fn with_output(action: impl FnOnce()) {
let output = OUTPUT.get_or_init(|| Mutex::new(()));
let _output = output
.lock()
.unwrap_or_else(|poisoned| poisoned.into_inner());
action();
let _ = io::stdout().flush();
}
fn clear_status(status: &mut StatusState) {
if !status.interactive || !status.visible {
return;
}
let lines = status.slots.len();
print!("\x1b[{lines}A\r");
for _ in 0..lines {
println!("\x1b[2K");
}
print!("\x1b[{lines}A\r");
status.visible = false;
}
fn draw_status(status: &mut StatusState) {
if !status.interactive {
return;
}
for slot in &status.slots {
match slot {
Some(line) => println!("{line}"),
None => println!("{}", style("idle").dim()),
}
}
status.visible = true;
}
#[macro_export]
macro_rules! logln {
() => {
+9 -105
View File
@@ -40,12 +40,6 @@ pub struct PullRequestInfo {
pub url: Option<String>,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum WebhookInstallOutcome {
Created,
Existing,
}
pub fn list_mirror_repos(
config: &Config,
mirror: &MirrorConfig,
@@ -172,26 +166,13 @@ impl<'a> ProviderClient<'a> {
)
}
pub fn set_default_branch(
&self,
endpoint: &EndpointConfig,
repo_name: &str,
branch: &str,
) -> Result<()> {
dispatch_provider!(self.site.provider,
github => self.github_set_default_branch(endpoint, repo_name, branch),
gitlab => self.gitlab_set_default_branch(endpoint, repo_name, branch),
gitea_like => self.gitea_set_default_branch(endpoint, repo_name, branch),
)
}
pub fn install_webhook(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
url: &str,
secret: &str,
) -> Result<WebhookInstallOutcome> {
) -> Result<()> {
dispatch_provider!(self.site.provider,
github => self.github_install_webhook(endpoint, repo, url, secret),
gitlab => self.gitlab_install_webhook(endpoint, repo, url, secret),
@@ -330,24 +311,13 @@ impl<'a> ProviderClient<'a> {
self.delete(&url).map(|_| ())
}
fn github_set_default_branch(
&self,
endpoint: &EndpointConfig,
repo_name: &str,
branch: &str,
) -> Result<()> {
let url = self.repo_url(endpoint, repo_name, "GitHub")?;
self.patch_json::<serde_json::Value>(&url, &json!({ "default_branch": branch }))
.map(|_| ())
}
fn github_install_webhook(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
url: &str,
secret: &str,
) -> Result<WebhookInstallOutcome> {
) -> Result<()> {
let hooks_url = self.repo_hooks_url(endpoint, &repo.name, "GitHub")?;
let body = json!({
"name": "web",
@@ -465,11 +435,7 @@ impl<'a> ProviderClient<'a> {
projects.push(project);
}
}
Ok(projects
.into_iter()
.filter(|project| !project.is_deletion_scheduled())
.map(Into::into)
.collect())
Ok(projects.into_iter().map(Into::into).collect())
}
NamespaceKind::Org | NamespaceKind::Group => {
let encoded = urlencoding(&endpoint.namespace);
@@ -478,12 +444,7 @@ impl<'a> ProviderClient<'a> {
self.site.api_base(),
encoded
);
Ok(self
.paged_get::<GitlabProject>(&url)?
.into_iter()
.filter(|project| !project.is_deletion_scheduled())
.map(Into::into)
.collect())
self.paged_remote_repos::<GitlabProject>(&url)
}
}
}
@@ -535,17 +496,6 @@ impl<'a> ProviderClient<'a> {
self.delete(&url).map(|_| ())
}
fn gitlab_set_default_branch(
&self,
endpoint: &EndpointConfig,
repo_name: &str,
branch: &str,
) -> Result<()> {
let url = self.gitlab_project_url(endpoint, repo_name);
self.put_json::<serde_json::Value>(&url, &json!({ "default_branch": branch }))
.map(|_| ())
}
fn gitlab_group(&self, namespace: &str) -> Result<GitlabGroup> {
let url = format!("{}/groups/{}", self.site.api_base(), urlencoding(namespace));
self.get_json(&url)
@@ -576,7 +526,7 @@ impl<'a> ProviderClient<'a> {
repo: &RemoteRepo,
url: &str,
secret: &str,
) -> Result<WebhookInstallOutcome> {
) -> Result<()> {
let hooks_url = self.gitlab_hooks_url(endpoint, &repo.name);
let body = json!({
"url": url,
@@ -739,24 +689,13 @@ impl<'a> ProviderClient<'a> {
self.delete(&url).map(|_| ())
}
fn gitea_set_default_branch(
&self,
endpoint: &EndpointConfig,
repo_name: &str,
branch: &str,
) -> Result<()> {
let url = self.repo_url(endpoint, repo_name, "Gitea/Forgejo")?;
self.patch_json::<serde_json::Value>(&url, &json!({ "default_branch": branch }))
.map(|_| ())
}
fn gitea_install_webhook(
&self,
endpoint: &EndpointConfig,
repo: &RemoteRepo,
url: &str,
secret: &str,
) -> Result<WebhookInstallOutcome> {
) -> Result<()> {
let hooks_url = self.repo_hooks_url(endpoint, &repo.name, "Gitea/Forgejo")?;
let body = json!({
"type": "gitea",
@@ -936,10 +875,10 @@ impl<'a> ProviderClient<'a> {
target_url: &str,
body: &serde_json::Value,
put_on_update: bool,
) -> Result<WebhookInstallOutcome> {
) -> Result<()> {
let Some(hook) = self.find_existing_hook(hooks_url, target_url)? else {
self.post_json::<serde_json::Value>(hooks_url, body)?;
return Ok(WebhookInstallOutcome::Created);
return Ok(());
};
let update_url = format!("{hooks_url}/{}", hook.id);
@@ -948,7 +887,7 @@ impl<'a> ProviderClient<'a> {
} else {
self.patch_json::<serde_json::Value>(&update_url, body)?;
}
Ok(WebhookInstallOutcome::Existing)
Ok(())
}
fn delete_matching_hook(&self, hooks_url: &str, target_url: &str) -> Result<bool> {
@@ -1218,10 +1157,6 @@ struct GitlabProject {
http_url_to_repo: String,
visibility: String,
description: Option<String>,
marked_for_deletion_at: Option<String>,
marked_for_deletion_on: Option<String>,
#[serde(default)]
pending_delete: bool,
}
impl GitlabProject {
@@ -1249,37 +1184,6 @@ impl GitlabProject {
.eq_ignore_ascii_case(other.project_path()),
}
}
fn is_deletion_scheduled(&self) -> bool {
self.pending_delete
|| self
.marked_for_deletion_at
.as_deref()
.is_some_and(|value| !value.is_empty())
|| self
.marked_for_deletion_on
.as_deref()
.is_some_and(|value| !value.is_empty())
|| is_gitlab_deletion_scheduled_path(&self.name)
|| self
.path
.as_deref()
.is_some_and(is_gitlab_deletion_scheduled_path)
|| self
.path_with_namespace
.as_deref()
.and_then(|path| path.rsplit('/').next())
.is_some_and(is_gitlab_deletion_scheduled_path)
}
}
fn is_gitlab_deletion_scheduled_path(path: &str) -> bool {
let Some((name, project_id)) = path.rsplit_once("-deletion_scheduled-") else {
return false;
};
!name.is_empty()
&& !project_id.is_empty()
&& project_id.bytes().all(|byte| byte.is_ascii_digit())
}
#[derive(Deserialize)]
+28 -753
View File
@@ -3,7 +3,6 @@ use std::fs;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, mpsc};
use std::thread;
use std::time::{SystemTime, UNIX_EPOCH};
use anyhow::{Context, Result, bail};
use console::style;
@@ -11,11 +10,11 @@ use regex::Regex;
use crate::config::{
Config, ConflictResolutionStrategy, DEFAULT_JOBS, EndpointConfig, MirrorConfig, NamespaceKind,
ProviderKind, RepoNameFilter, SyncVisibility, Visibility, default_work_dir, validate_config,
RepoNameFilter, SyncVisibility, Visibility, default_work_dir, validate_config,
};
use crate::git::{
BranchConflict, BranchDeletion, BranchUpdate, GitMirror, Redactor, RefBackup, RemoteSpec,
is_disabled_repository_error, is_missing_repository_error, ls_remote_refs, safe_remote_name,
BranchConflict, BranchDeletion, BranchUpdate, GitMirror, Redactor, RemoteSpec,
is_disabled_repository_error, ls_remote_refs, safe_remote_name,
};
use crate::logging;
use crate::provider::{
@@ -38,7 +37,6 @@ use self::state::{
};
const CONFLICT_BRANCH_ROOT: &str = "refray/conflicts/";
const DEFAULT_BRANCH: &str = "main";
#[derive(Clone, Debug)]
pub struct SyncOptions {
@@ -141,83 +139,6 @@ pub fn sync_all(config: &Config, options: SyncOptions) -> Result<()> {
Ok(())
}
pub fn sync_webhook_repo(
config: &Config,
group: &str,
repo_name: &str,
work_dir: Option<PathBuf>,
jobs: usize,
) -> Result<()> {
validate_config(config)?;
if jobs == 0 {
bail!("jobs must be at least 1");
}
let work_dir = work_dir.unwrap_or_else(default_work_dir);
fs::create_dir_all(&work_dir)
.with_context(|| format!("failed to create {}", work_dir.display()))?;
let mirror = config
.mirrors
.iter()
.find(|mirror| mirror.name == group)
.with_context(|| format!("no mirror group matched '{group}'"))?;
let repo_filter = mirror.repo_filter()?;
if !repo_filter.matches(repo_name) {
crate::logln!(
" {} {} does not match configured repository filters",
style("skip").yellow().bold(),
style(repo_name).cyan()
);
return Ok(());
}
let tokens = config
.sites
.iter()
.map(|site| site.token())
.collect::<Result<Vec<_>>>()?;
let redactor = Redactor::new(tokens);
let mut ref_state = load_ref_state(&work_dir)?;
crate::logln!();
crate::logln!(
"{} {}",
style("Mirror group").cyan().bold(),
style(&mirror.name).bold()
);
let mut repos = targeted_endpoint_repos(config, mirror, repo_name)?;
let context = RepoSyncContext {
config,
mirror,
work_dir: &work_dir,
redactor,
dry_run: false,
jobs,
};
let outcome = sync_assumed_repo(
&context,
repo_name,
&mut repos,
mirror.create_missing,
&ref_state,
)?;
if !outcome.created_repos.is_empty() {
webhook::ensure_configured_webhooks(
config,
mirror,
&outcome.created_repos,
&work_dir,
jobs,
)?;
}
if let Some(update) = outcome.state_update {
match update {
RepoStateUpdate::Set(refs) => ref_state.set_repo(&mirror.name, repo_name, refs),
RepoStateUpdate::Remove => ref_state.remove_repo(&mirror.name, repo_name),
}
save_ref_state(&work_dir, &ref_state)?;
}
Ok(())
}
struct GroupSyncContext<'a> {
config: &'a Config,
options: &'a SyncOptions,
@@ -316,7 +237,6 @@ fn sync_group(
);
}
let repo_log_width = repo_log_width(&repo_names);
let repo_jobs = repo_names
.into_iter()
.map(|repo_name| {
@@ -339,11 +259,10 @@ fn sync_group(
let base_ref_state = context.ref_state.clone();
let queue = Arc::new(Mutex::new(repo_jobs));
let (sender, receiver) = mpsc::channel();
let use_status_area = worker_count > 1;
let use_repo_logs = worker_count > 1;
let jobs = context.options.jobs;
let _status_guard = use_status_area.then(|| logging::start_status_area(worker_count));
let failures = thread::scope(|scope| {
for worker_id in 0..worker_count {
for _ in 0..worker_count {
let queue = Arc::clone(&queue);
let sender = sender.clone();
let redactor = context.redactor.clone();
@@ -354,9 +273,7 @@ fn sync_group(
scope.spawn(move || {
while let Some(mut job) = pop_repo_job(&queue) {
let _repo_log_guard = use_status_area.then(|| {
logging::start_repo_log(job.repo_name.clone(), worker_id, repo_log_width)
});
let _repo_log_guard = use_repo_logs.then(logging::start_repo_log);
let repo_context = RepoSyncContext {
config,
mirror,
@@ -462,15 +379,6 @@ fn pop_repo_job(queue: &Arc<Mutex<VecDeque<RepoSyncJob>>>) -> Option<RepoSyncJob
.pop_front()
}
fn repo_log_width(repo_names: &BTreeSet<String>) -> usize {
repo_names
.iter()
.map(|name| name.chars().count())
.max()
.unwrap_or(4)
.clamp(4, 32)
}
struct RepoSyncJob {
repo_name: String,
existing: Vec<EndpointRepo>,
@@ -491,7 +399,7 @@ fn ensure_missing_repos(
repo_name: &str,
existing: &mut Vec<EndpointRepo>,
create_missing: bool,
) -> Result<Vec<EndpointRepo>> {
) -> Result<()> {
let present = existing
.iter()
.map(|repo| repo.endpoint.clone())
@@ -525,19 +433,13 @@ fn ensure_missing_repos(
style(format!("on {}", endpoint.label())).dim()
);
}
return Ok(Vec::new());
return Ok(());
}
let description = template.and_then(|repo| repo.description);
let expected_private = matches!(create_visibility, Visibility::Private);
let create_jobs = missing.into_iter().enumerate().collect::<Vec<_>>();
let created = crate::parallel::map(create_jobs, context.jobs, |(index, endpoint)| {
let site = context.config.site(&endpoint.site).unwrap();
if site.provider == ProviderKind::Gitlab && !is_supported_gitlab_project_path(repo_name) {
log_invalid_gitlab_project_name_skip(repo_name, &endpoint);
return Ok(None);
}
let mut created = crate::parallel::map(create_jobs, context.jobs, |(index, endpoint)| {
crate::logln!(
" {} {} {}",
style("create").green().bold(),
@@ -545,27 +447,16 @@ fn ensure_missing_repos(
style(format!("on {}", endpoint.label())).dim()
);
let site = context.config.site(&endpoint.site).unwrap();
let client = ProviderClient::new(site)?;
let created = match client.create_repo(
&endpoint,
repo_name,
&create_visibility,
description.as_deref(),
) {
Ok(created) => created,
Err(error)
if site.provider == ProviderKind::Gitlab
&& is_gitlab_invalid_project_name_error(&error) =>
{
log_invalid_gitlab_project_name_skip(repo_name, &endpoint);
return Ok(None);
}
Err(error) => {
return Err(error).with_context(|| {
format!("failed to create {} on {}", repo_name, endpoint.label())
});
}
};
let created = client
.create_repo(
&endpoint,
repo_name,
&create_visibility,
description.as_deref(),
)
.with_context(|| format!("failed to create {} on {}", repo_name, endpoint.label()))?;
if created.private != expected_private {
crate::logln!(
" {} created {} on {}, but provider reported a different visibility than requested",
@@ -574,23 +465,18 @@ fn ensure_missing_repos(
style(endpoint.label()).dim()
);
}
Ok(Some((
Ok((
index,
EndpointRepo {
endpoint,
repo: created,
},
)))
))
})?;
let mut created = created.into_iter().flatten().collect::<Vec<_>>();
created.sort_by_key(|(index, _)| *index);
let created = created
.into_iter()
.map(|(_, repo)| repo)
.collect::<Vec<_>>();
existing.extend(created.clone());
existing.extend(created.into_iter().map(|(_, repo)| repo));
Ok(created)
Ok(())
}
fn visibility_for_created_repo(mirror: &MirrorConfig, template: Option<&RemoteRepo>) -> Visibility {
@@ -605,50 +491,6 @@ fn visibility_for_created_repo(mirror: &MirrorConfig, template: Option<&RemoteRe
.unwrap_or_else(|| mirror.visibility.clone())
}
fn is_supported_gitlab_project_path(name: &str) -> bool {
if name.is_empty()
|| matches!(name.chars().next(), Some('-' | '_' | '.'))
|| matches!(name.chars().last(), Some('-' | '_' | '.'))
{
return false;
}
let lower = name.to_ascii_lowercase();
if lower.ends_with(".git") || lower.ends_with(".atom") {
return false;
}
name.chars()
.all(|ch| ch.is_ascii_alphanumeric() || matches!(ch, '_' | '-' | '.'))
}
fn log_invalid_gitlab_project_name_skip(repo_name: &str, endpoint: &EndpointConfig) {
crate::logln!(
" {} {} {}",
style("skip").yellow().bold(),
style(repo_name).cyan(),
style(format!(
"on {}: invalid GitLab project name/path",
endpoint.label()
))
.dim()
);
}
fn is_gitlab_invalid_project_name_error(error: &anyhow::Error) -> bool {
let text = error
.chain()
.map(ToString::to_string)
.collect::<Vec<_>>()
.join("\n")
.to_ascii_lowercase();
text.contains("400 bad request")
&& (text.contains("project_namespace.path")
|| text.contains("can only include non-accented letters")
|| text.contains("must not start with")
|| text.contains("must start with a letter"))
}
struct RepoSyncContext<'a> {
config: &'a Config,
mirror: &'a MirrorConfig,
@@ -661,7 +503,6 @@ struct RepoSyncContext<'a> {
#[derive(Default)]
struct RepoSyncOutcome {
state_update: Option<RepoStateUpdate>,
created_repos: Vec<EndpointRepo>,
}
enum RepoStateUpdate {
@@ -669,53 +510,6 @@ enum RepoStateUpdate {
Remove,
}
fn mirror_repo_path(context: &RepoSyncContext<'_>, repo_name: &str) -> PathBuf {
context
.work_dir
.join(safe_remote_name(&context.mirror.name))
.join(format!("{}.git", safe_remote_name(repo_name)))
}
fn targeted_endpoint_repos(
config: &Config,
mirror: &MirrorConfig,
repo_name: &str,
) -> Result<Vec<EndpointRepo>> {
mirror
.endpoints
.iter()
.map(|endpoint| {
let site = config.site(&endpoint.site).unwrap();
Ok(EndpointRepo {
endpoint: endpoint.clone(),
repo: RemoteRepo {
name: repo_name.to_string(),
clone_url: endpoint_clone_url(site, endpoint, repo_name)?,
private: matches!(mirror.visibility, Visibility::Private),
description: None,
},
})
})
.collect()
}
fn endpoint_clone_url(
site: &crate::config::SiteConfig,
endpoint: &EndpointConfig,
repo_name: &str,
) -> Result<String> {
let mut url = url::Url::parse(&site.base_url)
.with_context(|| format!("invalid base URL for site '{}'", site.name))?;
let base_path = url.path().trim_end_matches('/');
let repo_path = format!("{}/{}.git", endpoint.namespace.trim_matches('/'), repo_name);
if base_path.is_empty() {
url.set_path(&repo_path);
} else {
url.set_path(&format!("{base_path}/{repo_path}"));
}
Ok(url.to_string())
}
fn sync_repo(
context: &RepoSyncContext<'_>,
repo_name: &str,
@@ -766,18 +560,14 @@ fn sync_repo(
return Ok(RepoSyncOutcome::default());
}
let path = mirror_repo_path(context, repo_name);
let path = context
.work_dir
.join(safe_remote_name(&context.mirror.name))
.join(format!("{}.git", safe_remote_name(repo_name)));
let mirror_repo = GitMirror::open(path, context.redactor.clone(), context.dry_run)?;
mirror_repo.configure_remotes(&initial_remotes)?;
let cached_ref_state = cached_ref_state(&mirror_repo, &initial_remotes)?;
backup_branches_deleted_everywhere(
context,
&mirror_repo,
repo_name,
detailed_repo_ref_state(previous_repo_refs).or(cached_ref_state.as_ref()),
&initial_ref_state,
)?;
for remote in &initial_remotes {
if let Err(error) = mirror_repo.fetch_remote(remote) {
if is_disabled_repository_error(&error) {
@@ -793,7 +583,7 @@ fn sync_repo(
}
}
let created_repos = ensure_missing_repos(context, repo_name, repos, create_missing)?;
ensure_missing_repos(context, repo_name, repos, create_missing)?;
if repos.len() < 2 {
crate::logln!(
@@ -832,7 +622,6 @@ fn sync_repo(
let result = push_repo_refs(
context,
&mirror_repo,
repo_name,
&remotes,
repos,
detailed_repo_ref_state(previous_repo_refs).or(cached_ref_state.as_ref()),
@@ -843,193 +632,15 @@ fn sync_repo(
let Some(refs) = check_remote_refs(context, repo_name, &remotes)? else {
return Ok(RepoSyncOutcome::default());
};
set_default_branch_for_created_repos(context, repo_name, &created_repos, &refs)?;
refs
} else {
initial_ref_state
};
return Ok(RepoSyncOutcome {
state_update: Some(RepoStateUpdate::Set(refs)),
created_repos,
});
}
Ok(RepoSyncOutcome {
created_repos,
..RepoSyncOutcome::default()
})
}
fn set_default_branch_for_created_repos(
context: &RepoSyncContext<'_>,
repo_name: &str,
created_repos: &[EndpointRepo],
refs: &BTreeMap<String, RemoteRefState>,
) -> Result<()> {
if created_repos.is_empty() {
return Ok(());
}
let targets = created_repos
.iter()
.filter(|repo| {
refs.get(&remote_name_for_endpoint_repo(repo))
.is_some_and(|refs| refs.branches.contains_key(DEFAULT_BRANCH))
})
.cloned()
.collect::<Vec<_>>();
crate::parallel::map(targets, context.jobs, |repo| {
crate::logln!(
" {} branch {} {}",
style("default").green().bold(),
style(DEFAULT_BRANCH).cyan(),
style(format!("on {}", repo.endpoint.label())).dim()
);
let site = context.config.site(&repo.endpoint.site).unwrap();
ProviderClient::new(site)?
.set_default_branch(&repo.endpoint, repo_name, DEFAULT_BRANCH)
.with_context(|| {
format!(
"failed to set default branch for {} on {}",
repo_name,
repo.endpoint.label()
)
})?;
Ok(())
})?;
Ok(())
}
fn sync_assumed_repo(
context: &RepoSyncContext<'_>,
repo_name: &str,
repos: &mut [EndpointRepo],
create_missing: bool,
ref_state: &RefState,
) -> Result<RepoSyncOutcome> {
crate::logln!();
crate::logln!(
"{} {}",
style("Repo").magenta().bold(),
style(repo_name).bold()
);
let previous_repo_refs = ref_state.repo(&context.mirror.name, repo_name);
let all_remotes = remote_specs(context, repos)?;
let Some(initial_ref_check) = check_assumed_remote_refs(context, repo_name, &all_remotes)?
else {
return Ok(RepoSyncOutcome::default());
};
if initial_ref_check.refs.is_empty() {
crate::logln!(
" {} {}",
style("skip").yellow().bold(),
style("repository not found on any endpoint").dim()
);
return Ok(RepoSyncOutcome::default());
}
let existing_remote_names = initial_ref_check
.refs
.keys()
.cloned()
.collect::<BTreeSet<_>>();
let mut existing_repos = repos
.iter()
.filter(|repo| existing_remote_names.contains(&remote_name_for_endpoint_repo(repo)))
.cloned()
.collect::<Vec<_>>();
let existing_remotes = all_remotes
.iter()
.filter(|remote| existing_remote_names.contains(&remote.name))
.cloned()
.collect::<Vec<_>>();
let path = mirror_repo_path(context, repo_name);
let mirror_repo = GitMirror::open(path, context.redactor.clone(), context.dry_run)?;
mirror_repo.configure_remotes(&all_remotes)?;
let cached_ref_state = cached_ref_state(&mirror_repo, &existing_remotes)?;
backup_branches_deleted_everywhere(
context,
&mirror_repo,
repo_name,
detailed_repo_ref_state(previous_repo_refs).or(cached_ref_state.as_ref()),
&initial_ref_check.refs,
)?;
for remote in &existing_remotes {
if let Err(error) = mirror_repo.fetch_remote(remote) {
if is_disabled_repository_error(&error) {
crate::logln!(
" {} {} {}",
style("skip").yellow().bold(),
style(repo_name).cyan(),
style(format!("provider blocked access on {}", remote.display)).dim()
);
return Ok(RepoSyncOutcome::default());
}
if is_missing_repository_error(&error) {
crate::logln!(
" {} {} {}",
style("missing").yellow().bold(),
style(repo_name).cyan(),
style(format!("on {}", remote.display)).dim()
);
existing_repos.retain(|repo| remote_name_for_endpoint_repo(repo) != remote.name);
continue;
}
return Err(error).with_context(|| format!("failed to fetch {}", remote.display));
}
}
let created_repos =
ensure_missing_repos(context, repo_name, &mut existing_repos, create_missing)?;
if existing_repos.len() < 2 {
crate::logln!(
" {} {} {}",
style("skip").yellow().bold(),
style(repo_name).cyan(),
style("fewer than two endpoints have this repository").dim()
);
return Ok(RepoSyncOutcome {
created_repos,
..RepoSyncOutcome::default()
});
}
let remotes = remote_specs(context, &existing_repos)?;
mirror_repo.configure_remotes(&remotes)?;
let result = push_repo_refs(
context,
&mirror_repo,
repo_name,
&remotes,
&existing_repos,
detailed_repo_ref_state(previous_repo_refs).or(cached_ref_state.as_ref()),
&initial_ref_check.refs,
)?;
if !context.dry_run && !result.had_conflicts {
let refs = if result.pushed {
let Some(refs) = check_remote_refs(context, repo_name, &remotes)? else {
return Ok(RepoSyncOutcome {
created_repos,
..RepoSyncOutcome::default()
});
};
set_default_branch_for_created_repos(context, repo_name, &created_repos, &refs)?;
refs
} else {
initial_ref_check.refs
};
return Ok(RepoSyncOutcome {
state_update: Some(RepoStateUpdate::Set(refs)),
created_repos,
});
}
Ok(RepoSyncOutcome {
created_repos,
..RepoSyncOutcome::default()
})
Ok(RepoSyncOutcome::default())
}
fn handle_repo_deletion(
@@ -1058,10 +669,8 @@ fn handle_repo_deletion(
style(repo_name).cyan(),
deleted_remotes.join("+")
);
backup_deleted_repo(context, repo_name, repos, previous_refs, current_refs)?;
Ok(Some(RepoSyncOutcome {
state_update: (!context.dry_run).then_some(RepoStateUpdate::Remove),
..RepoSyncOutcome::default()
}))
}
RepoDeletionDecision::Propagate {
@@ -1075,11 +684,9 @@ fn handle_repo_deletion(
deleted_remotes.join("+"),
target_remotes.join("+")
);
backup_deleted_repo(context, repo_name, repos, previous_refs, current_refs)?;
delete_repos(context, repo_name, repos, &target_remotes)?;
Ok(Some(RepoSyncOutcome {
state_update: (!context.dry_run).then_some(RepoStateUpdate::Remove),
..RepoSyncOutcome::default()
}))
}
RepoDeletionDecision::Conflict {
@@ -1100,65 +707,6 @@ fn handle_repo_deletion(
}
}
fn backup_deleted_repo(
context: &RepoSyncContext<'_>,
repo_name: &str,
repos: &[EndpointRepo],
previous_refs: Option<&BTreeMap<String, RemoteRefState>>,
current_refs: &BTreeMap<String, RemoteRefState>,
) -> Result<()> {
if context.dry_run {
crate::logln!(
" {} {} {}",
style("dry-run").yellow().bold(),
style("would create local backup for deleted repo").dim(),
style(repo_name).cyan()
);
return Ok(());
}
let path = mirror_repo_path(context, repo_name);
if repos.is_empty() && !path.exists() {
bail!(
"cannot back up deleted repo {} because local mirror cache {} is missing",
repo_name,
path.display()
);
}
let mirror_repo = GitMirror::open(path, context.redactor.clone(), false)?;
if !repos.is_empty() {
let remotes = remote_specs(context, repos)?;
mirror_repo.configure_remotes(&remotes)?;
for remote in &remotes {
mirror_repo.fetch_remote(remote).with_context(|| {
format!("failed to fetch {} for deletion backup", remote.display)
})?;
}
}
let stamp = backup_stamp()?;
let refs_to_backup = if current_refs.is_empty() {
previous_refs.unwrap_or(current_refs)
} else {
current_refs
};
let backups = repo_ref_backups(repo_name, refs_to_backup, &stamp);
if backups.is_empty() {
crate::logln!(
" {} {} has no refs to bundle before deletion",
style("backup").yellow().bold(),
style(repo_name).cyan()
);
return Ok(());
}
let refs = mirror_repo.backup_refs(&backups)?;
let bundle_path = backup_dir(context, repo_name).join(format!("repo-{stamp}.bundle"));
mirror_repo.create_bundle(&bundle_path, &refs)?;
Ok(())
}
fn delete_repos(
context: &RepoSyncContext<'_>,
repo_name: &str,
@@ -1285,69 +833,6 @@ fn check_remote_refs(
Ok(Some(refs))
}
struct AssumedRemoteRefState {
refs: BTreeMap<String, RemoteRefState>,
}
fn check_assumed_remote_refs(
context: &RepoSyncContext<'_>,
repo_name: &str,
remotes: &[RemoteSpec],
) -> Result<Option<AssumedRemoteRefState>> {
enum RemoteRefCheck {
Found(String, RemoteRefState),
Missing(String),
Blocked,
}
let ref_jobs = remotes.to_vec();
let results = crate::parallel::map(ref_jobs, context.jobs, |remote| {
crate::logln!(
" {} {}",
style("probe refs").cyan().bold(),
style(&remote.display).dim()
);
match ls_remote_refs(&remote, &context.redactor) {
Ok(snapshot) => Ok(RemoteRefCheck::Found(remote.name, snapshot.into())),
Err(error) if is_missing_repository_error(&error) => {
crate::logln!(
" {} {} {}",
style("missing").yellow().bold(),
style(repo_name).cyan(),
style(format!("on {}", remote.display)).dim()
);
Ok(RemoteRefCheck::Missing(remote.name))
}
Err(error) if is_disabled_repository_error(&error) => {
crate::logln!(
" {} {} {}",
style("skip").yellow().bold(),
style(repo_name).cyan(),
style(format!("provider blocked access on {}", remote.display)).dim()
);
Ok(RemoteRefCheck::Blocked)
}
Err(error) => {
Err(error).with_context(|| format!("failed to check refs for {}", remote.display))
}
}
})?;
let mut refs = BTreeMap::new();
for result in results {
match result {
RemoteRefCheck::Found(remote, refs_for_remote) => {
refs.insert(remote, refs_for_remote);
}
RemoteRefCheck::Missing(remote) => {
let _ = remote;
}
RemoteRefCheck::Blocked => return Ok(None),
}
}
Ok(Some(AssumedRemoteRefState { refs }))
}
fn remote_specs(context: &RepoSyncContext<'_>, repos: &[EndpointRepo]) -> Result<Vec<RemoteSpec>> {
let endpoint_map = context
.mirror
@@ -1376,7 +861,6 @@ fn remote_specs(context: &RepoSyncContext<'_>, repos: &[EndpointRepo]) -> Result
fn push_repo_refs(
context: &RepoSyncContext<'_>,
mirror_repo: &GitMirror,
repo_name: &str,
remotes: &[RemoteSpec],
repos: &[EndpointRepo],
previous_refs: Option<&BTreeMap<String, RemoteRefState>>,
@@ -1475,13 +959,6 @@ fn push_repo_refs(
{
if !branch_deletions.is_empty() {
print_branch_deletions(&branch_deletions);
backup_deleted_branches(
context,
mirror_repo,
repo_name,
&branch_deletions,
current_refs,
)?;
mirror_repo.delete_branches(remotes, &branch_deletions)?;
}
if !cleanup_branches.is_empty() {
@@ -1504,13 +981,6 @@ fn push_repo_refs(
}
if !branch_deletions.is_empty() {
print_branch_deletions(&branch_deletions);
backup_deleted_branches(
context,
mirror_repo,
repo_name,
&branch_deletions,
current_refs,
)?;
mirror_repo.delete_branches(remotes, &branch_deletions)?;
}
if !branches_to_push.is_empty() {
@@ -1548,64 +1018,6 @@ fn push_repo_refs(
})
}
fn backup_deleted_branches(
context: &RepoSyncContext<'_>,
mirror_repo: &GitMirror,
repo_name: &str,
deletions: &[BranchDeletion],
current_refs: &BTreeMap<String, RemoteRefState>,
) -> Result<()> {
if context.dry_run {
crate::logln!(
" {} {} deleted branch backup{}",
style("dry-run").yellow().bold(),
style("would create").dim(),
if deletions.len() == 1 { "" } else { "s" }
);
return Ok(());
}
let stamp = backup_stamp()?;
let backups = branch_ref_backups(deletions, current_refs, &stamp);
if backups.is_empty() {
bail!("cannot back up branch deletion because no target branch refs were available");
}
let refs = mirror_repo.backup_refs(&backups)?;
let bundle_path = backup_dir(context, repo_name).join(format!("branches-{stamp}.bundle"));
mirror_repo.create_bundle(&bundle_path, &refs)?;
Ok(())
}
fn backup_branches_deleted_everywhere(
context: &RepoSyncContext<'_>,
mirror_repo: &GitMirror,
repo_name: &str,
previous_refs: Option<&BTreeMap<String, RemoteRefState>>,
current_refs: &BTreeMap<String, RemoteRefState>,
) -> Result<()> {
let Some(previous_refs) = previous_refs else {
return Ok(());
};
let stamp = backup_stamp()?;
let backups = branches_deleted_everywhere_backups(previous_refs, current_refs, &stamp);
if backups.is_empty() {
return Ok(());
}
if context.dry_run {
crate::logln!(
" {} {} branch backup{} for refs deleted everywhere",
style("dry-run").yellow().bold(),
style("would create").dim(),
if backups.len() == 1 { "" } else { "s" }
);
return Ok(());
}
let refs = mirror_repo.backup_refs(&backups)?;
let bundle_path = backup_dir(context, repo_name).join(format!("branches-{stamp}.bundle"));
mirror_repo.create_bundle(&bundle_path, &refs)?;
Ok(())
}
enum BranchConflictResolution {
Rebased(Vec<BranchUpdate>),
PullRequest(BranchConflict),
@@ -1917,140 +1329,6 @@ fn conflict_pr_base_branch(branch: &str) -> Option<String> {
decode_hex_component(encoded)
}
fn backup_dir(context: &RepoSyncContext<'_>, repo_name: &str) -> PathBuf {
context
.work_dir
.join("backups")
.join(safe_remote_name(&context.mirror.name))
.join(safe_remote_name(repo_name))
}
fn backup_stamp() -> Result<String> {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.with_context(|| "system clock is before UNIX_EPOCH")?;
Ok(format!("{}-{:09}", now.as_secs(), now.subsec_nanos()))
}
fn branch_ref_backups(
deletions: &[BranchDeletion],
current_refs: &BTreeMap<String, RemoteRefState>,
stamp: &str,
) -> Vec<RefBackup> {
let mut backups = Vec::new();
let mut seen = BTreeSet::new();
for deletion in deletions {
for remote in &deletion.target_remotes {
let Some(sha) = current_refs
.get(remote)
.and_then(|refs| refs.branches.get(&deletion.branch))
else {
continue;
};
if !seen.insert((deletion.branch.clone(), sha.clone())) {
continue;
}
backups.push(RefBackup {
refname: format!(
"refs/refray-backups/branches/{}/{}/{}",
hex_component(&deletion.branch),
stamp,
hex_component(remote)
),
sha: sha.clone(),
description: format!(
"branch {} from {} before propagated deletion",
deletion.branch, remote
),
});
}
}
backups
}
fn branches_deleted_everywhere_backups(
previous_refs: &BTreeMap<String, RemoteRefState>,
current_refs: &BTreeMap<String, RemoteRefState>,
stamp: &str,
) -> Vec<RefBackup> {
let mut branches = BTreeSet::new();
for refs in previous_refs.values() {
branches.extend(
refs.branches
.keys()
.filter(|branch| !is_internal_conflict_branch(branch))
.cloned(),
);
}
let mut backups = Vec::new();
for branch in branches {
if current_refs
.values()
.any(|refs| refs.branches.contains_key(&branch))
{
continue;
}
let mut seen_shas = BTreeSet::new();
for (remote, refs) in previous_refs {
let Some(sha) = refs.branches.get(&branch) else {
continue;
};
if !seen_shas.insert(sha.clone()) {
continue;
}
backups.push(RefBackup {
refname: format!(
"refs/refray-backups/branches/{}/{}/deleted-everywhere-{}",
hex_component(&branch),
stamp,
hex_component(remote)
),
sha: sha.clone(),
description: format!(
"branch {branch} from {remote} before all endpoints pruned it"
),
});
}
}
backups
}
fn repo_ref_backups(
repo_name: &str,
refs_by_remote: &BTreeMap<String, RemoteRefState>,
stamp: &str,
) -> Vec<RefBackup> {
let mut backups = Vec::new();
for (remote, refs) in refs_by_remote {
for (branch, sha) in &refs.branches {
backups.push(RefBackup {
refname: format!(
"refs/refray-backups/repos/{}/{}/heads/{}",
stamp,
hex_component(remote),
hex_component(branch)
),
sha: sha.clone(),
description: format!("repo {repo_name} branch {branch} from {remote}"),
});
}
for (tag, sha) in &refs.tags {
backups.push(RefBackup {
refname: format!(
"refs/refray-backups/repos/{}/{}/tags/{}",
stamp,
hex_component(remote),
hex_component(tag)
),
sha: sha.clone(),
description: format!("repo {repo_name} tag {tag} from {remote}"),
});
}
}
backups
}
fn hex_component(value: &str) -> String {
const HEX: &[u8; 16] = b"0123456789abcdef";
let mut output = String::with_capacity(value.len() * 2);
@@ -2213,9 +1491,6 @@ fn repo_deletion_decision(
previous_refs: Option<&BTreeMap<String, RemoteRefState>>,
current_refs: &BTreeMap<String, RemoteRefState>,
) -> RepoDeletionDecision {
if !mirror.delete_missing {
return RepoDeletionDecision::None;
}
let Some(previous_refs) = previous_refs else {
return RepoDeletionDecision::None;
};
+58 -64
View File
@@ -8,6 +8,7 @@ use std::time::Duration;
use anyhow::{Context, Result, bail};
use console::style;
use hmac::{Hmac, KeyInit, Mac};
use regex::escape;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use sha2::Sha256;
@@ -17,11 +18,9 @@ use crate::config::{
Config, EndpointConfig, MirrorConfig, ProviderKind, RepoNameFilter, default_work_dir,
validate_config,
};
use crate::provider::{
EndpointRepo, ProviderClient, RemoteRepo, WebhookInstallOutcome, list_mirror_repos,
};
use crate::provider::{EndpointRepo, ProviderClient, RemoteRepo, list_mirror_repos};
use crate::state::{load_toml_or_default, save_toml};
use crate::sync::{SyncOptions, sync_all, sync_webhook_repo};
use crate::sync::{SyncOptions, sync_all};
type HmacSha256 = Hmac<Sha256>;
const WEBHOOK_STATE_FILE: &str = "webhook-state.toml";
@@ -152,7 +151,6 @@ fn full_sync_timer_loop(
&config,
SyncOptions {
work_dir: work_dir.clone(),
jobs: config.jobs,
..SyncOptions::default()
},
) {
@@ -377,12 +375,15 @@ fn worker_loop(
let _sync_guard = sync_lock
.lock()
.unwrap_or_else(|poisoned| poisoned.into_inner());
let result = sync_webhook_repo(
let result = sync_all(
&config,
&job.group,
&job.repo,
work_dir.clone(),
config.jobs,
SyncOptions {
group: Some(job.group.clone()),
repo_pattern: Some(format!("^{}$", escape(&job.repo))),
work_dir: work_dir.clone(),
jobs: 1,
..SyncOptions::default()
},
);
match result {
Ok(()) => crate::logln!(
@@ -574,73 +575,66 @@ fn run_uninstall_tasks(tasks: Vec<WebhookUninstallTask>, jobs: usize) -> Result<
fn install_webhook_task(task: WebhookInstallTask, state: &Arc<Mutex<WebhookState>>) -> Result<()> {
let key = webhook_installation_key(&task.group, &task.endpoint, &task.repo.name);
crate::logln!(
" {} {} {}",
style(if task.dry_run {
"would install"
} else {
"install"
})
.green()
.bold(),
style(&task.repo.name).cyan(),
style(format!("webhook on {}", task.endpoint.label())).dim()
);
if task.dry_run {
crate::logln!(
" {} {} {}",
style("would install").green().bold(),
style(&task.repo.name).cyan(),
style(format!("webhook on {}", task.endpoint.label())).dim()
);
return Ok(());
}
let client = ProviderClient::new(&task.site)?;
match client.install_webhook(&task.endpoint, &task.repo, &task.url, &task.secret) {
Ok(outcome) => {
let action = match outcome {
WebhookInstallOutcome::Created => "install",
WebhookInstallOutcome::Existing => "exists",
};
if let Err(error) = client.install_webhook(&task.endpoint, &task.repo, &task.url, &task.secret)
{
if is_duplicate_webhook_error(&error) {
crate::logln!(
" {} {} {}",
style(action).green().bold(),
style("exists").green().bold(),
style(&task.repo.name).cyan(),
style(format!("webhook on {}", task.endpoint.label())).dim()
);
record_webhook_installation(state, key, task);
Ok(())
return Ok(());
}
Err(error) => {
if is_duplicate_webhook_error(&error) {
crate::logln!(
" {} {} {}",
style("exists").green().bold(),
style(&task.repo.name).cyan(),
style(format!("webhook on {}", task.endpoint.label())).dim()
);
record_webhook_installation(state, key, task);
return Ok(());
}
if let Some(reason) = non_actionable_webhook_failure_reason(&error) {
crate::logln!(
" {} {} {}",
style("skip").yellow().bold(),
style(&task.repo.name).cyan(),
style(format!("webhook on {}: {reason}", task.endpoint.label())).dim()
);
let mut state = state
.lock()
.unwrap_or_else(|poisoned| poisoned.into_inner());
state.skipped.insert(
key,
SkippedWebhookInstallation {
group: task.group,
endpoint: task.endpoint,
repo: task.repo.name,
url: task.url,
reason,
},
);
return Ok(());
}
Err(error).with_context(|| {
format!(
"failed to install webhook for {} on {}",
task.repo.name,
task.endpoint.label()
)
})
if let Some(reason) = non_actionable_webhook_failure_reason(&error) {
crate::logln!(
" {} {} {}",
style("skip").yellow().bold(),
style(&task.repo.name).cyan(),
style(format!("webhook on {}: {reason}", task.endpoint.label())).dim()
);
let mut state = state
.lock()
.unwrap_or_else(|poisoned| poisoned.into_inner());
state.skipped.insert(
key,
SkippedWebhookInstallation {
group: task.group,
endpoint: task.endpoint,
repo: task.repo.name,
url: task.url,
reason,
},
);
return Ok(());
}
return Err(error).with_context(|| {
format!(
"failed to install webhook for {} on {}",
task.repo.name,
task.endpoint.label()
)
});
}
record_webhook_installation(state, key, task);
Ok(())
}
fn record_webhook_installation(
+5 -96
View File
@@ -339,9 +339,8 @@ secret = {{ value = "{WEBHOOK_SECRET}" }}
[[mirrors]]
name = "all"
sync_visibility = "all"
repo_whitelist = '{}'
repo_whitelist = ['{}']
create_missing = {}
delete_missing = true
visibility = "public"
conflict_resolution = "{}"
@@ -380,10 +379,9 @@ namespace = "{}"
git(&work, ["tag", "v1.0.0"])?;
let remote_url = source.authenticated_repo_url(&repo)?;
self.git(&work, ["remote", "add", "origin", &remote_url])?;
self.git_retry(
self.git(
&work,
["push", "origin", "HEAD:main", "feature/github", "v1.0.0"],
"initial seed push",
)?;
source.wait_branch(
&repo,
@@ -394,7 +392,6 @@ namespace = "{}"
source.wait_repo_listed(&repo)?;
self.sync_repo(&repo, [])?;
self.assert_branch_all_equal_after_optional_resync(&repo, MAIN_BRANCH)?;
self.assert_default_branch_all_except(&repo, MAIN_BRANCH, &source.site_name)?;
self.assert_branch_all_equal(&repo, "feature/github")?;
self.assert_tag_all_equal(&repo, "v1.0.0")?;
@@ -618,7 +615,6 @@ namespace = "{}"
source.wait_branch_absent(&repo, "delete-me")?;
self.sync_repo(&repo, [])?;
self.assert_branch_absent_everywhere(&repo, "delete-me")?;
self.assert_backup_bundle_contains(&repo, "refs/refray-backups/branches/")?;
Ok(())
}
@@ -633,7 +629,6 @@ namespace = "{}"
source.wait_repo_absent(&repo)?;
self.sync_repo(&repo, [])?;
self.assert_repo_absent_everywhere(&repo)?;
self.assert_backup_bundle_contains(&repo, "refs/refray-backups/repos/")?;
Ok(())
}
@@ -701,7 +696,7 @@ namespace = "{}"
)?;
let remote_url = provider.authenticated_repo_url(repo)?;
self.git(&work, ["remote", "add", "origin", &remote_url])?;
self.git_retry(&work, ["push", "origin", "HEAD:main"], "seed push")?;
self.git(&work, ["push", "origin", "HEAD:main"])?;
provider.wait_branch(
repo,
MAIN_BRANCH,
@@ -728,11 +723,7 @@ namespace = "{}"
for provider in &self.settings.providers {
let remote_url = provider.authenticated_repo_url(repo)?;
self.git(&work, ["remote", "add", &provider.site_name, &remote_url])?;
self.git_retry(
&work,
["push", &provider.site_name, "HEAD:main"],
"seed-all push",
)?;
self.git(&work, ["push", &provider.site_name, "HEAD:main"])?;
provider.wait_branch(repo, MAIN_BRANCH, &sha)?;
provider.wait_repo_listed(repo)?;
provider.unprotect_branch(repo, MAIN_BRANCH)?;
@@ -793,15 +784,11 @@ namespace = "{}"
assert_output_success(output, "git", &self.redactor)
}
fn git_retry<const N: usize>(&self, path: &Path, args: [&str; N], label: &str) -> Result<()> {
retry(label, || self.git(path, args))
}
fn set_repo_whitelist(&self, pattern: &str) -> Result<()> {
let contents = fs::read_to_string(&self.config_path)
.with_context(|| format!("failed to read {}", self.config_path.display()))?;
let escaped_pattern = pattern.replace('\'', "''");
let replacement = format!("repo_whitelist = '{escaped_pattern}'");
let replacement = format!("repo_whitelist = ['{escaped_pattern}']");
let mut replaced = false;
let mut updated = contents
.lines()
@@ -1074,30 +1061,6 @@ namespace = "{}"
}
}
fn assert_default_branch_all_except(
&self,
repo: &str,
branch: &str,
excluded_site: &str,
) -> Result<()> {
retry("default branch metadata", || {
for provider in &self.settings.providers {
if provider.site_name == excluded_site {
continue;
}
let actual = provider.default_branch(repo)?;
if actual.as_deref() != Some(branch) {
bail!(
"expected default branch {branch} on {} for {repo}, got {:?}",
provider.site_name,
actual
);
}
}
Ok(())
})
}
fn assert_tag_all_equal(&self, repo: &str, tag: &str) -> Result<()> {
retry("tag convergence", || {
let refs = self.refs_by_provider(repo)?;
@@ -1127,29 +1090,6 @@ namespace = "{}"
})
}
fn assert_backup_bundle_contains(&self, repo: &str, marker: &str) -> Result<()> {
let bundles = self.backup_bundles_for_repo(repo)?;
for bundle in &bundles {
let output = Command::new("git")
.args(["bundle", "list-heads", bundle.to_str().unwrap()])
.output()
.context("failed to run git bundle list-heads")?;
if output.status.success() && String::from_utf8_lossy(&output.stdout).contains(marker) {
return Ok(());
}
}
bail!(
"no local backup bundle for {repo} contained {marker}; checked {:?}",
bundles
)
}
fn backup_bundles_for_repo(&self, repo: &str) -> Result<Vec<PathBuf>> {
let mut bundles = Vec::new();
collect_backup_bundles(&self.cache_home, repo, &mut bundles)?;
Ok(bundles)
}
fn assert_conflict_branch_exists(&self, repo: &str) -> Result<()> {
retry("conflict branch", || {
for refs in self.refs_by_provider(repo)?.values() {
@@ -1421,17 +1361,6 @@ impl ProviderAccount {
}
}
fn default_branch(&self, repo: &str) -> Result<Option<String>> {
let value = self
.get_json::<Value>(&self.repo_api_url(repo))
.with_context(|| format!("failed to inspect {} default branch", self.site_name))?;
Ok(value
.get("default_branch")
.and_then(Value::as_str)
.filter(|branch| !branch.is_empty())
.map(ToOwned::to_owned))
}
fn wait_repo_present(&self, repo: &str) -> Result<()> {
retry("repo present", || {
if self.repo_exists(repo)? {
@@ -2022,26 +1951,6 @@ fn assert_output_success(output: Output, label: &str, redactor: &Redactor) -> Re
)
}
fn collect_backup_bundles(dir: &Path, repo: &str, output: &mut Vec<PathBuf>) -> Result<()> {
if !dir.exists() {
return Ok(());
}
for entry in fs::read_dir(dir).with_context(|| format!("failed to read {}", dir.display()))? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
collect_backup_bundles(&path, repo, output)?;
continue;
}
if path.extension().and_then(|value| value.to_str()) == Some("bundle")
&& path.to_string_lossy().contains(repo)
{
output.push(path);
}
}
Ok(())
}
fn retry(label: &str, mut action: impl FnMut() -> Result<()>) -> Result<()> {
let mut last_error = None;
for _ in 0..30 {
+15 -45
View File
@@ -22,10 +22,9 @@ fn parses_value_tokens() {
[[mirrors]]
name = "personal"
sync_visibility = "public"
repo_whitelist = "^important-|-mirror$"
repo_blacklist = "-archive$"
repo_whitelist = ["^important-", "-mirror$"]
repo_blacklist = ["-archive$"]
create_missing = true
delete_missing = false
visibility = "private"
conflict_resolution = "auto_rebase_pull_request"
@@ -52,13 +51,12 @@ fn parses_value_tokens() {
assert_eq!(config.mirrors[0].sync_visibility, SyncVisibility::Public);
assert_eq!(
config.mirrors[0].repo_whitelist,
Some("^important-|-mirror$".to_string())
vec!["^important-".to_string(), "-mirror$".to_string()]
);
assert_eq!(
config.mirrors[0].repo_blacklist,
Some("-archive$".to_string())
vec!["-archive$".to_string()]
);
assert!(!config.mirrors[0].delete_missing);
let webhook = config.webhook.unwrap();
assert!(webhook.install);
assert_eq!(webhook.url, "https://mirror.example.test/webhook");
@@ -94,30 +92,6 @@ fn config_defaults_jobs() {
assert_eq!(config.jobs, DEFAULT_JOBS);
}
#[test]
fn mirror_defaults_to_deleting_missing_repos_for_existing_configs() {
let config: Config = toml::from_str(
r#"
[[mirrors]]
name = "personal"
create_missing = true
[[mirrors.endpoints]]
site = "github"
kind = "user"
namespace = "alice"
[[mirrors.endpoints]]
site = "gitea"
kind = "user"
namespace = "alice"
"#,
)
.unwrap();
assert!(config.mirrors[0].delete_missing);
}
#[test]
fn validation_rejects_unknown_sites_and_single_endpoint_groups() {
let config = Config {
@@ -131,10 +105,9 @@ fn validation_rejects_unknown_sites_and_single_endpoint_groups() {
namespace: "alice".to_string(),
}],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -161,10 +134,9 @@ fn validation_rejects_unknown_sites_and_single_endpoint_groups() {
},
],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -227,8 +199,8 @@ fn sync_visibility_matches_repo_privacy() {
#[test]
fn repo_name_filter_applies_whitelist_then_blacklist() {
let mut mirror = mirror_config();
mirror.repo_whitelist = Some("^important-|-mirror$".to_string());
mirror.repo_blacklist = Some("-archive$".to_string());
mirror.repo_whitelist = vec!["^important-".to_string(), "-mirror$".to_string()];
mirror.repo_blacklist = vec!["-archive$".to_string()];
let filter = mirror.repo_filter().unwrap();
assert!(filter.matches("important-api"));
@@ -245,7 +217,7 @@ fn validation_rejects_invalid_repo_filter_regex() {
mirrors: vec![mirror_config()],
webhook: None,
};
config.mirrors[0].repo_whitelist = Some("(".to_string());
config.mirrors[0].repo_whitelist = vec!["(".to_string()];
let err = validate_config(&config).unwrap_err().to_string();
@@ -266,10 +238,9 @@ fn validation_rejects_duplicate_mirror_endpoints() {
name: "broken".to_string(),
endpoints: vec![duplicate.clone(), duplicate],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -313,10 +284,9 @@ fn mirror_config() -> MirrorConfig {
},
],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}
-51
View File
@@ -41,19 +41,6 @@ fn detects_provider_disabled_repository_errors() {
assert!(!is_disabled_repository_error(&generic_forbidden));
}
#[test]
fn detects_missing_repository_errors() {
let error: anyhow::Error = GitCommandError::new(
"git ls-remote",
"",
"remote: Repository not found.\nfatal: repository 'https://github.com/alice/missing.git/' not found",
)
.into();
assert!(is_missing_repository_error(&error));
assert!(!is_disabled_repository_error(&error));
}
#[test]
fn ls_remote_snapshot_changes_when_remote_refs_change() {
let fixture = GitFixture::new();
@@ -288,44 +275,6 @@ fn delete_branches_removes_branch_from_target_remotes() {
assert!(!fixture.remote_ref_exists(&fixture.remote_b, "refs/heads/main"));
}
#[test]
fn backup_refs_create_restorable_bundle_before_branch_delete() {
let fixture = GitFixture::new();
let expected = fixture.commit("base", "base", 1_700_000_000);
fixture.push_head(&fixture.remote_a, "main");
fixture.push_head(&fixture.remote_b, "main");
let mirror = fixture.mirror();
fixture.fetch_all(&mirror);
let backup_ref = "refs/refray-backups/branches/main/test/a".to_string();
mirror
.backup_refs(&[RefBackup {
refname: backup_ref.clone(),
sha: expected.clone(),
description: "branch main before delete".to_string(),
}])
.unwrap();
let bundle = fixture._temp.path().join("branch-backup.bundle");
mirror
.create_bundle(&bundle, std::slice::from_ref(&backup_ref))
.unwrap();
mirror
.delete_branches(
&fixture.remotes(),
&[BranchDeletion {
branch: "main".to_string(),
deleted_remotes: vec!["a".to_string()],
target_remotes: vec!["b".to_string()],
}],
)
.unwrap();
let heads = git_output(None, ["bundle", "list-heads", bundle.to_str().unwrap()]);
assert!(heads.contains(&expected));
assert!(heads.contains(&backup_ref));
}
#[test]
fn tag_decisions_mirror_matching_or_missing_tags_and_skip_divergent_tags() {
let fixture = GitFixture::new();
+15 -65
View File
@@ -14,8 +14,6 @@ fn wizard_builds_sync_group_from_profile_urls() {
"",
"",
"",
"",
"",
"n",
"4",
]
@@ -48,7 +46,6 @@ fn wizard_builds_sync_group_from_profile_urls() {
assert_eq!(config.mirrors[0].endpoints[1].namespace, "azalea");
assert_eq!(config.mirrors[0].sync_visibility, SyncVisibility::All);
assert!(config.mirrors[0].create_missing);
assert!(config.mirrors[0].delete_missing);
assert_eq!(config.mirrors[0].visibility, Visibility::Private);
assert_eq!(
config.mirrors[0].conflict_resolution,
@@ -57,9 +54,6 @@ fn wizard_builds_sync_group_from_profile_urls() {
let output = String::from_utf8(output).unwrap();
assert!(output.contains("1. github.com/hykilpikonna <-> gitea.example.test/azalea"));
assert!(output.contains("Deletion backups: refray keeps a local backup"));
assert!(output.contains("Create repositories that are missing from an endpoint?"));
assert!(output.contains("delete it everywhere?"));
assert!(output.contains("Add another sync group"));
assert!(output.contains("Edit an existing group"));
assert!(output.contains("Delete an existing group"));
@@ -83,8 +77,6 @@ fn wizard_can_build_three_way_sync() {
"",
"",
"",
"",
"",
"n",
"4",
]
@@ -100,35 +92,6 @@ fn wizard_can_build_three_way_sync() {
assert_eq!(config.sites.len(), 3);
}
#[test]
fn wizard_can_disable_missing_repo_creation_and_repo_delete_propagation() {
let input = [
"https://github.com/alice",
"gh-token",
"",
"https://gitea.example.test/alice",
"gt-token",
"",
"n",
"",
"",
"n",
"n",
"",
"n",
"4",
]
.join("\n")
+ "\n";
let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new();
let config = run_config_wizard_with_io(Config::default(), &mut reader, &mut output).unwrap();
assert!(!config.mirrors[0].create_missing);
assert!(!config.mirrors[0].delete_missing);
}
#[test]
fn wizard_can_enable_webhooks() {
let input = [
@@ -142,8 +105,6 @@ fn wizard_can_enable_webhooks() {
"",
"",
"",
"",
"",
"y",
"https://mirror.example.test/webhook",
"y",
@@ -198,8 +159,6 @@ fn wizard_reuses_existing_credentials_for_same_instance() {
"",
"",
"",
"",
"",
"n",
"4",
]
@@ -252,10 +211,9 @@ fn wizard_starts_existing_config_at_sync_group_menu() {
},
],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -282,10 +240,9 @@ fn wizard_can_ask_to_run_full_sync_after_config() {
name: "sync-1".to_string(),
endpoints: Vec::new(),
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -359,10 +316,9 @@ fn wizard_edits_existing_sync_group_from_menu() {
},
],
sync_visibility: SyncVisibility::Private,
repo_whitelist: Some("^important-".to_string()),
repo_blacklist: Some("-archive$".to_string()),
repo_whitelist: vec!["^important-".to_string()],
repo_blacklist: vec!["-archive$".to_string()],
create_missing: false,
delete_missing: true,
visibility: Visibility::Public,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -381,8 +337,6 @@ fn wizard_edits_existing_sync_group_from_menu() {
"^public-",
"-skip$",
"",
"",
"",
"n",
"4",
]
@@ -402,10 +356,9 @@ fn wizard_edits_existing_sync_group_from_menu() {
assert_eq!(mirror.endpoints[1].site, "gitlab");
assert_eq!(mirror.endpoints[1].namespace, "bob");
assert!(!mirror.create_missing);
assert!(mirror.delete_missing);
assert_eq!(mirror.sync_visibility, SyncVisibility::Public);
assert_eq!(mirror.repo_whitelist, Some("^public-".to_string()));
assert_eq!(mirror.repo_blacklist, Some("-skip$".to_string()));
assert_eq!(mirror.repo_whitelist, vec!["^public-".to_string()]);
assert_eq!(mirror.repo_blacklist, vec!["-skip$".to_string()]);
assert_eq!(mirror.visibility, Visibility::Public);
let output = String::from_utf8(output).unwrap();
assert!(output.contains("Edit sync group"));
@@ -450,16 +403,15 @@ fn wizard_prefills_existing_sync_group_when_editing() {
},
],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
webhook: None,
};
let input = ["2", "1", "", "", "", "", "n", "", "", "", "", "", "n", "4"].join("\n") + "\n";
let input = ["2", "1", "", "", "", "", "n", "", "", "", "n", "4"].join("\n") + "\n";
let mut reader = Cursor::new(input.as_bytes());
let mut output = Vec::new();
@@ -515,10 +467,9 @@ fn wizard_deletes_existing_sync_group_from_menu() {
},
],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -575,10 +526,9 @@ fn wizard_can_go_back_from_delete_menu() {
},
],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
+19 -71
View File
@@ -65,9 +65,6 @@ where
let endpoints = prompt_sync_group_endpoints(reader, writer, config, &[])?;
let sync_visibility = prompt_sync_visibility(reader, writer, None)?;
let repo_filters = prompt_repo_filters(reader, writer, None)?;
write_deletion_backup_notice(writer)?;
let create_missing = prompt_create_missing(reader, writer, None)?;
let delete_missing = prompt_delete_missing(reader, writer, None)?;
let conflict_resolution = prompt_conflict_resolution(reader, writer, None)?;
config.upsert_mirror(MirrorConfig {
name: next_mirror_name(config),
@@ -75,8 +72,7 @@ where
sync_visibility,
repo_whitelist: repo_filters.whitelist,
repo_blacklist: repo_filters.blacklist,
create_missing,
delete_missing,
create_missing: true,
visibility: Visibility::Private,
conflict_resolution,
});
@@ -280,8 +276,6 @@ where
whitelist: config.mirrors[index - 1].repo_whitelist.clone(),
blacklist: config.mirrors[index - 1].repo_blacklist.clone(),
};
let existing_create_missing = config.mirrors[index - 1].create_missing;
let existing_delete_missing = config.mirrors[index - 1].delete_missing;
let existing_conflict_resolution =
config.mirrors[index - 1].conflict_resolution.clone();
let endpoints = prompt_sync_group_endpoints(reader, writer, config, &existing)?;
@@ -289,11 +283,6 @@ where
prompt_sync_visibility(reader, writer, Some(&existing_sync_visibility))?;
let repo_filters =
prompt_repo_filters(reader, writer, Some(&existing_repo_filters))?;
write_deletion_backup_notice(writer)?;
let create_missing =
prompt_create_missing(reader, writer, Some(existing_create_missing))?;
let delete_missing =
prompt_delete_missing(reader, writer, Some(existing_delete_missing))?;
let conflict_resolution = prompt_conflict_resolution(
reader,
writer,
@@ -303,8 +292,6 @@ where
config.mirrors[index - 1].sync_visibility = sync_visibility;
config.mirrors[index - 1].repo_whitelist = repo_filters.whitelist;
config.mirrors[index - 1].repo_blacklist = repo_filters.blacklist;
config.mirrors[index - 1].create_missing = create_missing;
config.mirrors[index - 1].delete_missing = delete_missing;
config.mirrors[index - 1].conflict_resolution = conflict_resolution;
prompt_webhook_setup(reader, writer, config)?;
writeln!(writer, "updated sync group {index}")?;
@@ -544,7 +531,7 @@ where
W: Write,
{
let existing = existing.cloned().unwrap_or_default();
let has_existing = existing.whitelist.is_some() || existing.blacklist.is_some();
let has_existing = !existing.whitelist.is_empty() || !existing.blacklist.is_empty();
if !prompt_bool(
reader,
writer,
@@ -555,79 +542,40 @@ where
}
Ok(RepoFilterInput {
whitelist: prompt_repo_pattern(
whitelist: prompt_repo_pattern_list(
reader,
writer,
"Whitelist regex (empty means all repo names)",
"Whitelist regexes (comma-separated, empty means all repo names)",
&existing.whitelist,
)?,
blacklist: prompt_repo_pattern(reader, writer, "Blacklist regex", &existing.blacklist)?,
blacklist: prompt_repo_pattern_list(
reader,
writer,
"Blacklist regexes (comma-separated)",
&existing.blacklist,
)?,
})
}
fn prompt_repo_pattern<R, W>(
fn prompt_repo_pattern_list<R, W>(
reader: &mut R,
writer: &mut W,
label: &str,
existing: &Option<String>,
) -> Result<Option<String>>
existing: &[String],
) -> Result<Vec<String>>
where
R: BufRead,
W: Write,
{
let value = match existing {
Some(existing) => prompt_with_default(reader, writer, label, existing)?,
None => prompt_optional(reader, writer, label)?,
let value = if existing.is_empty() {
prompt_optional(reader, writer, label)?
} else {
prompt_with_default(reader, writer, label, &existing.join(", "))?
};
if let Err(error) = validate_repo_pattern(&value) {
if let Err(error) = validate_repo_pattern_list(&value) {
bail!(error);
}
Ok(parse_repo_pattern(&value))
}
fn write_deletion_backup_notice<W>(writer: &mut W) -> Result<()>
where
W: Write,
{
writeln!(
writer,
"Deletion backups: refray keeps a local backup before propagating repository or branch deletes."
)?;
Ok(())
}
fn prompt_create_missing<R, W>(
reader: &mut R,
writer: &mut W,
existing: Option<bool>,
) -> Result<bool>
where
R: BufRead,
W: Write,
{
prompt_bool(
reader,
writer,
"Create repositories that are missing from an endpoint?",
existing.unwrap_or(true),
)
}
fn prompt_delete_missing<R, W>(
reader: &mut R,
writer: &mut W,
existing: Option<bool>,
) -> Result<bool>
where
R: BufRead,
W: Write,
{
prompt_bool(
reader,
writer,
"When a previously synced repository is deleted from one endpoint, delete it everywhere?",
existing.unwrap_or(true),
)
Ok(parse_repo_pattern_list(&value))
}
fn sync_visibility_value(sync_visibility: &SyncVisibility) -> &'static str {
+1 -13
View File
@@ -1,20 +1,8 @@
use super::*;
#[test]
fn repo_prefix_pads_and_truncates_to_fixed_width() {
assert_eq!(repo_prefix("api", 6), "api ");
assert_eq!(repo_prefix("very-long-repo", 8), "very-lo~");
}
#[test]
fn status_text_truncates_to_fixed_width() {
assert_eq!(truncate_status("short", 8), "short");
assert_eq!(truncate_status("very-long-status", 8), "very-lo~");
}
#[test]
fn repo_log_context_is_inherited_by_parallel_workers() {
let _guard = start_repo_log("repo-a".to_string(), 0, 8);
let _guard = start_repo_log();
crate::logln!("outer line");
crate::parallel::map(vec!["worker line"], 1, |line| {
+1 -208
View File
@@ -225,42 +225,6 @@ fn list_gitlab_user_repos_merges_authenticated_owned_projects() {
handle.join().unwrap();
}
#[test]
fn list_gitlab_group_repos_ignores_deletion_scheduled_projects() {
let projects = r#"[
{"name":"active","path":"active","path_with_namespace":"maigolabs/active","http_url_to_repo":"https://gitlab.example.test/maigolabs/active.git","visibility":"private","description":null,"namespace":{"path":"maigolabs","full_path":"maigolabs"}},
{"name":"Kairos-deletion_scheduled-82068172","path":"Kairos-deletion_scheduled-82068172","path_with_namespace":"maigolabs/Kairos-deletion_scheduled-82068172","http_url_to_repo":"https://gitlab.example.test/maigolabs/Kairos-deletion_scheduled-82068172.git","visibility":"private","description":null,"namespace":{"path":"maigolabs","full_path":"maigolabs"}},
{"name":"marked-at","path":"marked-at","path_with_namespace":"maigolabs/marked-at","http_url_to_repo":"https://gitlab.example.test/maigolabs/marked-at.git","visibility":"private","description":null,"namespace":{"path":"maigolabs","full_path":"maigolabs"},"marked_for_deletion_at":"2026-05-17"},
{"name":"marked-on","path":"marked-on","path_with_namespace":"maigolabs/marked-on","http_url_to_repo":"https://gitlab.example.test/maigolabs/marked-on.git","visibility":"private","description":null,"namespace":{"path":"maigolabs","full_path":"maigolabs"},"marked_for_deletion_on":"2026-05-17"},
{"name":"pending","path":"pending","path_with_namespace":"maigolabs/pending","http_url_to_repo":"https://gitlab.example.test/maigolabs/pending.git","visibility":"private","description":null,"namespace":{"path":"maigolabs","full_path":"maigolabs"},"pending_delete":true}
]"#;
let (api_url, handle) = one_request_server("200 OK", projects, |request| {
assert!(
request.starts_with(
"GET /groups/maigolabs/projects?simple=true&include_subgroups=false&per_page=100 "
),
"request was {request}"
);
});
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Gitlab, None)
};
let repos = ProviderClient::new(&site)
.unwrap()
.list_repos(&EndpointConfig {
site: "gitlab".to_string(),
kind: NamespaceKind::Group,
namespace: "maigolabs".to_string(),
})
.unwrap();
assert_eq!(repos.len(), 1);
assert_eq!(repos[0].name, "active");
handle.join().unwrap();
}
#[test]
fn create_gitlab_repo_returns_existing_repo_when_path_is_taken() {
let existing = r#"{"name":"repo","path":"repo","path_with_namespace":"alice/repo","http_url_to_repo":"https://gitlab.example.test/alice/repo.git","visibility":"public","description":"existing","namespace":{"path":"alice","full_path":"alice"}}"#;
@@ -318,82 +282,6 @@ fn create_gitlab_repo_returns_existing_repo_when_path_is_taken() {
handle.join().unwrap();
}
#[test]
fn set_github_default_branch_patches_repo() {
let (api_url, handle) = one_request_server("200 OK", "{}", |request| {
assert!(
request.starts_with("PATCH /repos/alice/repo "),
"request was {request}"
);
assert!(
request.contains(r#""default_branch":"main""#),
"request was {request}"
);
assert!(
request
.to_ascii_lowercase()
.contains("authorization: bearer secret"),
"request was {request}"
);
});
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Github, None)
};
ProviderClient::new(&site)
.unwrap()
.set_default_branch(
&EndpointConfig {
site: "github".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
"repo",
"main",
)
.unwrap();
handle.join().unwrap();
}
#[test]
fn set_gitlab_default_branch_updates_project() {
let (api_url, handle) = one_request_server("200 OK", "{}", |request| {
assert!(
request.starts_with("PUT /projects/alice%2Frepo "),
"request was {request}"
);
assert!(
request.contains(r#""default_branch":"main""#),
"request was {request}"
);
assert!(
request
.to_ascii_lowercase()
.contains("private-token: secret"),
"request was {request}"
);
});
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Gitlab, None)
};
ProviderClient::new(&site)
.unwrap()
.set_default_branch(
&EndpointConfig {
site: "gitlab".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
"repo",
"main",
)
.unwrap();
handle.join().unwrap();
}
#[test]
fn install_webhook_posts_github_hook_when_missing() {
let (api_url, handle) = request_server(
@@ -421,7 +309,7 @@ fn install_webhook_posts_github_hook_when_missing() {
};
let client = ProviderClient::new(&site).unwrap();
let outcome = client
client
.install_webhook(
&EndpointConfig {
site: "github".to_string(),
@@ -438,63 +326,6 @@ fn install_webhook_posts_github_hook_when_missing() {
"secret",
)
.unwrap();
assert_eq!(outcome, WebhookInstallOutcome::Created);
handle.join().unwrap();
}
#[test]
fn install_webhook_reports_existing_forgejo_hook() {
let (api_url, handle) = request_server(
vec![
(
"200 OK",
r#"[{"id":42,"config":{"url":"https://mirror.example.test/webhook/"}}]"#,
),
("200 OK", r#"{"id":42}"#),
],
|index, request| match index {
0 => assert!(
request.starts_with("GET /repos/alice/repo/hooks "),
"request was {request}"
),
1 => {
assert!(
request.starts_with("PATCH /repos/alice/repo/hooks/42 "),
"request was {request}"
);
assert!(request.contains("https://mirror.example.test/webhook"));
assert!(request.contains("secret"));
assert!(request.contains("push"));
}
_ => unreachable!(),
},
);
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Forgejo, None)
};
let client = ProviderClient::new(&site).unwrap();
let outcome = client
.install_webhook(
&EndpointConfig {
site: "forgejo".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
&RemoteRepo {
name: "repo".to_string(),
clone_url: "https://codeberg.org/alice/repo.git".to_string(),
private: true,
description: None,
},
"https://mirror.example.test/webhook",
"secret",
)
.unwrap();
assert_eq!(outcome, WebhookInstallOutcome::Existing);
handle.join().unwrap();
}
@@ -822,44 +653,6 @@ fn create_gitea_repo_returns_existing_repo_on_conflict() {
handle.join().unwrap();
}
#[test]
fn set_gitea_default_branch_patches_repo() {
let (api_url, handle) = one_request_server("200 OK", "{}", |request| {
assert!(
request.starts_with("PATCH /repos/alice/repo "),
"request was {request}"
);
assert!(
request.contains(r#""default_branch":"main""#),
"request was {request}"
);
assert!(
request
.to_ascii_lowercase()
.contains("authorization: token secret"),
"request was {request}"
);
});
let site = SiteConfig {
api_url: Some(api_url),
..site(ProviderKind::Gitea, None)
};
ProviderClient::new(&site)
.unwrap()
.set_default_branch(
&EndpointConfig {
site: "gitea".to_string(),
kind: NamespaceKind::User,
namespace: "alice".to_string(),
},
"repo",
"main",
)
.unwrap();
handle.join().unwrap();
}
#[test]
fn open_pull_request_posts_github_pull_when_missing() {
let (api_url, handle) = request_server(
+3 -129
View File
@@ -174,29 +174,6 @@ fn branch_deletion_decisions_ignore_internal_conflict_branches() {
assert!(blocked.is_empty());
}
#[test]
fn branches_deleted_everywhere_are_backed_up_before_prune() {
let mut previous = BTreeMap::new();
previous.insert(
"github".to_string(),
remote_ref_state("a", &[("main", "111")]),
);
previous.insert(
"gitea".to_string(),
remote_ref_state("b", &[("main", "111")]),
);
let backups = branches_deleted_everywhere_backups(&previous, &BTreeMap::new(), "stamp");
assert_eq!(backups.len(), 1);
assert_eq!(backups[0].sha, "111");
assert!(
backups[0]
.refname
.starts_with("refs/refray-backups/branches/")
);
}
#[test]
fn repo_deletion_decision_propagates_previous_synced_repo_deletion() {
let mirror = test_mirror();
@@ -231,35 +208,6 @@ fn repo_deletion_decision_propagates_previous_synced_repo_deletion() {
);
}
#[test]
fn repo_deletion_decision_is_disabled_by_mirror_policy() {
let mut mirror = test_mirror();
mirror.delete_missing = false;
let mut previous = BTreeMap::new();
previous.insert(
remote_key("github"),
remote_ref_state("a", &[("main", "111")]),
);
previous.insert(
remote_key("gitea"),
remote_ref_state("b", &[("main", "111")]),
);
let mut current = BTreeMap::new();
current.insert(
remote_key("gitea"),
remote_ref_state("b", &[("main", "111")]),
);
let decision = repo_deletion_decision(
&mirror,
&[endpoint_repo("gitea")],
Some(&previous),
&current,
);
assert_eq!(decision, RepoDeletionDecision::None);
}
#[test]
fn repo_deletion_decision_conflicts_when_remaining_repo_changed() {
let mirror = test_mirror();
@@ -396,7 +344,7 @@ fn all_visibility_keeps_state_only_repos_for_deletion_detection() {
#[test]
fn repo_name_filters_do_not_treat_state_only_repos_as_deleted() {
let mut mirror = test_mirror();
mirror.repo_whitelist = Some("^public-".to_string());
mirror.repo_whitelist = vec!["^public-".to_string()];
let repo_filter = mirror.repo_filter().unwrap();
let mut ref_state = RefState::default();
ref_state.set_repo(
@@ -448,46 +396,6 @@ fn endpoint_remote_names_do_not_slug_collide() {
);
}
#[test]
fn targeted_endpoint_repos_synthesize_clone_urls_without_listing() {
    // A mirror whose only endpoint is a nested GitLab group: asking for a
    // specific repo should synthesize the clone URL from base_url + namespace
    // + repo name, without ever listing the remote namespace.
    let endpoint = EndpointConfig {
        site: "gitlab".to_string(),
        kind: crate::config::NamespaceKind::Group,
        namespace: "parent/child".to_string(),
    };
    let mirror = MirrorConfig {
        name: "sync-1".to_string(),
        endpoints: vec![endpoint],
        sync_visibility: crate::config::SyncVisibility::All,
        repo_whitelist: None,
        repo_blacklist: None,
        create_missing: true,
        delete_missing: true,
        visibility: crate::config::Visibility::Private,
        conflict_resolution: ConflictResolutionStrategy::Fail,
    };
    // Single GitLab site; base_url deliberately carries a path component
    // ("/root") to prove it is preserved in the synthesized URL.
    let site = crate::config::SiteConfig {
        name: "gitlab".to_string(),
        provider: crate::config::ProviderKind::Gitlab,
        base_url: "https://gitlab.example.test/root".to_string(),
        api_url: None,
        token: crate::config::TokenConfig::Value("token".to_string()),
        git_username: None,
    };
    let config = Config {
        jobs: crate::config::DEFAULT_JOBS,
        sites: vec![site],
        mirrors: vec![mirror.clone()],
        webhook: None,
    };
    let repos = targeted_endpoint_repos(&config, &mirror, "repo").unwrap();
    assert_eq!(repos.len(), 1);
    assert_eq!(
        repos[0].repo.clone_url,
        "https://gitlab.example.test/root/parent/child/repo.git"
    );
}
#[test]
fn created_repo_visibility_follows_existing_public_repo() {
let mirror = test_mirror();
@@ -532,39 +440,6 @@ fn created_repo_visibility_falls_back_to_config_without_template() {
);
}
#[test]
fn gitlab_invalid_project_name_errors_are_skippable() {
    // Verbatim 400 response body from GitLab's project-create endpoint when a
    // repo name violates its path/name character rules; the classifier must
    // recognize it so sync can skip the repo instead of failing the run.
    let body = r#"POST https://gitlab.com/api/v4/projects returned 400 Bad Request: {"message":{"project_namespace.path":["can only include non-accented letters, digits, '_', '-' and '.'. It must not start with '-', '_', or '.'."],"name":["can contain only letters, digits, emoji, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'."]}}"#;
    let error = anyhow::Error::msg(body);
    assert!(is_gitlab_invalid_project_name_error(&error));
}
#[test]
fn gitlab_project_path_validation_matches_create_constraints() {
    // Paths GitLab's create-project endpoint accepts: letters (any case),
    // digits, and interior '.', '_', '-'.
    let accepted = ["Kairos", "needLe", "amaoke.app", "repo_1", "repo-1"];
    // Paths it rejects: empty, leading '.'/'_'/'-', trailing separators,
    // reserved suffixes (.git/.atom), whitespace, '+', and non-ASCII letters.
    let rejected = [
        "",
        ".github",
        "_private",
        "-draft",
        "repo.",
        "repo_",
        "repo-",
        "repo.git",
        "repo.atom",
        "has space",
        "has+plus",
        "荞麦main",
    ];
    for name in accepted {
        assert!(is_supported_gitlab_project_path(name), "{name}");
    }
    for name in rejected {
        assert!(!is_supported_gitlab_project_path(name), "{name}");
    }
}
fn remote_ref_state(hash: &str, branches: &[(&str, &str)]) -> RemoteRefState {
RemoteRefState {
hash: hash.to_string(),
@@ -597,10 +472,9 @@ fn test_mirror() -> MirrorConfig {
name: "sync-1".to_string(),
endpoints: vec![endpoint("github"), endpoint("gitea")],
sync_visibility: crate::config::SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: crate::config::Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}
+9 -13
View File
@@ -112,10 +112,9 @@ fn matches_jobs_by_provider_and_namespace() {
endpoint("gitea", NamespaceKind::User, "azalea"),
],
sync_visibility: SyncVisibility::All,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -140,10 +139,9 @@ fn matching_jobs_respects_repo_name_filters() {
name: "sync-1".to_string(),
endpoints: vec![endpoint("github", NamespaceKind::User, "alice")],
sync_visibility: SyncVisibility::All,
repo_whitelist: Some("^important-".to_string()),
repo_blacklist: Some("-archive$".to_string()),
repo_whitelist: vec!["^important-".to_string()],
repo_blacklist: vec!["-archive$".to_string()],
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
};
@@ -161,7 +159,7 @@ fn matching_jobs_respects_repo_name_filters() {
assert!(matching_jobs(&config, &webhook_event("important-archive")).is_empty());
assert!(matching_jobs(&config, &webhook_event("random")).is_empty());
mirror.repo_whitelist = None;
mirror.repo_whitelist.clear();
let config = Config {
jobs: crate::config::DEFAULT_JOBS,
sites: vec![site("github", ProviderKind::Github)],
@@ -359,10 +357,9 @@ fn uninstall_webhooks_skips_blocked_provider_access() {
endpoint("github-peer", NamespaceKind::User, "bob"),
],
sync_visibility: SyncVisibility::Public,
repo_whitelist: None,
repo_blacklist: None,
repo_whitelist: Vec::new(),
repo_blacklist: Vec::new(),
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}],
@@ -713,10 +710,9 @@ fn filtered_mirror() -> MirrorConfig {
endpoint("github-peer", NamespaceKind::User, "bob"),
],
sync_visibility: SyncVisibility::Public,
repo_whitelist: Some("^important-".to_string()),
repo_blacklist: Some("-archive$".to_string()),
repo_whitelist: vec!["^important-".to_string()],
repo_blacklist: vec!["-archive$".to_string()],
create_missing: true,
delete_missing: true,
visibility: Visibility::Private,
conflict_resolution: ConflictResolutionStrategy::Fail,
}