create src

This commit is contained in:
awfixer
2026-03-11 02:04:19 -07:00
commit 52f7a22bf2
2595 changed files with 402870 additions and 0 deletions

1595
src-refspec/CHANGELOG.md Normal file

File diff suppressed because it is too large Load Diff

37
src-refspec/Cargo.toml Normal file
View File

@@ -0,0 +1,37 @@
[package]
name = "src-refspec"
version = "0.38.0"
repository = "https://github.com/GitoxideLabs/gitoxide"
license = "MIT OR Apache-2.0"
description = "A crate of the gitoxide project for parsing and representing refspecs"
authors = ["Sebastian Thiel <sebastian.thiel@icloud.com>"]
edition = "2021"
include = ["src/**/*", "LICENSE-*", "README.md"]
rust-version = "1.82"

# Use the table form so workspace lints can be declared after [package];
# a top-level dotted key (`lints.workspace = true`) must precede all tables.
[lints]
workspace = true

[lib]
doctest = false

[features]
## Enable support for the SHA-1 hash by enabling the respective feature in the hash crate.
sha1 = ["gix-hash/sha1"]

[dependencies]
# The packages in this workspace are named `src-*`, but the sources of this crate
# import them under their `gix_*` names (e.g. `use gix_hash::oid`), so the
# dependencies are renamed here via the `package` key to make those imports resolve.
gix-error = { package = "src-error", version = "^0.2.0", path = "../src-error" }
gix-revision = { package = "src-revision", version = "^0.42.0", path = "../src-revision", default-features = false }
gix-validate = { package = "src-validate", version = "^0.11.0", path = "../src-validate" }
gix-hash = { package = "src-hash", version = "^0.22.1", path = "../src-hash" }
gix-glob = { package = "src-glob", version = "^0.24.0", path = "../src-glob" }
bstr = { version = "1.12.0", default-features = false, features = ["std"] }
thiserror = "2.0.18"
smallvec = "1.15.1"

[dev-dependencies]
src-testtools = { path = "../tests/tools" }
insta = "1.46.3"

[package.metadata.docs.rs]
features = ["sha1"]

1
src-refspec/LICENSE-APACHE Symbolic link
View File

@@ -0,0 +1 @@
../LICENSE-APACHE

1
src-refspec/LICENSE-MIT Symbolic link
View File

@@ -0,0 +1 @@
../LICENSE-MIT

11
src-refspec/README.md Normal file
View File

@@ -0,0 +1,11 @@
# `src-refspec`
### Testing
#### Fuzzing
`cargo fuzz` is used for fuzzing, installable with `cargo install cargo-fuzz`.
Targets can be listed with `cargo fuzz list` and executed via `cargo +nightly fuzz run <target>`,
where `<target>` can be `parse` for example.

7
src-refspec/fuzz/.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
target
corpus
artifacts
# These usually involve a lot of local CPU time, keep them.
!artifacts
!corpus

View File

@@ -0,0 +1,26 @@
[package]
name = "src-refspec-fuzz"
version = "0.0.0"
authors = ["Automatically generated"]
publish = false
edition = "2021"

[package.metadata]
cargo-fuzz = true

[dependencies]
libfuzzer-sys = "0.4"

# The fuzz target imports the crate as `gix_refspec`, while the package in this
# workspace is named `src-refspec` — rename the dependency so the import resolves.
[dependencies.gix-refspec]
path = ".."
package = "src-refspec"
features = ["sha1"]

# Prevent this from interfering with workspaces
[workspace]
members = ["."]

[[bin]]
name = "parse"
path = "fuzz_targets/parse.rs"
test = false
doc = false

View File

@@ -0,0 +1,7 @@
#![no_main]
use libfuzzer_sys::fuzz_target;

// Feed arbitrary bytes through the refspec parser in both modes.
// Only absence of panics matters here, so parse results are discarded.
fuzz_target!(|data: &[u8]| {
    let _ = gix_refspec::parse(data.into(), gix_refspec::parse::Operation::Push);
    let _ = gix_refspec::parse(data.into(), gix_refspec::parse::Operation::Fetch);
});

View File

@@ -0,0 +1,73 @@
use bstr::BStr;
use crate::{parse::Operation, Instruction};
impl Instruction<'_> {
    /// Derive the mode of operation from this instruction.
    pub fn operation(&self) -> Operation {
        if matches!(self, Instruction::Push(_)) {
            Operation::Push
        } else {
            Operation::Fetch
        }
    }
}
/// Note that all sources can either be a ref-name, partial or full, or a rev-spec, unless specified otherwise, on the local side.
/// Destinations can only be partial or full ref names on the remote side.
#[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone, Hash, Debug)]
pub enum Push<'a> {
    /// Push all local branches to the matching destination on the remote, which has to exist to be updated.
    AllMatchingBranches {
        /// If true, allow non-fast-forward updates of the matched destination branch.
        allow_non_fast_forward: bool,
    },
    /// Delete the destination ref or glob pattern, with only a single `*` allowed.
    Delete {
        /// The reference or pattern to delete on the remote.
        ref_or_pattern: &'a BStr,
    },
    /// Push a single ref or refspec to a known destination ref.
    Matching {
        /// The source ref or refspec to push. If pattern, it contains a single `*`.
        /// Examples are refnames like `HEAD` or `refs/heads/main`, or patterns like `refs/heads/*`.
        src: &'a BStr,
        /// The ref to update with the object from `src`. If `src` is a pattern, this is a pattern too.
        /// Examples are refnames like `HEAD` or `refs/heads/main`, or patterns like `refs/heads/*`.
        dst: &'a BStr,
        /// If true, allow non-fast-forward updates of `dst`.
        allow_non_fast_forward: bool,
    },
    /// Exclude a single ref.
    Exclude {
        /// A full or partial ref name to exclude, or multiple if a single `*` is used.
        src: &'a BStr,
    },
}
/// Any source can either be a ref name (full or partial) or a fully spelled out hex-sha for an object, on the remote side.
///
/// Destinations can only be partial or full ref-names on the local side.
#[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone, Hash, Debug)]
pub enum Fetch<'a> {
    /// Fetch a ref or refs, without updating local branches.
    Only {
        /// The partial or full ref name to fetch on the remote side or the full object hex-name, without updating the local side.
        /// Note that this may not be a glob pattern, as those need to be matched by a destination which isn't present here.
        src: &'a BStr,
    },
    /// Exclude a single ref.
    Exclude {
        /// A single partial or full ref name to exclude on the remote, or a pattern with a single `*`. It cannot be a spelled out object hash.
        src: &'a BStr,
    },
    /// Fetch from `src` and update the corresponding destination branches in `dst` accordingly.
    AndUpdate {
        /// The ref name to fetch on the remote side, or a pattern with a single `*` to match against, or the full object hex-name.
        src: &'a BStr,
        /// The local destination to update with what was fetched, or a pattern whose single `*` will be replaced with the matching portion
        /// of the `*` from `src`.
        dst: &'a BStr,
        /// If true, allow non-fast-forward updates of `dst`.
        allow_non_fast_forward: bool,
    },
}

39
src-refspec/src/lib.rs Normal file
View File

@@ -0,0 +1,39 @@
//! Parse git ref-specs and represent them.
#![deny(missing_docs, rust_2018_idioms)]
#![forbid(unsafe_code)]

/// Parsing of refspecs.
pub mod parse;
pub use parse::function::parse;

/// The instructions a parsed refspec translates to.
pub mod instruction;

/// A refspec with references to the memory it was parsed from.
#[derive(Eq, Copy, Clone, Debug)]
pub struct RefSpecRef<'a> {
    // How updates should be performed: normal, forced (`+`), or negative (`^`).
    mode: types::Mode,
    // Whether this spec is meant for fetching or pushing.
    op: parse::Operation,
    // The left-hand side of the spec, if present.
    src: Option<&'a bstr::BStr>,
    // The right-hand side of the spec, if present.
    dst: Option<&'a bstr::BStr>,
}

/// An owned refspec.
#[derive(Eq, Clone, Debug)]
pub struct RefSpec {
    // Fields mirror `RefSpecRef`, but own their buffers.
    mode: types::Mode,
    op: parse::Operation,
    src: Option<bstr::BString>,
    dst: Option<bstr::BString>,
}

mod spec;
mod write;

/// Grouping and matching of multiple refspecs against items.
pub mod match_group;
pub use match_group::types::MatchGroup;

mod types;
pub use types::Instruction;

View File

@@ -0,0 +1,192 @@
use std::collections::BTreeSet;
use crate::{parse::Operation, types::Mode, MatchGroup, RefSpecRef};
pub(crate) mod types;
pub use types::{match_lhs, match_rhs, Item, Mapping, Source, SourceRef};
///
pub mod validate;
/// Initialization
impl<'a> MatchGroup<'a> {
/// Take all the fetch ref specs from `specs` get a match group ready.
pub fn from_fetch_specs(specs: impl IntoIterator<Item = RefSpecRef<'a>>) -> Self {
MatchGroup {
specs: specs.into_iter().filter(|s| s.op == Operation::Fetch).collect(),
}
}
/// Take all the push ref specs from `specs` get a match group ready.
pub fn from_push_specs(specs: impl IntoIterator<Item = RefSpecRef<'a>>) -> Self {
MatchGroup {
specs: specs.into_iter().filter(|s| s.op == Operation::Push).collect(),
}
}
}
/// Matching
impl<'spec> MatchGroup<'spec> {
    /// Match all `items` against all *fetch* specs present in this group, returning deduplicated mappings from source to destination.
    /// `items` are expected to be references on the remote, which will be matched and mapped to obtain their local counterparts,
    /// i.e. *left side of refspecs is mapped to their right side*.
    /// *Note that this method is correct only for fetch-specs*, even though it also *works for push-specs*.
    ///
    /// Object names are never mapped and always returned as match.
    ///
    /// Note that negative matches are not part of the return value, so they are not observable but will be used to remove mappings.
    // TODO: figure out how to deal with push-specs, probably when push is being implemented.
    pub fn match_lhs<'item>(
        self,
        mut items: impl Iterator<Item = Item<'item>> + Clone,
    ) -> match_lhs::Outcome<'spec, 'item> {
        let mut out = Vec::new();
        // Deduplicate by the hash of (lhs, rhs) — `Mapping`'s `Hash` impl deliberately
        // ignores the item/spec indices, so the same edge is only recorded once.
        let mut seen = BTreeSet::default();
        let mut push_unique = |mapping| {
            if seen.insert(calculate_hash(&mapping)) {
                out.push(mapping);
            }
        };
        // Specs whose source is a spelled-out object id match unconditionally and are emitted
        // up-front; their matcher slot becomes `None` so the per-item loop below skips them.
        let mut matchers: Vec<Option<Matcher<'_>>> = self
            .specs
            .iter()
            .copied()
            .map(Matcher::from)
            .enumerate()
            .map(|(idx, m)| match m.lhs {
                Some(Needle::Object(id)) => {
                    push_unique(Mapping {
                        item_index: None,
                        lhs: SourceRef::ObjectId(id),
                        rhs: m.rhs.map(Needle::to_bstr),
                        spec_index: idx,
                    });
                    None
                }
                _ => Some(m),
            })
            .collect();
        let mut has_negation = false;
        for (spec_index, (spec, matcher)) in self.specs.iter().zip(matchers.iter_mut()).enumerate() {
            // Negative specs never produce mappings; they prune them in the second pass below.
            if spec.mode == Mode::Negative {
                has_negation = true;
                continue;
            }
            for (item_index, item) in items.clone().enumerate() {
                let Some(matcher) = matcher else { continue };
                let (matched, rhs) = matcher.matches_lhs(item);
                if matched {
                    push_unique(Mapping {
                        item_index: Some(item_index),
                        lhs: SourceRef::FullName(item.full_ref_name.into()),
                        rhs,
                        spec_index,
                    });
                }
            }
        }
        // Second pass: drop mappings excluded by negative specs. The first item is consumed
        // only to learn the repository's hash kind, needed to fabricate a null id for probing.
        if let Some(hash_kind) = has_negation.then(|| items.next().map(|i| i.target.kind())).flatten() {
            let null_id = hash_kind.null();
            for matcher in matchers
                .into_iter()
                .zip(self.specs.iter())
                .filter_map(|(m, spec)| m.and_then(|m| (spec.mode == Mode::Negative).then_some(m)))
            {
                out.retain(|m| match &m.lhs {
                    // Object ids are never filtered by negative specs.
                    SourceRef::ObjectId(_) => true,
                    SourceRef::FullName(name) => {
                        !matcher
                            .matches_lhs(Item {
                                full_ref_name: name.as_ref(),
                                target: &null_id,
                                object: None,
                            })
                            .0
                    }
                });
            }
        }
        match_lhs::Outcome {
            group: self,
            mappings: out,
        }
    }
    /// Match all `items` against all *fetch* specs present in this group, returning deduplicated mappings from destination to source.
    /// `items` are expected to be tracking references in the local clone, which will be matched and reverse-mapped to obtain their remote counterparts,
    /// i.e. *right side of refspecs is mapped to their left side*.
    /// *Note that this method is correct only for fetch-specs*, even though it also *works for push-specs*.
    ///
    /// Note that negative matches are not part of the return value, so they are not observable but will be used to remove mappings.
    // Reverse-mapping is implemented here: https://github.com/git/git/blob/76cf4f61c87855ebf0784b88aaf737d6b09f504b/branch.c#L252
    pub fn match_rhs<'item>(
        self,
        mut items: impl Iterator<Item = Item<'item>> + Clone,
    ) -> match_rhs::Outcome<'spec, 'item> {
        let mut out = Vec::<Mapping<'spec, 'item>>::new();
        let mut seen = BTreeSet::default();
        let mut push_unique = |mapping| {
            if seen.insert(calculate_hash(&mapping)) {
                out.push(mapping);
            }
        };
        let mut matchers: Vec<Matcher<'_>> = self.specs.iter().copied().map(Matcher::from).collect();
        let mut has_negation = false;
        for (spec_index, (spec, matcher)) in self.specs.iter().zip(matchers.iter_mut()).enumerate() {
            if spec.mode == Mode::Negative {
                has_negation = true;
                continue;
            }
            for (item_index, item) in items.clone().enumerate() {
                // Reverse matching: `item` is the destination, the produced `lhs` is the source.
                let (matched, lhs) = matcher.matches_rhs(item);
                if let Some(lhs) = lhs.filter(|_| matched) {
                    push_unique(Mapping {
                        item_index: Some(item_index),
                        lhs: SourceRef::FullName(lhs),
                        rhs: Some(item.full_ref_name.into()),
                        spec_index,
                    });
                }
            }
        }
        // Prune mappings matched by negative specs, probing with a null id of the hash kind
        // gleaned from the first item.
        if let Some(hash_kind) = has_negation.then(|| items.next().map(|i| i.target.kind())).flatten() {
            let null_id = hash_kind.null();
            for matcher in matchers
                .into_iter()
                .zip(self.specs.iter())
                .filter_map(|(m, spec)| (spec.mode == Mode::Negative).then_some(m))
            {
                out.retain(|m| match &m.lhs {
                    SourceRef::ObjectId(_) => true,
                    SourceRef::FullName(name) => {
                        !matcher
                            .matches_rhs(Item {
                                full_ref_name: name.as_ref(),
                                target: &null_id,
                                object: None,
                            })
                            .0
                    }
                });
            }
        }
        match_rhs::Outcome {
            group: self,
            mappings: out,
        }
    }
}
/// Hash `t` with the standard library's default hasher and return the digest.
///
/// Used to deduplicate mappings cheaply without storing clones of them.
fn calculate_hash<T: std::hash::Hash>(t: &T) -> u64 {
    use std::hash::Hasher;
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    t.hash(&mut hasher);
    hasher.finish()
}
mod util;
use util::{Matcher, Needle};

View File

@@ -0,0 +1,114 @@
use std::borrow::Cow;
use bstr::BStr;
use gix_hash::oid;
use crate::RefSpecRef;
/// A match group is able to match a list of ref specs in order while handling negation, conflicts and one to many mappings.
#[derive(Default, Debug, Clone)]
pub struct MatchGroup<'a> {
    /// The specs that take part in item matching.
    pub specs: Vec<RefSpecRef<'a>>,
}

/// Types produced when matching the left-hand side of refspecs (remote → local for fetches).
pub mod match_lhs {
    use crate::{match_group::Mapping, MatchGroup};

    /// The outcome of any matching operation of a [`MatchGroup`].
    ///
    /// It's used to validate and process the contained [mappings](Mapping).
    #[derive(Debug, Clone)]
    pub struct Outcome<'spec, 'item> {
        /// The match group that produced this outcome.
        pub group: MatchGroup<'spec>,
        /// The mappings derived from matching [items](crate::match_group::Item).
        pub mappings: Vec<Mapping<'item, 'spec>>,
    }
}

/// Types produced when matching the right-hand side of refspecs (local → remote for fetches).
pub mod match_rhs {
    use crate::{match_group::Mapping, MatchGroup};

    /// The outcome of any matching operation of a [`MatchGroup`].
    ///
    /// It's used to validate and process the contained [mappings](Mapping).
    #[derive(Debug, Clone)]
    pub struct Outcome<'spec, 'item> {
        /// The match group that produced this outcome.
        pub group: MatchGroup<'spec>,
        /// The mappings derived from matching [items](crate::match_group::Item).
        pub mappings: Vec<Mapping<'spec, 'item>>,
    }
}
/// An item to match, input to various matching operations.
#[derive(Debug, Copy, Clone)]
pub struct Item<'a> {
    /// The full name of the reference, like `refs/heads/main`.
    pub full_ref_name: &'a BStr,
    /// The id that `full_ref_name` points to, which typically is a commit, but can also be a tag object (or anything else).
    pub target: &'a oid,
    /// The object an annotated tag is pointing to, if `target` is an annotated tag.
    pub object: Option<&'a oid>,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
/// The source (or left-hand) side of a mapping.
pub enum SourceRef<'a> {
    /// A full reference name, which is expected to be valid.
    ///
    /// Validity, however, is not enforced here.
    FullName(Cow<'a, BStr>),
    /// The name of an object that is expected to exist on the remote side.
    /// Note that it might not be advertised by the remote but be part of the object graph,
    /// and thus gets sent in the pack. The server is expected to fail unless the desired
    /// object is present, but to some extent it is merely a request by the user.
    ObjectId(gix_hash::ObjectId),
}
impl SourceRef<'_> {
/// Create a fully owned instance by consuming this one.
pub fn into_owned(self) -> Source {
match self {
SourceRef::ObjectId(id) => Source::ObjectId(id),
SourceRef::FullName(name) => Source::FullName(name.into_owned().into()),
}
}
}
impl std::fmt::Display for SourceRef<'_> {
    // Render either the ref name or the hex object id, whichever this source holds.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            SourceRef::ObjectId(id) => write!(f, "{id}"),
            SourceRef::FullName(name) => write!(f, "{name}"),
        }
    }
}
/// The source (or left-hand) side of a mapping, which owns its name.
pub type Source = SourceRef<'static>;

/// A mapping from a remote ref to a local ref for fetches, or from a local ref to a remote ref for pushes.
///
/// Mappings are like edges in a graph, initially without any constraints.
#[derive(Debug, Clone)]
pub struct Mapping<'a, 'b> {
    /// The index into the initial `items` list that matched against a spec.
    pub item_index: Option<usize>,
    /// The name of the remote side for fetches or the local one for pushes that matched.
    pub lhs: SourceRef<'a>,
    /// The name of the local side for fetches or the remote one for pushes that corresponds to `lhs`, if available.
    pub rhs: Option<Cow<'b, BStr>>,
    /// The index of the matched ref-spec as seen from the match group.
    pub spec_index: usize,
}

impl std::hash::Hash for Mapping<'_, '_> {
    // Only `lhs` and `rhs` take part in the hash: deduplication is based on the edge
    // itself, so the same mapping found via different items or specs collapses into one.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.lhs.hash(state);
        self.rhs.hash(state);
    }
}

View File

@@ -0,0 +1,212 @@
use std::{borrow::Cow, ops::Range};
use bstr::{BStr, BString, ByteSlice, ByteVec};
use gix_hash::ObjectId;
use crate::{match_group::Item, RefSpecRef};
/// A type keeping enough information about a ref-spec to be able to efficiently match it against multiple matcher items.
#[derive(Debug)]
pub struct Matcher<'a> {
    // The needle for the source (left-hand) side, if present.
    pub(crate) lhs: Option<Needle<'a>>,
    // The needle for the destination (right-hand) side, if present.
    pub(crate) rhs: Option<Needle<'a>>,
}
impl<'a> Matcher<'a> {
    /// Match the lefthand-side `item` against this spec and return `(true, Some<rhs>)` to gain the other,
    /// transformed righthand-side of the match as configured by the refspec.
    /// Or return `(true, None)` if there was no `rhs` but the `item` matched.
    /// Lastly, return `(false, None)` if `item` didn't match at all.
    ///
    /// This may involve resolving a glob with an allocation, as the destination is built using the matching portion of a glob.
    pub fn matches_lhs(&self, item: Item<'_>) -> (bool, Option<Cow<'a, BStr>>) {
        match (self.lhs, self.rhs) {
            (None, _) => (false, None),
            (Some(source), None) => (source.matches(item).is_match(), None),
            (Some(source), Some(destination)) => source.matches(item).into_match_outcome(destination, item),
        }
    }
    /// Match the righthand-side `item` against this spec and return `(true, Some<lhs>)` to gain the other,
    /// transformed lefthand-side of the match as configured by the refspec.
    /// Or return `(true, None)` if there was no `lhs` but the `item` matched.
    /// Lastly, return `(false, None)` if `item` didn't match at all.
    ///
    /// This may involve resolving a glob with an allocation, as the destination is built using the matching portion of a glob.
    pub fn matches_rhs(&self, item: Item<'_>) -> (bool, Option<Cow<'a, BStr>>) {
        match (self.lhs, self.rhs) {
            (_, None) => (false, None),
            (None, Some(destination)) => (destination.matches(item).is_match(), None),
            (Some(source), Some(destination)) => destination.matches(item).into_match_outcome(source, item),
        }
    }
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum Needle<'a> {
    /// A complete ref name, starting with `refs/`.
    FullName(&'a BStr),
    /// A shortened ref name still in need of expansion, like `main` or `tags/v1.0`.
    PartialName(&'a BStr),
    /// A name containing a single `*`, located at byte offset `asterisk_pos`.
    Glob { name: &'a BStr, asterisk_pos: usize },
    /// A pattern requiring full wildmatch semantics, e.g. multiple `*`, `?`, `[…]` or escapes.
    Pattern(&'a BStr),
    /// A fully spelled-out object id.
    Object(ObjectId),
}

/// The result of matching a [`Needle`] against an item name.
enum Match {
    /// There was no match.
    None,
    /// No additional data is provided as part of the match.
    Normal,
    /// The range of text to copy from the originating item name
    GlobRange(Range<usize>),
}

impl Match {
    // True unless this is `Match::None`.
    fn is_match(&self) -> bool {
        !matches!(self, Match::None)
    }
    // Convert this match into the `(matched, transformed-other-side)` tuple the
    // `Matcher` methods return, substituting the glob capture into `destination` if present.
    fn into_match_outcome<'a>(self, destination: Needle<'a>, item: Item<'_>) -> (bool, Option<Cow<'a, BStr>>) {
        let arg = match self {
            Match::None => return (false, None),
            Match::Normal => None,
            Match::GlobRange(range) => Some((range, item)),
        };
        (true, destination.to_bstr_replace(arg).into())
    }
}
impl<'a> Needle<'a> {
    /// Match `item`'s full ref name against this needle.
    #[inline]
    fn matches(&self, item: Item<'_>) -> Match {
        match self {
            Needle::FullName(name) => {
                if *name == item.full_ref_name {
                    Match::Normal
                } else {
                    Match::None
                }
            }
            // Try the standard expansions of the partial name until one equals the item's full name.
            Needle::PartialName(name) => crate::spec::expand_partial_name(name, |expanded| {
                (expanded == item.full_ref_name).then_some(Match::Normal)
            })
            .unwrap_or(Match::None),
            Needle::Glob { name, asterisk_pos } => {
                // Compare the literal prefix before the `*`…
                match item.full_ref_name.get(..*asterisk_pos) {
                    Some(full_name_portion) if full_name_portion != name[..*asterisk_pos] => {
                        return Match::None;
                    }
                    None => return Match::None,
                    _ => {}
                }
                // …then the literal suffix after it; whatever remains in the middle is the glob capture.
                let tail = &name[*asterisk_pos + 1..];
                if !item.full_ref_name.ends_with(tail) {
                    return Match::None;
                }
                let end = item.full_ref_name.len() - tail.len();
                Match::GlobRange(*asterisk_pos..end)
            }
            Needle::Pattern(pattern) => {
                if gix_glob::wildmatch(
                    pattern,
                    item.full_ref_name,
                    gix_glob::wildmatch::Mode::NO_MATCH_SLASH_LITERAL,
                ) {
                    Match::Normal
                } else {
                    Match::None
                }
            }
            Needle::Object(id) => {
                if *id == item.target {
                    return Match::Normal;
                }
                // Also match the object an annotated tag points to, if any.
                match item.object {
                    Some(object) if object == *id => Match::Normal,
                    _ => Match::None,
                }
            }
        }
    }
    /// Render this needle as a name, substituting a glob capture taken from `range` if given.
    fn to_bstr_replace(self, range: Option<(Range<usize>, Item<'_>)>) -> Cow<'a, BStr> {
        match (self, range) {
            (Needle::FullName(name), None) => Cow::Borrowed(name),
            (Needle::PartialName(name), None) => Cow::Owned({
                // Expand a partial name: `tags/…` and `remotes/…` live directly under `refs/`,
                // everything else is placed under `refs/heads/`.
                let mut base: BString = "refs/".into();
                if !(name.starts_with(b"tags/") || name.starts_with(b"remotes/")) {
                    base.push_str("heads/");
                }
                base.push_str(name);
                base
            }),
            (Needle::Glob { name, asterisk_pos }, Some((range, item))) => {
                // Replace the `*` with the portion of the item's name captured by the glob.
                let mut buf = Vec::with_capacity(name.len() + range.len() - 1);
                buf.push_str(&name[..asterisk_pos]);
                buf.push_str(&item.full_ref_name[range]);
                buf.push_str(&name[asterisk_pos + 1..]);
                Cow::Owned(buf.into())
            }
            (Needle::Object(id), None) => {
                // NOTE(review): spelled-out object ids are rendered as branches under
                // `refs/heads/<hex>` — confirm this is the intended ref-like representation.
                let mut name = id.to_string();
                name.insert_str(0, "refs/heads/");
                Cow::Owned(name.into())
            }
            (Needle::Pattern(name), None) => Cow::Borrowed(name),
            (Needle::Glob { .. }, None) => unreachable!("BUG: no range provided for glob pattern"),
            (Needle::Pattern(_), Some(_)) => {
                unreachable!("BUG: range provided for pattern, but patterns don't use ranges")
            }
            (_, Some(_)) => {
                unreachable!("BUG: range provided even though needle wasn't a glob. Globs are symmetric.")
            }
        }
    }
    /// Render this needle as a name without any glob substitution.
    pub fn to_bstr(self) -> Cow<'a, BStr> {
        self.to_bstr_replace(None)
    }
}
impl<'a> From<&'a BStr> for Needle<'a> {
    // Classify `v` by shape: a glob if it contains `*`, a full name if it starts
    // with `refs/`, an object id if it parses as hex, and a partial name otherwise.
    fn from(v: &'a BStr) -> Self {
        match v.find_byte(b'*') {
            Some(asterisk_pos) => Needle::Glob { name: v, asterisk_pos },
            None if v.starts_with(b"refs/") => Needle::FullName(v),
            None => match gix_hash::ObjectId::from_hex(v) {
                Ok(id) => Needle::Object(id),
                Err(_) => Needle::PartialName(v),
            },
        }
    }
}
impl<'a> From<RefSpecRef<'a>> for Matcher<'a> {
    fn from(v: RefSpecRef<'a>) -> Self {
        let mut matcher = Matcher {
            lhs: v.src.map(Into::into),
            rhs: v.dst.map(Into::into),
        };
        // One-sided specs with complex patterns fall back to full wildmatch semantics,
        // as there is no destination that would require the single-`*` glob form.
        if matcher.rhs.is_none() {
            if let Some(source) = v.src.filter(|src| must_use_pattern_matching(src)) {
                matcher.lhs = Some(Needle::Pattern(source));
            }
        }
        matcher
    }
}
/// Check if a pattern is complex enough to require wildmatch instead of simple glob matching.
///
/// That is the case for more than one `*`, or any of `?`, `[`, `]` and `\`.
fn must_use_pattern_matching(pattern: &BStr) -> bool {
    let mut asterisks = 0usize;
    for &byte in pattern.iter() {
        match byte {
            b'*' => {
                asterisks += 1;
                if asterisks > 1 {
                    return true;
                }
            }
            b'?' | b'[' | b']' | b'\\' => return true,
            _ => {}
        }
    }
    false
}

View File

@@ -0,0 +1,144 @@
use std::collections::BTreeMap;
use bstr::BString;
use crate::{
match_group::{match_lhs, Source},
RefSpec,
};
/// The error returned by [outcome validation](match_lhs::Outcome::validated()).
#[derive(Debug)]
pub struct Error {
    /// All issues discovered during validation.
    pub issues: Vec<Issue>,
}
impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Adjust singular/plural phrasing to the number of issues.
        let qualifier = if self.issues.len() == 1 {
            "issue that prevents"
        } else {
            "issues that prevent"
        };
        let details: Vec<String> = self.issues.iter().map(ToString::to_string).collect();
        write!(
            f,
            "Found {} {} the refspec mapping to be used: \n\t{}",
            self.issues.len(),
            qualifier,
            details.join("\n\t")
        )
    }
}

impl std::error::Error for Error {}
/// All possible issues found while validating matched mappings.
#[derive(Debug, PartialEq, Eq)]
pub enum Issue {
    /// Multiple sources try to write the same destination.
    ///
    /// Note that this issue doesn't take into consideration that these sources might contain the same object behind a reference.
    Conflict {
        /// The unenforced full name of the reference to be written.
        destination_full_ref_name: BString,
        /// The list of sources that map to this destination.
        sources: Vec<Source>,
        /// The list of specs that caused the mapping conflict, each matching the respective one in `sources` to allow both
        /// `sources` and `specs` to be zipped together.
        specs: Vec<BString>,
    },
}

impl std::fmt::Display for Issue {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Issue::Conflict {
                destination_full_ref_name,
                sources,
                specs,
            } => {
                // Render each conflicting source together with the spec that produced it.
                write!(
                    f,
                    "Conflicting destination {destination_full_ref_name:?} would be written by {}",
                    sources
                        .iter()
                        .zip(specs.iter())
                        .map(|(src, spec)| format!("{src} ({spec:?})"))
                        .collect::<Vec<_>>()
                        .join(", ")
                )
            }
        }
    }
}

/// All possible fixes corrected while validating matched mappings.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Fix {
    /// Removed a mapping that contained a partial destination entirely.
    MappingWithPartialDestinationRemoved {
        /// The destination ref name that was ignored.
        name: BString,
        /// The spec that defined the mapping.
        spec: RefSpec,
    },
}
impl match_lhs::Outcome<'_, '_> {
    /// Validate all mappings or dissolve them into an error stating the discovered issues.
    /// Return `(modified self, issues)` providing a fixed-up set of mappings in `self` with the fixed `issues`
    /// provided as part of it.
    /// Terminal issues are communicated using the [`Error`] type accordingly.
    pub fn validated(mut self) -> Result<(Self, Vec<Fix>), Error> {
        // Group all distinct sources by the destination they would write.
        let mut sources_by_destinations = BTreeMap::new();
        for (dst, (spec_index, src)) in self
            .mappings
            .iter()
            .filter_map(|m| m.rhs.as_ref().map(|dst| (dst.as_ref(), (m.spec_index, &m.lhs))))
        {
            let sources = sources_by_destinations.entry(dst).or_insert_with(Vec::new);
            // The same source reaching a destination via multiple specs is not a conflict.
            if !sources.iter().any(|(_, lhs)| lhs == &src) {
                sources.push((spec_index, src));
            }
        }
        // Any destination with more than one distinct source is a conflict.
        let mut issues = Vec::new();
        for (dst, conflicting_sources) in sources_by_destinations.into_iter().filter(|(_, v)| v.len() > 1) {
            issues.push(Issue::Conflict {
                destination_full_ref_name: dst.to_owned(),
                specs: conflicting_sources
                    .iter()
                    .map(|(spec_idx, _)| self.group.specs[*spec_idx].to_bstring())
                    .collect(),
                sources: conflicting_sources
                    .into_iter()
                    .map(|(_, src)| src.clone().into_owned())
                    .collect(),
            });
        }
        if !issues.is_empty() {
            Err(Error { issues })
        } else {
            // Drop mappings whose destination isn't a full ref name (or `HEAD`), recording
            // each removal as a `Fix` rather than treating it as an error.
            let mut fixed = Vec::new();
            let group = &self.group;
            self.mappings.retain(|m| match m.rhs.as_ref() {
                Some(dst) => {
                    if dst.starts_with(b"refs/") || dst.as_ref() == "HEAD" {
                        true
                    } else {
                        fixed.push(Fix::MappingWithPartialDestinationRemoved {
                            name: dst.as_ref().to_owned(),
                            spec: group.specs[m.spec_index].to_owned(),
                        });
                        false
                    }
                }
                None => true,
            });
            Ok((self, fixed))
        }
    }
}

258
src-refspec/src/parse.rs Normal file
View File

@@ -0,0 +1,258 @@
/// The error returned by the [`parse()`][crate::parse()] function.
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
#[error("Empty refspecs are invalid")]
Empty,
#[error("Negative refspecs cannot have destinations as they exclude sources")]
NegativeWithDestination,
#[error("Negative specs must not be empty")]
NegativeEmpty,
#[error("Negative specs must be object hashes")]
NegativeObjectHash,
#[error("Negative specs must be full ref names, starting with \"refs/\"")]
NegativePartialName,
#[error("Negative glob patterns are not allowed")]
NegativeGlobPattern,
#[error("Fetch destinations must be ref-names, like 'HEAD:refs/heads/branch'")]
InvalidFetchDestination,
#[error("Cannot push into an empty destination")]
PushToEmpty,
#[error("glob patterns may only involved a single '*' character, found {pattern:?}")]
PatternUnsupported { pattern: bstr::BString },
#[error("Both sides of the specification need a pattern, like 'a/*:b/*'")]
PatternUnbalanced,
#[error(transparent)]
ReferenceName(#[from] gix_validate::reference::name::Error),
#[error(transparent)]
RevSpec(#[from] gix_revision::spec::parse::Error),
}
/// Define how the parsed refspec should be used.
#[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone, Hash, Debug)]
pub enum Operation {
    /// The `src` side is local and the `dst` side is remote.
    Push,
    /// The `src` side is remote and the `dst` side is local.
    Fetch,
}
pub(crate) mod function {
    use crate::{
        parse::{Error, Operation},
        types::Mode,
        RefSpecRef,
    };
    use bstr::{BStr, ByteSlice};
    use gix_error::Exn;

    /// Parse `spec` for use in `operation` and return it if it is valid.
    pub fn parse(mut spec: &BStr, operation: Operation) -> Result<RefSpecRef<'_>, Error> {
        // An empty fetch spec (or one with an empty source) means "fetch HEAD".
        fn fetch_head_only(mode: Mode) -> RefSpecRef<'static> {
            RefSpecRef {
                mode,
                op: Operation::Fetch,
                src: Some("HEAD".into()),
                dst: None,
            }
        }
        // A leading `^` negates the spec, a leading `+` forces updates.
        let mode = match spec.first() {
            Some(&b'^') => {
                spec = &spec[1..];
                Mode::Negative
            }
            Some(&b'+') => {
                spec = &spec[1..];
                Mode::Force
            }
            Some(_) => Mode::Normal,
            None => {
                return match operation {
                    Operation::Push => Err(Error::Empty),
                    Operation::Fetch => Ok(fetch_head_only(Mode::Normal)),
                }
            }
        };
        // Split at the first `:` into source and destination, normalizing empty sides.
        let (mut src, dst) = match spec.find_byte(b':') {
            Some(pos) => {
                if mode == Mode::Negative {
                    return Err(Error::NegativeWithDestination);
                }
                let (src, dst) = spec.split_at(pos);
                let dst = &dst[1..];
                let src = (!src.is_empty()).then(|| src.as_bstr());
                let dst = (!dst.is_empty()).then(|| dst.as_bstr());
                match (src, dst) {
                    (None, None) => match operation {
                        Operation::Push => (None, None),
                        Operation::Fetch => (Some("HEAD".into()), None),
                    },
                    (None, Some(dst)) => match operation {
                        Operation::Push => (None, Some(dst)),
                        Operation::Fetch => (Some("HEAD".into()), Some(dst)),
                    },
                    (Some(src), None) => match operation {
                        Operation::Push => return Err(Error::PushToEmpty),
                        Operation::Fetch => (Some(src), None),
                    },
                    (Some(src), Some(dst)) => (Some(src), Some(dst)),
                }
            }
            None => {
                let src = (!spec.is_empty()).then_some(spec);
                if Operation::Fetch == operation && mode != Mode::Negative && src.is_none() {
                    return Ok(fetch_head_only(mode));
                } else {
                    (src, None)
                }
            }
        };
        // `@` is a shorthand for `HEAD`.
        if let Some(spec) = src.as_mut() {
            if *spec == "@" {
                *spec = "HEAD".into();
            }
        }
        let is_one_sided = dst.is_none();
        // Rev-spec syntax is only allowed for push sources that have a destination.
        let (src, src_had_pattern) = validated(src, operation == Operation::Push && dst.is_some(), is_one_sided)?;
        let (dst, dst_had_pattern) = validated(dst, false, false)?;
        // For one-sided refspecs, we don't need to check for pattern balance
        if !is_one_sided && mode != Mode::Negative && src_had_pattern != dst_had_pattern {
            return Err(Error::PatternUnbalanced);
        }
        // Negative specs are restricted: full ref names (or HEAD) only — no globs, no object ids.
        if mode == Mode::Negative {
            match src {
                Some(spec) => {
                    if src_had_pattern {
                        return Err(Error::NegativeGlobPattern);
                    } else if looks_like_object_hash(spec) {
                        return Err(Error::NegativeObjectHash);
                    } else if !spec.starts_with(b"refs/") && spec != "HEAD" {
                        return Err(Error::NegativePartialName);
                    }
                }
                None => return Err(Error::NegativeEmpty),
            }
        }
        Ok(RefSpecRef {
            op: operation,
            mode,
            src,
            dst,
        })
    }

    // Heuristic: all hex digits and at least as long as the shortest supported hash.
    fn looks_like_object_hash(spec: &BStr) -> bool {
        spec.len() >= gix_hash::Kind::shortest().len_in_hex() && spec.iter().all(u8::is_ascii_hexdigit)
    }

    /// Validate a single side of a refspec, returning `(side, had_glob_pattern)`.
    ///
    /// `allow_revspecs` permits rev-spec syntax (e.g. `main~1`) for non-glob sources.
    fn validated(
        spec: Option<&BStr>,
        allow_revspecs: bool,
        is_one_sided: bool,
    ) -> Result<(Option<&BStr>, bool), Error> {
        match spec {
            Some(spec) => {
                let glob_count = spec.iter().filter(|b| **b == b'*').take(2).count();
                if glob_count > 1 {
                    // For one-sided refspecs, allow any number of globs without validation
                    // NOTE(review): git rejects more than one `*` on either side of any refspec —
                    // confirm this relaxation for one-sided specs is intended.
                    if !is_one_sided {
                        return Err(Error::PatternUnsupported { pattern: spec.into() });
                    }
                }
                // Check if there are any globs (one or more asterisks)
                let has_globs = glob_count > 0;
                if has_globs {
                    // For one-sided refspecs, skip validation of glob patterns
                    if !is_one_sided {
                        // Validate as a ref name with the `*` replaced by a placeholder character.
                        let mut buf = smallvec::SmallVec::<[u8; 256]>::with_capacity(spec.len());
                        buf.extend_from_slice(spec);
                        let glob_pos = buf.find_byte(b'*').expect("glob present");
                        buf[glob_pos] = b'a';
                        gix_validate::reference::name_partial(buf.as_bstr())?;
                    }
                } else {
                    gix_validate::reference::name_partial(spec)
                        .map_err(Error::from)
                        .or_else(|err| {
                            if allow_revspecs {
                                // Syntax-check as a rev-spec using a no-op delegate; nothing is resolved.
                                gix_revision::spec::parse(spec, &mut super::revparse::Noop).map_err(Exn::into_inner)?;
                                Ok(spec)
                            } else {
                                Err(err)
                            }
                        })?;
                }
                Ok((Some(spec), has_globs))
            }
            None => Ok((None, false)),
        }
    }
}
mod revparse {
    use bstr::BStr;
    use gix_error::Exn;
    use gix_revision::spec::parse::delegate::{
        Kind, Navigate, PeelTo, PrefixHint, ReflogLookup, Revision, SiblingBranch, Traversal,
    };

    /// A delegate that accepts every rev-spec construct without resolving anything.
    /// It exists purely to let `gix_revision::spec::parse` act as a syntax check.
    pub(crate) struct Noop;

    impl Revision for Noop {
        fn find_ref(&mut self, _name: &BStr) -> Result<(), Exn> {
            Ok(())
        }
        fn disambiguate_prefix(&mut self, _prefix: gix_hash::Prefix, _hint: Option<PrefixHint<'_>>) -> Result<(), Exn> {
            Ok(())
        }
        fn reflog(&mut self, _query: ReflogLookup) -> Result<(), Exn> {
            Ok(())
        }
        fn nth_checked_out_branch(&mut self, _branch_no: usize) -> Result<(), Exn> {
            Ok(())
        }
        fn sibling_branch(&mut self, _kind: SiblingBranch) -> Result<(), Exn> {
            Ok(())
        }
    }

    impl Navigate for Noop {
        fn traverse(&mut self, _kind: Traversal) -> Result<(), Exn> {
            Ok(())
        }
        fn peel_until(&mut self, _kind: PeelTo<'_>) -> Result<(), Exn> {
            Ok(())
        }
        fn find(&mut self, _regex: &BStr, _negated: bool) -> Result<(), Exn> {
            Ok(())
        }
        fn index_lookup(&mut self, _path: &BStr, _stage: u8) -> Result<(), Exn> {
            Ok(())
        }
    }

    impl Kind for Noop {
        fn kind(&mut self, _kind: gix_revision::spec::Kind) -> Result<(), Exn> {
            Ok(())
        }
    }

    impl gix_revision::spec::parse::Delegate for Noop {
        fn done(&mut self) -> Result<(), Exn> {
            Ok(())
        }
    }
}

259
src-refspec/src/spec.rs Normal file
View File

@@ -0,0 +1,259 @@
use bstr::{BStr, BString, ByteSlice};
use crate::{
instruction::{Fetch, Push},
parse::Operation,
types::Mode,
Instruction, RefSpec, RefSpecRef,
};
/// Conversion. Use the [`RefSpecRef`][RefSpec::to_ref()] type for more usage options.
impl RefSpec {
    /// Return ourselves as reference type, borrowing our owned buffers.
    pub fn to_ref(&self) -> RefSpecRef<'_> {
        RefSpecRef {
            mode: self.mode,
            op: self.op,
            src: self.src.as_deref(),
            dst: self.dst.as_deref(),
        }
    }

    /// Return true if the spec starts with a `+` and thus forces setting the reference.
    pub fn allow_non_fast_forward(&self) -> bool {
        self.mode == Mode::Force
    }
}
mod impls {
    use std::{
        cmp::Ordering,
        hash::{Hash, Hasher},
    };

    use crate::{RefSpec, RefSpecRef};

    impl From<RefSpecRef<'_>> for RefSpec {
        fn from(v: RefSpecRef<'_>) -> Self {
            v.to_owned()
        }
    }

    // Hashing, equality and ordering are all defined in terms of the derived
    // `Instruction`, so specs that are written differently but mean the same
    // (e.g. `a` and `a:a` when pushing) compare equal and hash alike.
    impl Hash for RefSpec {
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.to_ref().hash(state);
        }
    }

    impl Hash for RefSpecRef<'_> {
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.instruction().hash(state);
        }
    }

    impl PartialEq for RefSpec {
        fn eq(&self, other: &Self) -> bool {
            self.to_ref().eq(&other.to_ref())
        }
    }

    impl PartialEq for RefSpecRef<'_> {
        fn eq(&self, other: &Self) -> bool {
            self.instruction().eq(&other.instruction())
        }
    }

    impl PartialOrd for RefSpecRef<'_> {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    // Delegates to `RefSpecRef`'s ordering instead of `Self::cmp`, hence the lint override.
    #[allow(clippy::non_canonical_partial_ord_impl)]
    impl PartialOrd for RefSpec {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.to_ref().cmp(&other.to_ref()))
        }
    }

    impl Ord for RefSpecRef<'_> {
        fn cmp(&self, other: &Self) -> Ordering {
            self.instruction().cmp(&other.instruction())
        }
    }

    impl Ord for RefSpec {
        fn cmp(&self, other: &Self) -> Ordering {
            self.to_ref().cmp(&other.to_ref())
        }
    }
}
/// Access
impl<'a> RefSpecRef<'a> {
    /// Return the left-hand side of the spec, typically the source.
    /// It takes many different forms so don't rely on this being a ref name.
    ///
    /// It's not present in case of deletions.
    pub fn source(&self) -> Option<&BStr> {
        self.src
    }

    /// Return the right-hand side of the spec, typically the destination.
    /// It takes many different forms so don't rely on this being a ref name.
    ///
    /// It's not present in case of source-only specs.
    pub fn destination(&self) -> Option<&BStr> {
        self.dst
    }

    /// Always returns the remote side, whose actual side in the refspec depends on how it was parsed.
    pub fn remote(&self) -> Option<&BStr> {
        // When pushing, the remote side is the destination; when fetching, it is the source.
        match self.op {
            Operation::Push => self.dst,
            Operation::Fetch => self.src,
        }
    }

    /// Always returns the local side, whose actual side in the refspec depends on how it was parsed.
    pub fn local(&self) -> Option<&BStr> {
        match self.op {
            Operation::Push => self.src,
            Operation::Fetch => self.dst,
        }
    }

    /// Derive the prefix from the [`source`][Self::source()] side of this spec if this is a fetch spec,
    /// or the [`destination`][Self::destination()] side if it is a push spec, if it is possible to do so without ambiguity.
    ///
    /// This means it starts with `refs/`. Note that it won't contain more than two components, like `refs/heads/`
    pub fn prefix(&self) -> Option<&BStr> {
        // Negative specs exclude refs and thus don't contribute a prefix.
        if self.mode == Mode::Negative {
            return None;
        }
        let source = match self.op {
            Operation::Fetch => self.source(),
            Operation::Push => self.destination(),
        }?;
        // `HEAD` is special-cased and acts as its own prefix.
        if source == "HEAD" {
            return source.into();
        }
        let suffix = source.strip_prefix(b"refs/")?;
        let slash_pos = suffix.find_byte(b'/')?;
        // Keep everything up to and including the slash after the second component,
        // e.g. `refs/heads/main` -> `refs/heads/`.
        let prefix = source[..="refs/".len() + slash_pos].as_bstr();
        // A glob inside the prefix would make it ambiguous, so return `None` then.
        (!prefix.contains(&b'*')).then_some(prefix)
    }

    /// As opposed to [`prefix()`][Self::prefix], if the latter is `None` it will expand to all possible prefixes and place them in `out`.
    ///
    /// Note that only the `source` side is considered.
    pub fn expand_prefixes(&self, out: &mut Vec<BString>) {
        match self.prefix() {
            Some(prefix) => out.push(prefix.into()),
            None => {
                let source = match match self.op {
                    Operation::Fetch => self.source(),
                    Operation::Push => self.destination(),
                } {
                    Some(source) => source,
                    None => return,
                };
                if let Some(rest) = source.strip_prefix(b"refs/") {
                    // A full ref name with a single component (e.g. `refs/foo`) is its own prefix;
                    // deeper names were already handled by `prefix()` above (or contain globs).
                    if !rest.contains(&b'/') {
                        out.push(source.into());
                    }
                    return;
                } else if gix_hash::ObjectId::from_hex(source).is_ok() {
                    // Plain object ids have no ref prefix at all.
                    return;
                }
                // Partial names may live in any of the standard ref locations.
                // The callback always returns `None` so every candidate is collected.
                expand_partial_name(source, |expanded| {
                    out.push(expanded.into());
                    None::<()>
                });
            }
        }
    }

    /// Transform the state of the refspec into an instruction making clear what to do with it.
    pub fn instruction(&self) -> Instruction<'a> {
        match self.op {
            Operation::Fetch => match (self.mode, self.src, self.dst) {
                (Mode::Normal | Mode::Force, Some(src), None) => Instruction::Fetch(Fetch::Only { src }),
                (Mode::Normal | Mode::Force, Some(src), Some(dst)) => Instruction::Fetch(Fetch::AndUpdate {
                    src,
                    dst,
                    allow_non_fast_forward: matches!(self.mode, Mode::Force),
                }),
                (Mode::Negative, Some(src), None) => Instruction::Fetch(Fetch::Exclude { src }),
                (mode, src, dest) => {
                    // The parser never produces other combinations, e.g. negative specs with destinations.
                    unreachable!(
                        "BUG: fetch instructions with {:?} {:?} {:?} are not possible",
                        mode, src, dest
                    )
                }
            },
            Operation::Push => match (self.mode, self.src, self.dst) {
                // A lone source pushes to a ref of the same name on the remote.
                (Mode::Normal | Mode::Force, Some(src), None) => Instruction::Push(Push::Matching {
                    src,
                    dst: src,
                    allow_non_fast_forward: matches!(self.mode, Mode::Force),
                }),
                (Mode::Normal | Mode::Force, None, Some(dst)) => {
                    Instruction::Push(Push::Delete { ref_or_pattern: dst })
                }
                (Mode::Normal | Mode::Force, None, None) => Instruction::Push(Push::AllMatchingBranches {
                    allow_non_fast_forward: matches!(self.mode, Mode::Force),
                }),
                (Mode::Normal | Mode::Force, Some(src), Some(dst)) => Instruction::Push(Push::Matching {
                    src,
                    dst,
                    allow_non_fast_forward: matches!(self.mode, Mode::Force),
                }),
                (Mode::Negative, Some(src), None) => Instruction::Push(Push::Exclude { src }),
                (mode, src, dest) => {
                    unreachable!(
                        "BUG: push instructions with {:?} {:?} {:?} are not possible",
                        mode, src, dest
                    )
                }
            },
        }
    }
}
/// Conversion
impl RefSpecRef<'_> {
    /// Convert this ref into a standalone, owned copy, cloning the borrowed sides.
    pub fn to_owned(&self) -> RefSpec {
        RefSpec {
            mode: self.mode,
            op: self.op,
            src: self.src.map(|src| src.to_owned()),
            dst: self.dst.map(|dst| dst.to_owned()),
        }
    }
}
/// Expand `name` into each full ref name that git would try when resolving a
/// partial name, in lookup order, invoking `cb` with every candidate and
/// returning the first non-`None` result, or `None` if the callback never yields one.
pub(crate) fn expand_partial_name<T>(name: &BStr, mut cb: impl FnMut(&BStr) -> Option<T>) -> Option<T> {
    use bstr::ByteVec;
    // (prefix, suffix) pairs in git's documented resolution order; the last
    // entry covers `refs/remotes/<name>/HEAD`.
    const CANDIDATES: &[(&str, &str)] = &[
        ("", ""),
        ("refs/", ""),
        ("refs/tags/", ""),
        ("refs/heads/", ""),
        ("refs/remotes/", ""),
        ("refs/remotes/", "/HEAD"),
    ];
    let mut candidate = BString::from(Vec::with_capacity(128));
    for (prefix, suffix) in CANDIDATES {
        candidate.clear();
        candidate.push_str(prefix);
        candidate.push_str(name);
        candidate.push_str(suffix);
        if let Some(res) = cb(candidate.as_ref()) {
            return Some(res);
        }
    }
    None
}

21
src-refspec/src/types.rs Normal file
View File

@@ -0,0 +1,21 @@
use crate::instruction;
/// The way to interpret a refspec.
#[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone, Hash, Debug)]
pub(crate) enum Mode {
    /// Apply standard rules for refspecs which are including refs with specific rules related to allowing fast forwards of destinations.
    Normal,
    /// Even though according to normal rules a non-fast-forward would be denied, override this and reset a ref forcefully in the destination.
    /// Corresponds to a leading `+` in the textual spec.
    Force,
    /// Instead of considering matching refs included, we consider them excluded. This applies only to the source side of a refspec.
    /// Corresponds to a leading `^` in the textual spec.
    Negative,
}
/// Tells what to do and is derived from a [`RefSpec`][crate::RefSpecRef].
///
/// Obtain it via [`RefSpecRef::instruction()`][crate::RefSpecRef::instruction()].
#[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone, Hash, Debug)]
pub enum Instruction<'a> {
    /// An instruction for pushing.
    Push(instruction::Push<'a>),
    /// An instruction for fetching.
    Fetch(instruction::Fetch<'a>),
}

74
src-refspec/src/write.rs Normal file
View File

@@ -0,0 +1,74 @@
use bstr::BString;
use crate::{
instruction::{Fetch, Push},
Instruction, RefSpecRef,
};
impl RefSpecRef<'_> {
    /// Reproduce ourselves in parseable form.
    pub fn to_bstring(&self) -> BString {
        // Serialization is entirely defined by the derived instruction.
        self.instruction().to_bstring()
    }

    /// Serialize ourselves in a parseable format to `out`.
    pub fn write_to(&self, out: &mut dyn std::io::Write) -> std::io::Result<()> {
        self.instruction().write_to(out)
    }
}
impl Instruction<'_> {
    /// Reproduce ourselves in parseable form.
    pub fn to_bstring(&self) -> BString {
        let mut buf = Vec::with_capacity(128);
        self.write_to(&mut buf).expect("no io error");
        buf.into()
    }

    /// Serialize ourselves in a parseable format to `out`.
    pub fn write_to(&self, out: &mut dyn std::io::Write) -> std::io::Result<()> {
        // Shared shape of push-matching and fetch-and-update specs: `[+]src:dst`.
        fn write_src_dst(
            out: &mut dyn std::io::Write,
            force: bool,
            src: &[u8],
            dst: &[u8],
        ) -> std::io::Result<()> {
            if force {
                out.write_all(b"+")?;
            }
            out.write_all(src)?;
            out.write_all(b":")?;
            out.write_all(dst)
        }

        match self {
            Instruction::Push(Push::Matching {
                src,
                dst,
                allow_non_fast_forward,
            }) => write_src_dst(out, *allow_non_fast_forward, src, dst),
            Instruction::Fetch(Fetch::AndUpdate {
                src,
                dst,
                allow_non_fast_forward,
            }) => write_src_dst(out, *allow_non_fast_forward, src, dst),
            Instruction::Push(Push::AllMatchingBranches { allow_non_fast_forward }) => {
                // `[+]:` pushes all matching branches.
                if *allow_non_fast_forward {
                    out.write_all(b"+")?;
                }
                out.write_all(b":")
            }
            Instruction::Push(Push::Delete { ref_or_pattern }) => {
                // `:<ref>` deletes on the remote.
                out.write_all(b":")?;
                out.write_all(ref_or_pattern)
            }
            Instruction::Fetch(Fetch::Only { src }) => out.write_all(src),
            Instruction::Fetch(Fetch::Exclude { src }) | Instruction::Push(Push::Exclude { src }) => {
                // Negative specs are prefixed with `^`.
                out.write_all(b"^")?;
                out.write_all(src)
            }
        }
    }
}

View File

@@ -0,0 +1 @@
/match_baseline.tar

Binary file not shown.

View File

@@ -0,0 +1,90 @@
#!/usr/bin/env bash
set -eu -o pipefail
git init;

# Run a dry-run `git fetch` against `origin` with the given refspecs and append
# both its verbose output and the spec list to `baseline.git`, which the Rust
# tests later compare against. Fetch failures are recorded, not fatal.
function baseline() {
  {
    git fetch --refmap= --dry-run -v origin "$@" 2>&1 || :
    echo specs: "$@"
  } >> baseline.git
}

# Build a small upstream repository with several branches and tags to fetch from.
mkdir base
(cd base
  git init
  touch file
  git add file
  git commit -m "initial commit"
  git tag -m "message" annotated-v0.0
  git checkout -b f1
  git commit -m "f1" --allow-empty
  git tag v0.0-f1
  git checkout -b f2 main
  git commit -m "f2" --allow-empty
  git tag v0.0-f2
  git checkout -b f3 main
  git commit -m "f3" --allow-empty
  git tag v0.0-f3
  git checkout -b sub/f4 main
  git checkout -b sub/subdir/f5 main
  git checkout -b suub/f6 main
)

git clone --shared ./base clone
(cd clone
  # Record the advertised server refs; they are the matching input for the tests.
  git ls-remote 2>&1 > remote-refs.list
  baseline "refs/heads/main"
  baseline "heads/main"
  baseline "main"
  baseline "v0.0-f1"
  baseline "tags/v0.0-f2"
  baseline "78b1c1be9421b33a49a7a8176d93eeeafa112da1"
  baseline "9d2fab1a0ba3585d0bc50922bfdd04ebb59361df"
  baseline "78b1c1be9421b33a49a7a8176d93eeeafa112da1:special"
  baseline "78b1c1be9421b33a49a7a8176d93eeeafa112da1:1111111111111111111111111111111111111111"
  baseline "9d2fab1a0ba3585d0bc50922bfdd04ebb59361df:tags/special"
  baseline "9d2fab1a0ba3585d0bc50922bfdd04ebb59361df:refs/tags/special"
  baseline "f1:origin/f1"
  baseline "f1:remotes/origin/f1"
  baseline "f1:notes/f1"
  baseline "+refs/heads/*:refs/remotes/origin/*"
  baseline "refs/heads/*1:refs/remotes/origin/*1"
  baseline "refs/heads/f*:refs/remotes/origin/a*"
  baseline "refs/heads/*/f6:refs/remotes/origin/*/f6"
  baseline "main" "f1"
  baseline "heads/main" "heads/f1"
  baseline "refs/heads/main" "refs/heads/f1"
  baseline "refs/heads/*:refs/remotes/origin/*" "^main"
  baseline "heads/f1" "f2" "refs/heads/f3" "heads/main"
  baseline "f*:a*" "refs/heads/main"
  baseline "refs/heads/f*:refs/remotes/origin/a*" "^f1"
  baseline "refs/heads/f*:refs/remotes/origin/a*" "^refs/heads/f1"
  baseline "^heads/f2" "refs/heads/f*:refs/remotes/origin/a*"
  baseline "heads/f2" "^refs/heads/f*:refs/remotes/origin/a*"
  baseline "^refs/heads/f2" "refs/heads/f*:refs/remotes/origin/a*"
  baseline "^main" "refs/heads/*:refs/remotes/origin/*"
  baseline "^refs/heads/main" "refs/heads/*:refs/remotes/origin/*"
  baseline "refs/heads/*:refs/remotes/origin/*" "^refs/heads/main"
  baseline "refs/heads/*:refs/remotes/origin/*" "refs/heads/main:refs/remotes/new-origin/main"
  baseline "refs/heads/*:refs/remotes/origin/*" "refs/heads/main:refs/remotes/origin/main"
  baseline "refs/heads/f1:refs/remotes/origin/conflict" "refs/heads/f2:refs/remotes/origin/conflict"
  baseline "refs/heads/f1:refs/remotes/origin/conflict2" "refs/heads/f2:refs/remotes/origin/conflict2" "refs/heads/f1:refs/remotes/origin/conflict" "refs/heads/f2:refs/remotes/origin/conflict" "refs/heads/f3:refs/remotes/origin/conflict"
  baseline "refs/heads/f1:refs/remotes/origin/same" "refs/tags/v0.0-f1:refs/remotes/origin/same" # same object, not technically a problem but git flags it anyway
  baseline "refs/tags/*:refs/remotes/origin/*" "refs/heads/*:refs/remotes/origin/*"
  baseline "refs/tags/*:refs/tags/*"
  baseline 'refs/heads/f*:foo/f*' 'f1:f1'
  baseline "+refs/heads/*:refs/remotes/origin/*" "refs/heads/f1:refs/remotes/origin/f2" "refs/heads/f2:refs/remotes/origin/f1"
  baseline ':refs/heads/f1'
  baseline ':f1'
  baseline ':'
  baseline 'HEAD:'
  baseline '@:'
  baseline '@:f1'
  baseline '@:HEAD'
)

View File

@@ -0,0 +1,132 @@
#!/usr/bin/env bash
set -eu -o pipefail
git init;

# Configure a throwaway remote with a single `$kind` (fetch|push) refspec and
# record "<kind> <refspec>" plus the exit status of `git ls-remote` in
# `baseline.git`. A non-empty third argument forces the recorded status to 128,
# marking specs git itself would reject at use-time rather than parse-time.
function baseline() {
  local kind=$1
  local refspec=$2
  local force_fail=${3:-}
  cat <<EOF >.git/config
[remote "test"]
url = .
$kind = "$refspec"
EOF
  git ls-remote "test" && status=0 || status=$?
  if [ -n "$force_fail" ]; then
    status=128
  fi
  {
    echo "$kind" "$refspec"
    echo "$status"
  } >> baseline.git
}

# invalid
baseline push ''
baseline push '::'
baseline fetch '::'
baseline fetch '^a:'
baseline fetch '^a:b'
baseline fetch '^:'
baseline fetch '^:b'
baseline fetch '^'
baseline push '^'
baseline fetch '^refs/heads/qa/*/*'
baseline push '^refs/heads/qa/*/*'
baseline push 'main~1'
baseline fetch 'main~1'
baseline push 'main~1:other~1'
baseline push ':main~1'
baseline push 'refs/heads/*:refs/remotes/frotz'
baseline push 'refs/heads:refs/remotes/frotz/*'
baseline fetch 'refs/heads/*:refs/remotes/frotz'
baseline fetch 'refs/heads:refs/remotes/frotz/*'
baseline fetch 'refs/heads/main::refs/remotes/frotz/xyzzy'
baseline fetch 'refs/heads/maste :refs/remotes/frotz/xyzzy'
baseline fetch 'main~1:refs/remotes/frotz/backup'
baseline fetch 'HEAD~4:refs/remotes/frotz/new'
baseline push 'refs/heads/ nitfol'
baseline fetch 'refs/heads/ nitfol'
baseline push 'HEAD:'
baseline push 'refs/heads/ nitfol:'
baseline fetch 'refs/heads/ nitfol:'
baseline push ':refs/remotes/frotz/delete me'
baseline fetch ':refs/remotes/frotz/HEAD to me'
baseline fetch 'refs/heads/*/*/for-linus:refs/remotes/mine/*'
baseline push 'refs/heads/*/*/for-linus:refs/remotes/mine/*'
baseline fetch 'refs/heads/*g*/for-linus:refs/remotes/mine/*'
baseline push 'refs/heads/*g*/for-linus:refs/remotes/mine/*'
# a literal tab byte inside a ref name is invalid
bad=$(printf '\011tab')
baseline fetch "refs/heads/${bad}"
baseline fetch 'refs/*/*'
baseline fetch 'refs/heads/*'
baseline fetch '^refs/*/*'

# valid
baseline push '+:'
baseline push ':'
baseline fetch 55e825ebe8fd2ff78cad3826afb696b96b576a7e
baseline fetch ''
baseline fetch ':'
baseline fetch '+'
baseline push 'refs/heads/main:refs/remotes/frotz/xyzzy'
baseline fetch '55e825ebe8fd2ff78cad3826afb696b96b576a7e:refs/heads/main'
baseline push 'refs/heads/*:refs/remotes/frotz/*'
baseline fetch 'refs/heads/*:refs/remotes/frotz/*'
baseline fetch 'heads/main'
baseline fetch 'refs/heads/main:refs/remotes/frotz/xyzzy'
baseline push 'main~1:refs/remotes/frotz/backup'
baseline push 'HEAD~4:refs/remotes/frotz/new'
baseline push 'HEAD'
baseline fetch 'HEAD'
baseline push '@'
baseline fetch '@'
baseline push '^@'
baseline fetch '^@'
baseline fetch '^refs/heads/main'
baseline fetch '^refs/heads/*'
baseline fetch '^heads/main'
baseline fetch '^heads/*'
baseline push '+@'
baseline fetch '+@'
baseline fetch 'HEAD:'
baseline push ':refs/remotes/frotz/deleteme'
baseline fetch ':refs/remotes/frotz/HEAD-to-me'
baseline push ':a'
baseline push '+:a'
baseline fetch ':a'
baseline fetch '+:a'
baseline fetch 'refs/heads/*/for-linus:refs/remotes/mine/*-blah'
baseline push 'refs/heads/*/for-linus:refs/remotes/mine/*-blah'
baseline fetch 'refs/heads*/for-linus:refs/remotes/mine/*'
baseline push 'refs/heads*/for-linus:refs/remotes/mine/*'
baseline fetch 'refs/heads/*/for-linus:refs/remotes/mine/*'
baseline push 'refs/heads/*/for-linus:refs/remotes/mine/*'
# a valid multi-byte UTF-8 character in a ref name
good=$(printf '\303\204')
baseline fetch "refs/heads/${good}"

View File

@@ -0,0 +1,26 @@
use std::collections::{BTreeSet, HashSet};
use gix_refspec::{parse::Operation, RefSpec};
/// Two push specs that differ textually but are semantically identical:
/// when pushing, `src` alone is equivalent to `src:src`.
fn pair() -> Vec<RefSpec> {
    ["refs/heads/foo", "refs/heads/foo:refs/heads/foo"]
        .into_iter()
        .map(|spec| {
            gix_refspec::parse(spec.into(), Operation::Push)
                .unwrap()
                .to_owned()
        })
        .collect()
}
#[test]
fn cmp() {
    // Semantically equal specs collapse into a single ordered-set entry.
    let ordered: BTreeSet<_> = pair().into_iter().collect();
    assert_eq!(ordered.len(), 1);
}
#[test]
fn hash() {
    // Equal specs must hash identically, so the set deduplicates them.
    assert_eq!(pair().into_iter().collect::<HashSet<_>>().len(), 1);
}
#[test]
fn eq() {
    // Direct equality of the two semantically identical specs.
    let specs = pair();
    assert_eq!(specs[0], specs[1]);
}

View File

@@ -0,0 +1,10 @@
// NOTE(review): `extern crate core;` is an (likely IDE-generated) leftover that is
// not required in edition 2018+; kept as-is to avoid touching unrelated lines.
extern crate core;

use gix_testtools::Result;

mod impls;
mod match_group;
mod matching;
mod parse;
mod spec;
mod write;

View File

@@ -0,0 +1,343 @@
mod single {
    use crate::matching::baseline;

    #[test]
    fn fetch_only() {
        // Source-only specs: full names, partial names and tags agree with git…
        baseline::agrees_with_fetch_specs(Some("refs/heads/main"));
        baseline::agrees_with_fetch_specs(Some("heads/main"));
        baseline::agrees_with_fetch_specs(Some("main"));
        baseline::agrees_with_fetch_specs(Some("v0.0-f1"));
        baseline::agrees_with_fetch_specs(Some("tags/v0.0-f2"));
        // …while plain object ids match whenever the server owns the object.
        baseline::of_objects_always_matches_if_the_server_has_the_object(Some(
            "78b1c1be9421b33a49a7a8176d93eeeafa112da1",
        ));
        baseline::of_objects_always_matches_if_the_server_has_the_object(Some(
            "9d2fab1a0ba3585d0bc50922bfdd04ebb59361df",
        ));
    }

    #[test]
    fn fetch_and_update() {
        // Object ids with a destination land in the given (possibly partial) local branch.
        baseline::of_objects_with_destinations_are_written_into_given_local_branches(
            Some("78b1c1be9421b33a49a7a8176d93eeeafa112da1:special"),
            ["78b1c1be9421b33a49a7a8176d93eeeafa112da1:refs/heads/special"],
        );
        baseline::of_objects_with_destinations_are_written_into_given_local_branches(
            Some("78b1c1be9421b33a49a7a8176d93eeeafa112da1:1111111111111111111111111111111111111111"),
            ["78b1c1be9421b33a49a7a8176d93eeeafa112da1:refs/heads/1111111111111111111111111111111111111111"],
        );
        baseline::of_objects_with_destinations_are_written_into_given_local_branches(
            Some("9d2fab1a0ba3585d0bc50922bfdd04ebb59361df:tags/special"),
            ["9d2fab1a0ba3585d0bc50922bfdd04ebb59361df:refs/tags/special"],
        );
        baseline::of_objects_with_destinations_are_written_into_given_local_branches(
            Some("9d2fab1a0ba3585d0bc50922bfdd04ebb59361df:refs/tags/special"),
            ["9d2fab1a0ba3585d0bc50922bfdd04ebb59361df:refs/tags/special"],
        );
        // Partial destinations are expanded under refs/heads/ (or kept if already qualified).
        baseline::agrees_but_observable_refs_are_vague(Some("f1:origin/f1"), ["refs/heads/f1:refs/heads/origin/f1"]);
        baseline::agrees_but_observable_refs_are_vague(
            Some("f1:remotes/origin/f1"),
            ["refs/heads/f1:refs/remotes/origin/f1"],
        );
        baseline::agrees_but_observable_refs_are_vague(Some("f1:notes/f1"), ["refs/heads/f1:refs/heads/notes/f1"]);
        // Glob specs, with and without force, and with prefixes/suffixes around the glob.
        baseline::agrees_with_fetch_specs(Some("+refs/heads/*:refs/remotes/origin/*"));
        baseline::agrees_with_fetch_specs(Some("refs/heads/f*:refs/remotes/origin/a*"));
        baseline::agrees_with_fetch_specs(Some("refs/heads/*1:refs/remotes/origin/*1"));
    }
}
mod multiple {
    use gix_refspec::{
        match_group::validate::Fix,
        parse::{Error, Operation},
    };

    use crate::matching::baseline;

    #[test]
    fn fetch_only() {
        baseline::agrees_with_fetch_specs(["main", "f1"]);
        baseline::agrees_with_fetch_specs(["heads/main", "heads/f1"]);
        baseline::agrees_with_fetch_specs(["refs/heads/main", "refs/heads/f1"]);
        baseline::agrees_with_fetch_specs(["heads/f1", "f2", "refs/heads/f3", "heads/main"]);
        baseline::agrees_with_fetch_specs(["f*:a*", "refs/heads/main"]);
        baseline::agrees_with_fetch_specs([
            "refs/tags/*:refs/remotes/origin/*",
            "refs/heads/*:refs/remotes/origin/*",
        ]);
        baseline::agrees_with_fetch_specs(["refs/tags/*:refs/tags/*"]);
    }

    #[test]
    fn fetch_and_update_and_negations() {
        // Negative specs must use full ref names (or globs on full names)…
        baseline::invalid_specs_fail_to_parse_where_git_shows_surprising_behaviour(
            ["refs/heads/f*:refs/remotes/origin/a*", "^f1"],
            Error::NegativePartialName,
        );
        // …and may not carry a destination.
        baseline::invalid_specs_fail_to_parse_where_git_shows_surprising_behaviour(
            ["heads/f2", "^refs/heads/f*:refs/remotes/origin/a*"],
            Error::NegativeWithDestination,
        );
        baseline::agrees_with_fetch_specs(["refs/heads/f*:refs/remotes/origin/a*", "^refs/heads/f1"]);
        baseline::invalid_specs_fail_to_parse_where_git_shows_surprising_behaviour(
            ["^heads/f2", "refs/heads/f*:refs/remotes/origin/a*"],
            Error::NegativePartialName,
        );
        baseline::agrees_with_fetch_specs(["^refs/heads/f2", "refs/heads/f*:refs/remotes/origin/a*"]);
        baseline::invalid_specs_fail_to_parse_where_git_shows_surprising_behaviour(
            ["^main", "refs/heads/*:refs/remotes/origin/*"],
            Error::NegativePartialName,
        );
        // Order of negative vs. positive specs does not matter.
        baseline::agrees_with_fetch_specs(["^refs/heads/main", "refs/heads/*:refs/remotes/origin/*"]);
        baseline::agrees_with_fetch_specs(["refs/heads/*:refs/remotes/origin/*", "^refs/heads/main"]);
    }

    #[test]
    fn fetch_and_update_with_empty_lhs() {
        // An empty (or `@`) source means `HEAD`.
        baseline::agrees_but_observable_refs_are_vague([":refs/heads/f1"], ["HEAD:refs/heads/f1"]);
        baseline::agrees_but_observable_refs_are_vague([":f1"], ["HEAD:refs/heads/f1"]);
        baseline::agrees_but_observable_refs_are_vague(["@:f1"], ["HEAD:refs/heads/f1"]);
    }

    #[test]
    fn fetch_and_update_head_to_head_never_updates_actual_head_ref() {
        // `HEAD` as destination becomes a regular branch named HEAD, not the symbolic ref.
        baseline::agrees_but_observable_refs_are_vague(["@:HEAD"], ["HEAD:refs/heads/HEAD"]);
    }

    #[test]
    fn fetch_and_update_head_with_empty_rhs() {
        baseline::agrees_but_observable_refs_are_vague([":"], ["HEAD:"]);
        baseline::agrees_but_observable_refs_are_vague(["HEAD:"], ["HEAD:"]);
        baseline::agrees_but_observable_refs_are_vague(["@:"], ["HEAD:"]);
    }

    #[test]
    fn fetch_and_update_multiple_destinations() {
        // The same source may be written to multiple distinct destinations…
        baseline::agrees_with_fetch_specs([
            "refs/heads/*:refs/remotes/origin/*",
            "refs/heads/main:refs/remotes/new-origin/main",
        ]);
        baseline::agrees_with_fetch_specs([
            "refs/heads/*:refs/remotes/origin/*",
            "refs/heads/main:refs/remotes/origin/main", // duplicates are removed immediately.
        ]);
    }

    #[test]
    fn fetch_and_update_with_conflicts() {
        // Multiple sources writing the same destination are a validation error.
        baseline::agrees_with_fetch_specs_validation_error(
            [
                "refs/heads/f1:refs/remotes/origin/conflict",
                "refs/heads/f2:refs/remotes/origin/conflict",
            ],
            "Found 1 issue that prevents the refspec mapping to be used: \n\tConflicting destination \"refs/remotes/origin/conflict\" would be written by refs/heads/f1 (\"refs/heads/f1:refs/remotes/origin/conflict\"), refs/heads/f2 (\"refs/heads/f2:refs/remotes/origin/conflict\")",
        );
        baseline::agrees_with_fetch_specs_validation_error(
            [
                "refs/heads/f1:refs/remotes/origin/conflict2",
                "refs/heads/f2:refs/remotes/origin/conflict2",
                "refs/heads/f1:refs/remotes/origin/conflict",
                "refs/heads/f2:refs/remotes/origin/conflict",
                "refs/heads/f3:refs/remotes/origin/conflict",
            ],
            "Found 2 issues that prevent the refspec mapping to be used: \n\tConflicting destination \"refs/remotes/origin/conflict\" would be written by refs/heads/f1 (\"refs/heads/f1:refs/remotes/origin/conflict\"), refs/heads/f2 (\"refs/heads/f2:refs/remotes/origin/conflict\"), refs/heads/f3 (\"refs/heads/f3:refs/remotes/origin/conflict\")\n\tConflicting destination \"refs/remotes/origin/conflict2\" would be written by refs/heads/f1 (\"refs/heads/f1:refs/remotes/origin/conflict2\"), refs/heads/f2 (\"refs/heads/f2:refs/remotes/origin/conflict2\")",
        );
        baseline::agrees_with_fetch_specs_validation_error(
            [
                "refs/heads/f1:refs/remotes/origin/same",
                "refs/tags/v0.0-f1:refs/remotes/origin/same",
            ],
            "Found 1 issue that prevents the refspec mapping to be used: \n\tConflicting destination \"refs/remotes/origin/same\" would be written by refs/heads/f1 (\"refs/heads/f1:refs/remotes/origin/same\"), refs/tags/v0.0-f1 (\"refs/tags/v0.0-f1:refs/remotes/origin/same\")",
        );
        baseline::agrees_with_fetch_specs_validation_error(
            [
                "+refs/heads/*:refs/remotes/origin/*",
                "refs/heads/f1:refs/remotes/origin/f2",
                "refs/heads/f2:refs/remotes/origin/f1",
            ],
            "Found 2 issues that prevent the refspec mapping to be used: \n\tConflicting destination \"refs/remotes/origin/f1\" would be written by refs/heads/f1 (\"+refs/heads/*:refs/remotes/origin/*\"), refs/heads/f2 (\"refs/heads/f2:refs/remotes/origin/f1\")\n\tConflicting destination \"refs/remotes/origin/f2\" would be written by refs/heads/f2 (\"+refs/heads/*:refs/remotes/origin/*\"), refs/heads/f1 (\"refs/heads/f1:refs/remotes/origin/f2\")",
        );
    }

    #[test]
    fn fetch_and_update_with_fixes() {
        // Destinations outside of `refs/` from a glob mapping are dropped with a fix,
        // while explicit single mappings remain.
        let glob_spec = "refs/heads/f*:foo/f*";
        let glob_spec_ref = gix_refspec::parse(glob_spec.into(), Operation::Fetch).unwrap();
        baseline::agrees_and_applies_fixes(
            [glob_spec, "f1:f1"],
            [
                Fix::MappingWithPartialDestinationRemoved {
                    name: "foo/f1".into(),
                    spec: glob_spec_ref.to_owned(),
                },
                Fix::MappingWithPartialDestinationRemoved {
                    name: "foo/f2".into(),
                    spec: glob_spec_ref.to_owned(),
                },
                Fix::MappingWithPartialDestinationRemoved {
                    name: "foo/f3".into(),
                    spec: glob_spec_ref.to_owned(),
                },
            ],
            ["refs/heads/f1:refs/heads/f1"],
        );
    }
}
mod complex_globs {
    use bstr::BString;
    use gix_hash::ObjectId;
    use gix_refspec::{parse::Operation, MatchGroup};

    #[test]
    fn one_sided_complex_glob_patterns_can_be_parsed() {
        // The key change is that complex glob patterns with multiple asterisks
        // can now be parsed for one-sided refspecs
        let spec = gix_refspec::parse("refs/*/foo/*".into(), Operation::Fetch);
        assert!(spec.is_ok(), "Should parse complex glob pattern for one-sided refspec");
        let spec = gix_refspec::parse("refs/*/*/bar".into(), Operation::Fetch);
        assert!(
            spec.is_ok(),
            "Should parse complex glob pattern with multiple asterisks"
        );
        let spec = gix_refspec::parse("refs/heads/[a-z.]/release/*".into(), Operation::Fetch);
        assert!(spec.is_ok(), "Should parse complex glob pattern");
        // Two-sided refspecs with multiple asterisks should still fail
        let spec = gix_refspec::parse("refs/*/foo/*:refs/remotes/*".into(), Operation::Fetch);
        assert!(spec.is_err(), "Two-sided refspecs with multiple asterisks should fail");
    }

    #[test]
    fn one_sided_simple_glob_patterns_match() {
        // Test that simple glob patterns (one asterisk) work correctly with matching
        let refs = [
            new_ref("refs/heads/feature/foo", "1111111111111111111111111111111111111111"),
            new_ref("refs/heads/bugfix/bar", "2222222222222222222222222222222222222222"),
            new_ref("refs/tags/v1.0", "3333333333333333333333333333333333333333"),
            new_ref("refs/pull/123", "4444444444444444444444444444444444444444"),
        ];
        let items: Vec<_> = refs.iter().map(|r| r.to_item()).collect();

        // Test: refs/heads/* should match all refs under refs/heads/
        let spec = gix_refspec::parse("refs/heads/*".into(), Operation::Fetch).unwrap();
        let group = MatchGroup::from_fetch_specs([spec]);
        let outcome = group.match_lhs(items.iter().copied());
        insta::assert_debug_snapshot!(outcome.mappings, @r#"
        [
            Mapping {
                item_index: Some(
                    0,
                ),
                lhs: FullName(
                    "refs/heads/feature/foo",
                ),
                rhs: None,
                spec_index: 0,
            },
            Mapping {
                item_index: Some(
                    1,
                ),
                lhs: FullName(
                    "refs/heads/bugfix/bar",
                ),
                rhs: None,
                spec_index: 0,
            },
        ]
        "#);

        // Test: refs/tags/* should match all refs under refs/tags/
        let items: Vec<_> = refs.iter().map(|r| r.to_item()).collect();
        let spec = gix_refspec::parse("refs/tags/v[0-9]*".into(), Operation::Fetch).unwrap();
        let group = MatchGroup::from_fetch_specs([spec]);
        let outcome = group.match_lhs(items.iter().copied());
        insta::assert_debug_snapshot!(outcome.mappings, @r#"
        [
            Mapping {
                item_index: Some(
                    2,
                ),
                lhs: FullName(
                    "refs/tags/v1.0",
                ),
                rhs: None,
                spec_index: 0,
            },
        ]
        "#);
    }

    #[test]
    fn one_sided_glob_with_suffix_matches() {
        // Test that glob patterns with suffix work correctly
        let refs = [
            new_ref("refs/heads/feature", "1111111111111111111111111111111111111111"),
            new_ref("refs/heads/feat", "2222222222222222222222222222222222222222"),
            new_ref("refs/heads/main", "3333333333333333333333333333333333333333"),
        ];
        let items: Vec<_> = refs.iter().map(|r| r.to_item()).collect();

        // Test: refs/heads/feat* should match refs/heads/feature and refs/heads/feat
        let spec = gix_refspec::parse("refs/heads/feat*".into(), Operation::Fetch).unwrap();
        let group = MatchGroup::from_fetch_specs([spec]);
        let outcome = group.match_lhs(items.iter().copied());
        let mappings = outcome.mappings;
        insta::assert_debug_snapshot!(mappings, @r#"
        [
            Mapping {
                item_index: Some(
                    0,
                ),
                lhs: FullName(
                    "refs/heads/feature",
                ),
                rhs: None,
                spec_index: 0,
            },
            Mapping {
                item_index: Some(
                    1,
                ),
                lhs: FullName(
                    "refs/heads/feat",
                ),
                rhs: None,
                spec_index: 0,
            },
        ]
        "#);
    }

    /// Build a test ref pointing directly at the given hex object id (no peeled object).
    fn new_ref(name: &str, id_hex: &str) -> Ref {
        Ref {
            name: name.into(),
            target: ObjectId::from_hex(id_hex.as_bytes()).unwrap(),
            object: None,
        }
    }

    /// A minimal stand-in for a server-advertised ref, local to this module.
    #[derive(Debug, Clone)]
    struct Ref {
        name: BString,
        target: ObjectId,
        object: Option<ObjectId>,
    }

    impl Ref {
        /// View this ref as a matching item, borrowing its fields.
        fn to_item(&self) -> gix_refspec::match_group::Item<'_> {
            gix_refspec::match_group::Item {
                full_ref_name: self.name.as_ref(),
                target: &self.target,
                object: self.object.as_deref(),
            }
        }
    }
}

View File

@@ -0,0 +1,310 @@
use std::sync::LazyLock;
/// Baseline expectations, parsed once on first access and shared by all matching tests.
static BASELINE: LazyLock<baseline::Baseline> = LazyLock::new(|| baseline::parse().unwrap());
pub mod baseline {
use std::{borrow::Borrow, collections::HashMap};
use bstr::{BString, ByteSlice, ByteVec};
use gix_hash::ObjectId;
use gix_refspec::{
match_group::{validate::Fix, SourceRef},
parse::Operation,
MatchGroup,
};
use std::sync::LazyLock;
use crate::matching::BASELINE;
/// A remote ref as read from the `remote-refs.list` fixture file.
#[derive(Debug)]
pub struct Ref {
    // The full name of the ref, e.g. `refs/heads/main`.
    pub name: BString,
    // The object id the ref points to.
    pub target: ObjectId,
    /// Set if `target` is an annotated tag, this being the object it points to.
    pub object: Option<ObjectId>,
}
impl Ref {
    /// View this ref as a matching item, borrowing all of its fields.
    pub fn to_item(&self) -> gix_refspec::match_group::Item<'_> {
        gix_refspec::match_group::Item {
            full_ref_name: self.name.borrow(),
            target: &self.target,
            object: self.object.as_deref(),
        }
    }
}
/// The server refs used as matching input, parsed once from the fixture.
static INPUT: LazyLock<Vec<Ref>> = LazyLock::new(|| parse_input().unwrap());

/// Maps a list of refspecs to the mappings recorded for them by the fixture script,
/// or to the error output when the specs were rejected.
pub type Baseline = HashMap<Vec<BString>, Result<Vec<Mapping>, BString>>;

/// A single remote-to-local mapping as recorded in the baseline.
#[derive(Debug)]
pub struct Mapping {
    // The remote side: a full ref name or an object id rendered as hex.
    pub remote: BString,
    /// `None` if there is no destination/tracking branch
    pub local: Option<BString>,
}
/// Iterate the fixture's server refs as borrowed matching items.
pub fn input() -> impl ExactSizeIterator<Item = gix_refspec::match_group::Item<'static>> + Clone {
    INPUT.iter().map(|r| r.to_item())
}
/// Like [`agrees_and_applies_fixes()`], but expecting no fixes at all.
pub fn of_objects_with_destinations_are_written_into_given_local_branches<'a, 'b>(
    specs: impl IntoIterator<Item = &'a str> + Clone,
    expected: impl IntoIterator<Item = &'b str>,
) {
    agrees_and_applies_fixes(specs, std::iter::empty(), expected);
}
/// Assert that matching `specs` produces exactly the `expected` mappings
/// (written as `remote:local` refspecs) along with exactly the given `fixes`.
pub fn agrees_and_applies_fixes<'a, 'b>(
    specs: impl IntoIterator<Item = &'a str> + Clone,
    fixes: impl IntoIterator<Item = Fix>,
    expected: impl IntoIterator<Item = &'b str>,
) {
    check_fetch_remote(
        specs,
        Mode::Custom {
            // Each expectation is itself written as a refspec; parse it to split
            // it into the remote (source) and optional local (destination) side.
            expected: expected
                .into_iter()
                .map(|s| {
                    let spec = gix_refspec::parse(s.into(), Operation::Fetch).expect("valid spec");
                    Mapping {
                        remote: spec.source().unwrap().into(),
                        local: spec.destination().map(ToOwned::to_owned),
                    }
                })
                .collect(),
            fixes: fixes.into_iter().collect(),
        },
    );
}
/// Assert that matching `specs` (object ids) agrees with the baseline without
/// any validation error or fixes.
///
/// Note: the previous signature declared an unused lifetime parameter `'b`;
/// it is removed here. This is backward-compatible as functions taking
/// `impl Trait` arguments cannot be called with explicit generic arguments.
pub fn of_objects_always_matches_if_the_server_has_the_object<'a>(
    specs: impl IntoIterator<Item = &'a str> + Clone,
) {
    check_fetch_remote(specs, Mode::Normal { validate_err: None });
}
/// Assert that matching `specs` produces exactly the mappings the baseline recorded,
/// with no validation error and no fixes.
pub fn agrees_with_fetch_specs<'a>(specs: impl IntoIterator<Item = &'a str> + Clone) {
    let mode = Mode::Normal { validate_err: None };
    check_fetch_remote(specs, mode);
}
/// Assert that matching `specs` fails validation with exactly the message `validate_err`.
pub fn agrees_with_fetch_specs_validation_error<'a>(
    specs: impl IntoIterator<Item = &'a str> + Clone,
    validate_err: impl Into<String>,
) {
    let mode = Mode::Normal {
        validate_err: Some(validate_err.into()),
    };
    check_fetch_remote(specs, mode);
}
/// Assert that at least one of `specs` fails to parse with exactly `err`,
/// while the remaining specs may parse fine. Any *other* parse error is a test failure.
/// Used where git accepts input whose behaviour is surprising, and we reject it instead.
pub fn invalid_specs_fail_to_parse_where_git_shows_surprising_behaviour<'a>(
    specs: impl IntoIterator<Item = &'a str>,
    err: gix_refspec::parse::Error,
) {
    // Compare by message as parse errors may not implement `PartialEq`.
    let err = err.to_string();
    let mut saw_err = false;
    for spec in specs {
        match gix_refspec::parse(spec.into(), Operation::Fetch) {
            Ok(_) => {}
            Err(e) if e.to_string() == err => {
                saw_err = true;
            }
            Err(err) => panic!("Unexpected parse error: {err:?}"),
        }
    }
    assert!(saw_err, "Failed to see error when parsing specs: {err:?}");
}
/// Here we checked by hand which refs are actually written with a particular refspec
/// (the baseline output alone is too vague), hence the explicit `expected` mappings.
pub fn agrees_but_observable_refs_are_vague<'a, 'b>(
    specs: impl IntoIterator<Item = &'a str> + Clone,
    expected: impl IntoIterator<Item = &'b str>,
) {
    of_objects_with_destinations_are_written_into_given_local_branches(specs, expected);
}
/// Controls how `check_fetch_remote()` evaluates the match outcome.
enum Mode {
    /// Expect the baseline's own mappings, or — if set — a validation error with this exact message.
    Normal { validate_err: Option<String> },
    /// Compare against explicitly given mappings and fixes instead of no-fix baseline output.
    Custom { expected: Vec<Mapping>, fixes: Vec<Fix> },
}
/// Match `specs` against the fixture input and compare the validated outcome
/// with the baseline entry stored under exactly this spec list, per `mode`.
fn check_fetch_remote<'a>(specs: impl IntoIterator<Item = &'a str> + Clone, mode: Mode) {
    let match_group = MatchGroup::from_fetch_specs(
        specs
            .clone()
            .into_iter()
            .map(|spec| gix_refspec::parse(spec.into(), Operation::Fetch).unwrap()),
    );
    // The baseline is keyed by the verbatim list of spec strings.
    let key: Vec<_> = specs.into_iter().map(BString::from).collect();
    let expected = BASELINE
        .get(&key)
        .unwrap_or_else(|| panic!("BUG: Need {key:?} added to the baseline"))
        .as_ref();
    let actual = match_group.match_lhs(input()).validated();
    let (actual, expected) = match &mode {
        Mode::Normal { validate_err } => match validate_err {
            Some(err_message) => {
                // Only the error message is compared; there are no mappings to check.
                assert_eq!(actual.unwrap_err().to_string(), *err_message);
                return;
            }
            None => {
                let (actual, fixed) = actual.unwrap();
                assert_eq!(
                    fixed,
                    Vec::<gix_refspec::match_group::validate::Fix>::new(),
                    "we don't expect any issues to be fixed here"
                );
                (actual.mappings, expected.expect("no error"))
            }
        },
        Mode::Custom {
            expected,
            fixes: expected_fixes,
        } => {
            let (actual, actual_fixes) = actual.unwrap();
            assert_eq!(&actual_fixes, expected_fixes);
            (actual.mappings, expected)
        }
    };
    assert_eq!(
        actual.len(),
        expected.len(),
        "got a different amount of mappings: {actual:?} != {expected:?}"
    );
    // Compare remote sides pairwise; local sides only when the expectation names one.
    for (idx, (actual, expected)) in actual.iter().zip(expected).enumerate() {
        assert_eq!(
            source_to_bstring(&actual.lhs),
            expected.remote,
            "{idx}: remote mismatch"
        );
        if let Some(expected) = expected.local.as_ref() {
            match actual.rhs.as_ref() {
                None => panic!("{idx}: Expected local ref to be {expected}, got none"),
                Some(actual) => assert_eq!(actual.as_ref(), expected, "{idx}: mismatched local ref"),
            }
        }
    }
}
/// Render a matched source in the textual form the baseline uses:
/// the full ref name, or the hexadecimal object id.
fn source_to_bstring(source: &SourceRef) -> BString {
    match source {
        SourceRef::ObjectId(id) => id.to_string().into(),
        SourceRef::FullName(name) => name.as_ref().into(),
    }
}
fn parse_input() -> crate::Result<Vec<Ref>> {
let dir = gix_testtools::scripted_fixture_read_only("match_baseline.sh")?;
let refs_buf = std::fs::read(dir.join("clone").join("remote-refs.list"))?;
let mut out = Vec::new();
for line in refs_buf.lines() {
if line.starts_with(b"From ") {
continue;
}
let mut tokens = line.splitn(2, |b| *b == b'\t');
let target = ObjectId::from_hex(tokens.next().expect("hex-sha"))?;
let name = tokens.next().expect("name");
if !name.ends_with(b"^{}") {
out.push(Ref {
name: name.into(),
target,
object: None,
});
} else {
out.last_mut().unwrap().object = Some(target);
}
}
Ok(out)
}
/// Parse the `git fetch` output captured by the `match_baseline.sh` fixture into a map
/// from the list of specs used to either the mappings git produced, or the fatal error
/// message it printed.
pub(crate) fn parse() -> crate::Result<Baseline> {
    let dir = gix_testtools::scripted_fixture_read_only("match_baseline.sh")?;
    let buf = std::fs::read(dir.join("clone").join("baseline.git"))?;
    let mut map = HashMap::new();
    let mut mappings = Vec::new();
    let mut fatal = None;
    for line in buf.lines() {
        if line.starts_with(b"From ") {
            continue;
        }
        match line.strip_prefix(b"specs: ") {
            Some(specs) => {
                // A `specs:` line terminates one invocation: record everything collected
                // so far under the space-separated spec list, either as mappings or as
                // the pending fatal error.
                let key: Vec<_> = specs.split(|b| *b == b' ').map(BString::from).collect();
                let value = match fatal.take() {
                    Some(message) => Err(message),
                    None => Ok(std::mem::take(&mut mappings)),
                };
                map.insert(key, value);
            }
            None => match line.strip_prefix(b"fatal: ") {
                Some(message) => {
                    fatal = Some(message.into());
                }
                None => match line.strip_prefix(b"error: * Ignoring funny ref") {
                    Some(_) => continue,
                    None => {
                        // Everything else is a per-ref progress line; skip past the
                        // `[..]` note or the `* branch `/`* tag ` prefix to reach the
                        // `<remote> -> <local>` portion.
                        let past_note = line
                            .splitn(2, |b| *b == b']')
                            .nth(1)
                            .or_else(|| line.strip_prefix(b" * branch "))
                            .or_else(|| line.strip_prefix(b" * tag "))
                            .unwrap_or_else(|| panic!("line unhandled: {:?}", line.as_bstr()));
                        let mut tokens = past_note.split(|b| *b == b' ').filter(|t| !t.is_empty());
                        let mut lhs = tokens.next().unwrap().trim();
                        if lhs.as_bstr() == "->" {
                            // No explicit source printed means HEAD was fetched.
                            lhs = "HEAD".as_bytes();
                        } else {
                            // Skip the `->` separator token.
                            tokens.next();
                        }
                        let rhs = tokens.next().unwrap().trim();
                        // FETCH_HEAD-only updates have no local tracking ref.
                        let local = (rhs != b"FETCH_HEAD").then(|| full_tracking_ref(rhs.into()));
                        if !(lhs.as_bstr() == "HEAD" && local.is_none()) {
                            mappings.push(Mapping {
                                remote: full_remote_ref(lhs.into()),
                                local,
                            });
                        }
                    }
                },
            },
        }
    }
    Ok(map)
}
/// Return `true` if `name` uses one of the tag-name prefixes that the fixture script produces.
fn looks_like_tag(name: &BString) -> bool {
    let tag_prefixes: [&[u8]; 2] = [b"v0.", b"annotated-v0."];
    tag_prefixes.into_iter().any(|prefix| name.starts_with(prefix))
}
/// Expand a short remote-side name as printed by git into a full ref name.
///
/// Only names without a slash (plus the fixture's `sub/`/`suub/` branches) are
/// expanded; tag-looking names get `refs/tags/`, object ids and `HEAD` stay as-is,
/// everything else is assumed to be a branch.
fn full_remote_ref(mut name: BString) -> BString {
    let needs_expansion = !name.contains(&b'/') || name.starts_with(b"sub/") || name.starts_with(b"suub/");
    if needs_expansion {
        if looks_like_tag(&name) {
            name.insert_str(0, b"refs/tags/");
        } else if gix_hash::ObjectId::from_hex(name.as_ref()).is_ok() {
            // Object ids are kept verbatim.
        } else if name != "HEAD" {
            name.insert_str(0, b"refs/heads/");
        }
    }
    name
}
/// Expand a short tracking-ref name as printed by git into a full ref name.
fn full_tracking_ref(mut name: BString) -> BString {
    let prefix: Option<&[u8]> = if name.starts_with_str(b"origin/") || name.starts_with_str("new-origin/") {
        Some(b"refs/remotes/")
    } else if looks_like_tag(&name) {
        Some(b"refs/tags/")
    } else {
        None
    };
    if let Some(prefix) = prefix {
        name.insert_str(0, prefix);
    }
    name
}
}

View File

@@ -0,0 +1,215 @@
use gix_refspec::{
instruction::Fetch,
parse::{Error, Operation},
Instruction,
};
use crate::parse::{assert_parse, b, try_parse};
#[test]
fn revspecs_are_disallowed() {
    // Fetch sources must be reference names, never revision expressions.
    for spec in ["main~1", "^@^{}", "HEAD:main~1"] {
        assert!(matches!(
            try_parse(spec, Operation::Fetch).unwrap_err(),
            Error::ReferenceName(_)
        ));
    }
}
#[test]
fn object_hash_as_source() {
    // A bare object id with an empty destination is a valid fetch-only source.
    assert_parse(
        "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391:",
        Instruction::Fetch(Fetch::Only {
            src: b("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"),
        }),
    );
}
#[test]
fn object_hash_destination_are_valid_as_they_might_be_a_strange_partial_branch_name() {
    // Hash-shaped destinations are accepted since they may be legitimate (odd) branch names.
    assert_parse(
        "a:e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
        Instruction::Fetch(Fetch::AndUpdate {
            src: b("a"),
            dst: b("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"),
            allow_non_fast_forward: false,
        }),
    );
}
#[test]
fn negative_must_not_be_empty() {
    // `^` alone excludes nothing and is rejected.
    assert!(matches!(
        try_parse("^", Operation::Fetch).unwrap_err(),
        Error::NegativeEmpty
    ));
}
#[test]
fn negative_must_not_be_object_hash() {
    // Exclusions must name refs, not objects.
    assert!(matches!(
        try_parse("^e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", Operation::Fetch).unwrap_err(),
        Error::NegativeObjectHash
    ));
}
#[test]
fn negative_with_destination() {
    // Exclusion (negative) specs cannot carry a destination in any form.
    for spec in ["^a:b", "^a:", "^:", "^:b"] {
        assert!(matches!(
            try_parse(spec, Operation::Fetch).unwrap_err(),
            Error::NegativeWithDestination
        ));
    }
}
#[test]
fn exclude() {
    // Exclusions require a full ref name without glob patterns.
    assert!(matches!(
        try_parse("^a", Operation::Fetch).unwrap_err(),
        Error::NegativePartialName
    ));
    assert!(matches!(
        try_parse("^a*", Operation::Fetch).unwrap_err(),
        Error::NegativeGlobPattern
    ));
    assert_parse(
        "^refs/heads/a",
        Instruction::Fetch(Fetch::Exclude { src: b("refs/heads/a") }),
    );
}
#[test]
fn ampersand_is_resolved_to_head() {
    // `@` is shorthand for HEAD, with or without the force/exclude markers.
    assert_parse("@", Instruction::Fetch(Fetch::Only { src: b("HEAD") }));
    assert_parse("+@", Instruction::Fetch(Fetch::Only { src: b("HEAD") }));
    assert_parse("^@", Instruction::Fetch(Fetch::Exclude { src: b("HEAD") }));
}
#[test]
fn lhs_colon_empty_fetches_only() {
    // A trailing colon means: fetch, but update no local ref.
    assert_parse("src:", Instruction::Fetch(Fetch::Only { src: b("src") }));
    assert_parse("+src:", Instruction::Fetch(Fetch::Only { src: b("src") }));
}
#[test]
fn lhs_colon_rhs_updates_single_ref() {
    // `src:dst` updates one ref; a leading `+` allows non-fast-forward updates,
    // and single-`*` glob pairs are accepted on both sides.
    assert_parse(
        "a:b",
        Instruction::Fetch(Fetch::AndUpdate {
            src: b("a"),
            dst: b("b"),
            allow_non_fast_forward: false,
        }),
    );
    assert_parse(
        "+a:b",
        Instruction::Fetch(Fetch::AndUpdate {
            src: b("a"),
            dst: b("b"),
            allow_non_fast_forward: true,
        }),
    );
    assert_parse(
        "a/*:b/*",
        Instruction::Fetch(Fetch::AndUpdate {
            src: b("a/*"),
            dst: b("b/*"),
            allow_non_fast_forward: false,
        }),
    );
    assert_parse(
        "+a/*:b/*",
        Instruction::Fetch(Fetch::AndUpdate {
            src: b("a/*"),
            dst: b("b/*"),
            allow_non_fast_forward: true,
        }),
    );
}
#[test]
fn empty_lhs_colon_rhs_fetches_head_to_destination() {
    // An empty source defaults to HEAD.
    assert_parse(
        ":a",
        Instruction::Fetch(Fetch::AndUpdate {
            src: b("HEAD"),
            dst: b("a"),
            allow_non_fast_forward: false,
        }),
    );
    assert_parse(
        "+:a",
        Instruction::Fetch(Fetch::AndUpdate {
            src: b("HEAD"),
            dst: b("a"),
            allow_non_fast_forward: true,
        }),
    );
}
#[test]
fn colon_alone_is_for_fetching_head_into_fetchhead() {
    assert_parse(":", Instruction::Fetch(Fetch::Only { src: b("HEAD") }));
    assert_parse("+:", Instruction::Fetch(Fetch::Only { src: b("HEAD") }));
}
#[test]
fn ampersand_on_left_hand_side_is_head() {
    assert_parse("@:", Instruction::Fetch(Fetch::Only { src: b("HEAD") }));
    assert_parse(
        "@:HEAD",
        Instruction::Fetch(Fetch::AndUpdate {
            src: b("HEAD"),
            dst: b("HEAD"),
            allow_non_fast_forward: false,
        }),
    );
}
#[test]
fn empty_refspec_is_enough_for_fetching_head_into_fetchhead() {
    assert_parse("", Instruction::Fetch(Fetch::Only { src: b("HEAD") }));
}
#[test]
fn complex_glob_patterns_are_allowed_in_one_sided_refspecs() {
    // Complex patterns with multiple asterisks should work for one-sided refspecs
    assert_parse(
        "refs/*/foo/*",
        Instruction::Fetch(Fetch::Only { src: b("refs/*/foo/*") }),
    );
    assert_parse(
        "+refs/heads/*/release/*",
        Instruction::Fetch(Fetch::Only {
            src: b("refs/heads/*/release/*"),
        }),
    );
    // Even more complex patterns
    assert_parse(
        "refs/*/*/branch",
        Instruction::Fetch(Fetch::Only {
            src: b("refs/*/*/branch"),
        }),
    );
}
#[test]
fn complex_glob_patterns_still_fail_for_two_sided_refspecs() {
    // Two-sided refspecs with complex patterns (multiple asterisks) should still fail
    for spec in [
        "refs/*/foo/*:refs/remotes/origin/*",
        "refs/*/*:refs/remotes/*",
        "a/*/c/*:b/*",
    ] {
        assert!(matches!(
            try_parse(spec, Operation::Fetch).unwrap_err(),
            Error::PatternUnsupported { .. }
        ));
    }
}

View File

@@ -0,0 +1,83 @@
use gix_refspec::parse::{Error, Operation};
use crate::parse::try_parse;
#[test]
fn empty() {
    // Pushing needs at least something to push; empty specs are only valid for fetch.
    assert!(matches!(try_parse("", Operation::Push).unwrap_err(), Error::Empty));
}
#[test]
fn empty_component() {
    // Ref-name validation rejects `//` within either side.
    assert!(matches!(
        try_parse("refs/heads/test:refs/remotes//test", Operation::Fetch).unwrap_err(),
        Error::ReferenceName(gix_validate::reference::name::Error::RepeatedSlash)
    ));
}
#[test]
fn whitespace() {
    // Spaces are invalid bytes in reference names.
    assert!(matches!(
        try_parse("refs/heads/test:refs/remotes/ /test", Operation::Fetch).unwrap_err(),
        Error::ReferenceName(gix_validate::reference::name::Error::InvalidByte { .. })
    ));
}
#[test]
fn complex_patterns_with_more_than_one_asterisk() {
    // For one-sided refspecs, complex patterns are now allowed
    for op in [Operation::Fetch, Operation::Push] {
        assert!(try_parse("a/*/c/*", op).is_ok());
    }
    // For two-sided refspecs, complex patterns should still fail
    for op in [Operation::Fetch, Operation::Push] {
        for spec in ["a/*/c/*:x/*/y/*", "a**:**b", "+:**/"] {
            assert!(matches!(
                try_parse(spec, op).unwrap_err(),
                Error::PatternUnsupported { .. }
            ));
        }
    }
    // Negative specs with multiple patterns still fail
    assert!(matches!(
        try_parse("^*/*", Operation::Fetch).unwrap_err(),
        Error::NegativeGlobPattern
    ));
}
#[test]
fn both_sides_need_pattern_if_one_uses_it() {
    // For two-sided refspecs, both sides still need patterns if one uses it
    for op in [Operation::Fetch, Operation::Push] {
        for spec in [":a/*", "+:a/*", "a*:b/c", "a:b/*"] {
            assert!(
                matches!(try_parse(spec, op).unwrap_err(), Error::PatternUnbalanced),
                "{}",
                spec
            );
        }
    }
    // One-sided refspecs with patterns are now allowed
    for op in [Operation::Fetch, Operation::Push] {
        assert!(try_parse("refs/*/a", op).is_ok());
    }
}
#[test]
fn push_to_empty() {
    // Push specs require a destination; `src:` is fetch-only syntax.
    assert!(matches!(
        try_parse("HEAD:", Operation::Push).unwrap_err(),
        Error::PushToEmpty
    ));
}
#[test]
fn fuzzed() {
    // Regression input found by fuzzing: must error cleanly (no panic) for both operations.
    let input =
        include_bytes!("../../fixtures/fuzzed/clusterfuzz-testcase-minimized-src-refspec-parse-4658733962887168");
    drop(gix_refspec::parse(input.into(), gix_refspec::parse::Operation::Fetch).unwrap_err());
    drop(gix_refspec::parse(input.into(), gix_refspec::parse::Operation::Push).unwrap_err());
}

View File

@@ -0,0 +1,117 @@
use std::panic::catch_unwind;
use bstr::ByteSlice;
use gix_refspec::parse::Operation;
use gix_testtools::scripted_fixture_read_only;
#[test]
fn baseline() {
    // Compare our parser against git's own refspec validation, recorded by the fixture
    // as alternating lines of `<kind> <spec>` and the exit code git produced for it.
    let dir = scripted_fixture_read_only("parse_baseline.sh").unwrap();
    let baseline = std::fs::read(dir.join("baseline.git")).unwrap();
    let mut lines = baseline.lines();
    let mut panics = 0;
    let mut mismatch = 0;
    let mut count = 0;
    while let Some(kind_spec) = lines.next() {
        count += 1;
        let (kind, spec) = kind_spec.split_at(kind_spec.find_byte(b' ').expect("space between kind and spec"));
        // Skip the separating space.
        let spec = &spec[1..];
        let err_code: usize = lines
            .next()
            .expect("err code")
            .to_str()
            .unwrap()
            .parse()
            .expect("number");
        let op = match kind {
            b"fetch" => Operation::Fetch,
            b"push" => Operation::Push,
            _ => unreachable!("{} unexpected", kind.as_bstr()),
        };
        // Catch panics so a single bad entry doesn't abort the whole baseline run.
        let res = catch_unwind(|| try_parse(spec.to_str().unwrap(), op));
        match &res {
            Ok(res) => match (res.is_ok(), err_code == 0) {
                (true, true) | (false, false) => {
                    if let Ok(spec) = res {
                        spec.instruction(); // should not panic
                    }
                }
                _ => {
                    // We disagree with git here; allow only the known, intentional deviations.
                    match (res.as_ref().err(), err_code == 0) {
                        (
                            Some(
                                gix_refspec::parse::Error::NegativePartialName
                                | gix_refspec::parse::Error::NegativeGlobPattern,
                            ),
                            true,
                        ) => {} // we prefer failing fast, git let's it pass
                        // We now allow complex glob patterns in one-sided refspecs
                        (None, false) if is_one_sided_glob_pattern(spec, op) => {
                            // This is an intentional behavior change: we allow complex globs in one-sided refspecs
                        }
                        _ => {
                            eprintln!("{err_code} {res:?} {} {:?}", kind.as_bstr(), spec.as_bstr());
                            mismatch += 1;
                        }
                    }
                }
            },
            Err(_) => {
                panics += 1;
            }
        }
    }
    if panics != 0 || mismatch != 0 {
        panic!(
            "Out of {} baseline entries, got {} right, ({} mismatches and {} panics)",
            count,
            count - (mismatch + panics),
            mismatch,
            panics
        );
    }
    // A spec counts as one-sided glob when fetching with a `*` present and no `:` separator.
    fn is_one_sided_glob_pattern(spec: &[u8], op: Operation) -> bool {
        use bstr::ByteSlice;
        matches!(op, Operation::Fetch)
            && spec
                .to_str()
                .map(|s| s.contains('*') && !s.contains(':'))
                .unwrap_or(false)
    }
}
#[test]
fn local_and_remote() -> crate::Result {
    // For fetch specs the remote is the source and the local side the destination;
    // for push specs the roles are swapped.
    let spec = gix_refspec::parse("remote:local".into(), Operation::Fetch)?;
    assert_eq!(spec.remote(), spec.source());
    assert_eq!(spec.local(), spec.destination());
    let spec = gix_refspec::parse("local:remote".into(), Operation::Push)?;
    assert_eq!(spec.local(), spec.source());
    assert_eq!(spec.remote(), spec.destination());
    Ok(())
}
mod fetch;
mod invalid;
mod push;
/// Small helpers shared by the parse tests.
mod util {
    use gix_refspec::{parse::Operation, Instruction, RefSpecRef};
    /// Convert a `&str` into a `&BStr` for terse fixtures.
    pub fn b(input: &str) -> &bstr::BStr {
        input.into()
    }
    /// Parse `spec` for the given operation, surfacing any parse error to the caller.
    pub fn try_parse(spec: &str, op: Operation) -> Result<RefSpecRef<'_>, gix_refspec::parse::Error> {
        gix_refspec::parse(spec.into(), op)
    }
    /// Parse `spec` and assert it decodes to `expected`, returning the parsed spec.
    pub fn assert_parse<'a>(spec: &'a str, expected: Instruction<'_>) -> RefSpecRef<'a> {
        let parsed = try_parse(spec, expected.operation()).expect("no error");
        assert_eq!(parsed.instruction(), expected);
        parsed
    }
}
pub use util::*;

View File

@@ -0,0 +1,163 @@
use crate::parse::{assert_parse, b, try_parse};
use gix_refspec::{
instruction::Push,
parse::{Error, Operation},
Instruction,
};
#[test]
fn negative_must_not_be_empty() {
assert!(matches!(
try_parse("^", Operation::Push).unwrap_err(),
Error::NegativeEmpty
));
}
#[test]
fn negative_must_not_be_object_hash() {
assert!(matches!(
try_parse("^e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", Operation::Push).unwrap_err(),
Error::NegativeObjectHash
));
}
#[test]
fn negative_with_destination() {
for spec in ["^a:b", "^a:", "^:", "^:b"] {
assert!(matches!(
try_parse(spec, Operation::Push).unwrap_err(),
Error::NegativeWithDestination
));
}
}
#[test]
fn exclude() {
assert!(matches!(
try_parse("^a", Operation::Push).unwrap_err(),
Error::NegativePartialName
));
assert!(matches!(
try_parse("^a*", Operation::Push).unwrap_err(),
Error::NegativeGlobPattern
));
assert_parse(
"^refs/heads/a",
Instruction::Push(Push::Exclude { src: b("refs/heads/a") }),
);
}
#[test]
fn revspecs_with_ref_name_destination() {
assert_parse(
"main~1:b",
Instruction::Push(Push::Matching {
src: b("main~1"),
dst: b("b"),
allow_non_fast_forward: false,
}),
);
assert_parse(
"+main~1:b",
Instruction::Push(Push::Matching {
src: b("main~1"),
dst: b("b"),
allow_non_fast_forward: true,
}),
);
}
#[test]
fn destinations_must_be_ref_names() {
assert!(matches!(
try_parse("a~1:b~1", Operation::Push).unwrap_err(),
Error::ReferenceName(_)
));
}
#[test]
fn single_refs_must_be_refnames() {
assert!(matches!(
try_parse("a~1", Operation::Push).unwrap_err(),
Error::ReferenceName(_)
));
}
#[test]
fn ampersand_is_resolved_to_head() {
assert_parse(
"@",
Instruction::Push(Push::Matching {
src: b("HEAD"),
dst: b("HEAD"),
allow_non_fast_forward: false,
}),
);
assert_parse(
"+@",
Instruction::Push(Push::Matching {
src: b("HEAD"),
dst: b("HEAD"),
allow_non_fast_forward: true,
}),
);
}
#[test]
fn lhs_colon_rhs_pushes_single_ref() {
assert_parse(
"a:b",
Instruction::Push(Push::Matching {
src: b("a"),
dst: b("b"),
allow_non_fast_forward: false,
}),
);
assert_parse(
"+a:b",
Instruction::Push(Push::Matching {
src: b("a"),
dst: b("b"),
allow_non_fast_forward: true,
}),
);
assert_parse(
"a/*:b/*",
Instruction::Push(Push::Matching {
src: b("a/*"),
dst: b("b/*"),
allow_non_fast_forward: false,
}),
);
assert_parse(
"+a/*:b/*",
Instruction::Push(Push::Matching {
src: b("a/*"),
dst: b("b/*"),
allow_non_fast_forward: true,
}),
);
}
#[test]
fn colon_alone_is_for_pushing_matching_refs() {
assert_parse(
":",
Instruction::Push(Push::AllMatchingBranches {
allow_non_fast_forward: false,
}),
);
assert_parse(
"+:",
Instruction::Push(Push::AllMatchingBranches {
allow_non_fast_forward: true,
}),
);
}
#[test]
fn delete() {
assert_parse(":a", Instruction::Push(Push::Delete { ref_or_pattern: b("a") }));
assert_parse("+:a", Instruction::Push(Push::Delete { ref_or_pattern: b("a") }));
}

View File

@@ -0,0 +1,131 @@
/// Tests for `RefSpecRef::prefix()`, which yields the longest fixed ref prefix of a spec, if any.
mod prefix {
    use gix_refspec::{parse::Operation, RefSpec};
    #[test]
    fn head_is_specifically_known() {
        assert_eq!(parse("HEAD").to_ref().prefix().unwrap(), "HEAD");
    }
    #[test]
    fn partial_refs_have_no_prefix() {
        assert_eq!(parse("main").to_ref().prefix(), None);
    }
    #[test]
    fn negative_specs_have_no_prefix() {
        assert_eq!(parse("^refs/heads/main").to_ref().prefix(), None);
    }
    #[test]
    fn short_absolute_refs_have_no_prefix() {
        assert_eq!(parse("refs/short").to_ref().prefix(), None);
    }
    #[test]
    fn push_specs_use_the_destination() {
        assert_eq!(
            gix_refspec::parse("refs/local/main:refs/remote/main".into(), Operation::Push)
                .unwrap()
                .prefix()
                .unwrap(),
            "refs/remote/"
        );
    }
    #[test]
    fn full_names_have_a_prefix() {
        assert_eq!(parse("refs/heads/main").to_ref().prefix().unwrap(), "refs/heads/");
        assert_eq!(parse("refs/foo/bar").to_ref().prefix().unwrap(), "refs/foo/");
        assert_eq!(
            parse("refs/heads/*:refs/remotes/origin/*").to_ref().prefix().unwrap(),
            "refs/heads/"
        );
    }
    #[test]
    fn strange_glob_patterns_have_no_prefix() {
        // A `*` before the second slash leaves nothing fixed to use as prefix.
        assert_eq!(parse("refs/*/main:refs/*/main").to_ref().prefix(), None);
    }
    #[test]
    fn object_names_have_no_prefix() {
        assert_eq!(
            parse("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391").to_ref().prefix(),
            None
        );
    }
    // Parse as a fetch spec and take ownership for ergonomic chaining in the tests above.
    fn parse(spec: &str) -> RefSpec {
        gix_refspec::parse(spec.into(), Operation::Fetch).unwrap().to_owned()
    }
}
/// Tests for `RefSpecRef::expand_prefixes()`, which collects all candidate ref prefixes of a spec.
mod expand_prefixes {
    use gix_refspec::parse::Operation;
    #[test]
    fn head_is_specifically_known() {
        assert_eq!(parse("HEAD"), ["HEAD"]);
    }
    #[test]
    fn partial_refs_have_many_prefixes() {
        // Partial names expand to every location a ref of that name could live in.
        assert_eq!(
            parse("main"),
            [
                "main",
                "refs/main",
                "refs/tags/main",
                "refs/heads/main",
                "refs/remotes/main",
                "refs/remotes/main/HEAD"
            ]
        );
    }
    #[test]
    fn negative_specs_have_no_prefix() {
        assert_eq!(parse("^refs/heads/main").len(), 0);
    }
    #[test]
    fn short_absolute_refs_expand_to_themselves() {
        assert_eq!(parse("refs/short"), ["refs/short"]);
    }
    #[test]
    fn full_names_expand_to_their_prefix() {
        assert_eq!(parse("refs/heads/main"), ["refs/heads/"]);
        assert_eq!(parse("refs/foo/bar"), ["refs/foo/"]);
        assert_eq!(parse("refs/heads/*:refs/remotes/origin/*"), ["refs/heads/"]);
    }
    #[test]
    fn push_specs_use_the_destination() {
        let mut out = Vec::new();
        gix_refspec::parse("refs/local/main:refs/remote/main".into(), Operation::Push)
            .unwrap()
            .expand_prefixes(&mut out);
        assert_eq!(out, ["refs/remote/"]);
    }
    #[test]
    fn strange_glob_patterns_expand_to_nothing() {
        assert_eq!(parse("refs/*/main:refs/*/main").len(), 0);
    }
    #[test]
    fn object_names_expand_to_nothing() {
        assert_eq!(parse("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391").len(), 0);
    }
    // Parse as a fetch spec, expand its prefixes, and return them stringified for easy comparison.
    fn parse(spec: &str) -> Vec<String> {
        let mut out = Vec::new();
        gix_refspec::parse(spec.into(), Operation::Fetch)
            .unwrap()
            .to_owned()
            .to_ref()
            .expand_prefixes(&mut out);
        out.into_iter().map(|b| b.to_string()).collect()
    }
}

View File

@@ -0,0 +1,98 @@
/// Round-trip tests: push instructions serialize back to their canonical refspec text.
mod push {
    use gix_refspec::{instruction, Instruction};
    #[test]
    fn all_matching_branches() {
        assert_eq!(
            Instruction::Push(instruction::Push::AllMatchingBranches {
                allow_non_fast_forward: false
            })
            .to_bstring(),
            ":"
        );
        assert_eq!(
            Instruction::Push(instruction::Push::AllMatchingBranches {
                allow_non_fast_forward: true
            })
            .to_bstring(),
            "+:"
        );
    }
    #[test]
    fn delete() {
        // Deletions serialize with an empty source.
        assert_eq!(
            Instruction::Push(instruction::Push::Delete {
                ref_or_pattern: "for-deletion".into(),
            })
            .to_bstring(),
            ":for-deletion"
        );
    }
    #[test]
    fn matching() {
        assert_eq!(
            Instruction::Push(instruction::Push::Matching {
                src: "from".into(),
                dst: "to".into(),
                allow_non_fast_forward: false
            })
            .to_bstring(),
            "from:to"
        );
        assert_eq!(
            Instruction::Push(instruction::Push::Matching {
                src: "from".into(),
                dst: "to".into(),
                allow_non_fast_forward: true
            })
            .to_bstring(),
            "+from:to"
        );
    }
}
/// Round-trip tests: fetch instructions serialize back to their canonical refspec text.
mod fetch {
    use gix_refspec::{instruction, Instruction};
    #[test]
    fn only() {
        assert_eq!(
            Instruction::Fetch(instruction::Fetch::Only {
                src: "refs/heads/main".into(),
            })
            .to_bstring(),
            "refs/heads/main"
        );
    }
    #[test]
    fn exclude() {
        // Exclusions serialize with a leading caret.
        assert_eq!(
            Instruction::Fetch(instruction::Fetch::Exclude { src: "excluded".into() }).to_bstring(),
            "^excluded"
        );
    }
    #[test]
    fn and_update() {
        assert_eq!(
            Instruction::Fetch(instruction::Fetch::AndUpdate {
                src: "from".into(),
                dst: "to".into(),
                allow_non_fast_forward: false
            })
            .to_bstring(),
            "from:to"
        );
        assert_eq!(
            Instruction::Fetch(instruction::Fetch::AndUpdate {
                src: "from".into(),
                dst: "to".into(),
                allow_non_fast_forward: true
            })
            .to_bstring(),
            "+from:to"
        );
    }
}