Radish alpha
r
rad:z6cFWeWpnZNHh9rUW8phgA3b5yGt
Git libraries for Radicle
Radicle
Git
radicle-git init
Fintan Halpenny committed 3 years ago
commit 70b571b67bb4b26c8e95df5f13fae670f5cf3476
parent c434209
112 files changed +10341 -7
modified .gitignore
@@ -1,10 +1,6 @@
-
# Generated by Cargo
-
# will have compiled files and executables
-
/target/
-

-
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
-
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
+
target
+
.vscode
+
tags
Cargo.lock
-

# These are backup files generated by rustfmt
**/*.rs.bk
added .rustfmt.toml
@@ -0,0 +1,29 @@
+
max_width = 100
+
# yeap
+
comment_width = 80
+
wrap_comments = true
+
hard_tabs = false
+
tab_spaces = 4
+
imports_layout = "HorizontalVertical"
+
imports_granularity = "Crate"
+

+
newline_style = "Unix"
+
use_small_heuristics = "Default"
+

+
reorder_imports = true
+
reorder_modules = true
+

+
remove_nested_parens = true
+

+
fn_args_layout = "Tall"
+

+
edition = "2018"
+

+
match_block_trailing_comma = true
+

+
merge_derives = true
+

+
use_try_shorthand = false
+
use_field_init_shorthand = false
+

+
force_explicit_abi = true
added Cargo.toml
@@ -0,0 +1,12 @@
+
[workspace]
+
members = [
+
  "git-ext",
+
  "git-ref-format",
+
  "git-trailers",
+
  "link-git",
+
  "macros",
+
  # TODO: port gitd-lib over
+
  # "cli/gitd-lib",
+
  "std-ext",
+
  "test",
+
]
added default.nix
@@ -0,0 +1,31 @@
+
{ sources ? import ./nix/sources.nix
+
, pkgs ? import sources.nixpkgs {
+
    overlays = [ (import sources.rust-overlay) ];
+
  }
+
, rust-overlay ? pkgs.rust-bin.stable.latest.default
+
}:
+
let
+
  # TODO: remove once cargo-nextest is available in nixpkgs stable
+
  cargo-nextest = (pkgs.callPackage ./nix/cargo-nextest/default.nix { });
+
in
+
  with pkgs;
+
  mkShell {
+
    name = "build";
+
    buildInputs = [
+
        # cargo tooling
+
        cargo-deny
+
        cargo-nextest
+
        cargo-watch
+
        pkgs.rust-bin.nightly."2022-07-01".rustfmt
+

+
        # hard dependencies
+
        cmake
+
        openssl
+
        pkgconfig
+
        rust-overlay
+

+
        # testing utilities
+
        gettext # for `envsubst`
+
        socat
+
    ];
+
  }
added deny.toml
@@ -0,0 +1,168 @@
+
# This section is considered when running `cargo deny check advisories`
+
# More documentation for the advisories section can be found here:
+
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
+
[advisories]
+
# The path where the advisory database is cloned/fetched into
+
db-path = "~/cargo/advisory-db"
+
# The url of the advisory database to use
+
db-urls = [ "https://github.com/rustsec/advisory-db" ]
+
# The lint level for security vulnerabilities
+
vulnerability = "deny"
+
# The lint level for unmaintained crates
+
unmaintained = "warn"
+
# The lint level for crates that have been yanked from their source registry
+
yanked = "warn"
+
# The lint level for crates with security notices. Note that as of
+
# 2019-12-17 there are no security notice advisories in
+
# https://github.com/rustsec/advisory-db
+
notice = "warn"
+
# A list of advisory IDs to ignore. Note that ignored advisories will still
+
# output a note when they are encountered.
+
ignore = [
+
    #"RUSTSEC-0000-0000",
+
]
+
# Threshold for security vulnerabilities, any vulnerability with a CVSS score
+
# lower than the range specified will be ignored. Note that ignored advisories
+
# will still output a note when they are encountered.
+
# * None - CVSS Score 0.0
+
# * Low - CVSS Score 0.1 - 3.9
+
# * Medium - CVSS Score 4.0 - 6.9
+
# * High - CVSS Score 7.0 - 8.9
+
# * Critical - CVSS Score 9.0 - 10.0
+
#severity-threshold =
+

+
# This section is considered when running `cargo deny check licenses`
+
# More documentation for the licenses section can be found here:
+
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
+
[licenses]
+
# The lint level for crates which do not have a detectable license
+
unlicensed = "deny"
+
# List of explicitly allowed licenses
+
# See https://spdx.org/licenses/ for list of possible licenses
+
# [possible values: any SPDX 3.7 short identifier (+ optional exception)].
+
allow = [
+
    "Apache-2.0",
+
    "BlueOak-1.0.0",
+
    "BSD-2-Clause",
+
    "CC0-1.0",
+
    "GPL-3.0",
+
    "MIT",
+
    "Unlicense",
+
]
+
# List of explicitly disallowed licenses
+
# See https://spdx.org/licenses/ for list of possible licenses
+
# [possible values: any SPDX 3.7 short identifier (+ optional exception)].
+
deny = []
+
# Lint level for licenses considered copyleft
+
copyleft = "allow"
+
# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses
+
# * both - The license will be approved if it is both OSI-approved *AND* FSF
+
# * either - The license will be approved if it is either OSI-approved *OR* FSF
+
# * osi-only - The license will be approved if is OSI-approved *AND NOT* FSF
+
# * fsf-only - The license will be approved if is FSF *AND NOT* OSI-approved
+
# * neither - This predicate is ignored and the default lint level is used
+
allow-osi-fsf-free = "both"
+
# Lint level used when no other predicates are matched
+
# 1. License isn't in the allow or deny lists
+
# 2. License isn't copyleft
+
# 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither"
+
default = "deny"
+
# The confidence threshold for detecting a license from license text.
+
# The higher the value, the more closely the license text must be to the
+
# canonical license text of a valid SPDX license file.
+
# [possible values: any between 0.0 and 1.0].
+
confidence-threshold = 0.8
+
# Allow 1 or more licenses on a per-crate basis, so that particular licenses
+
# aren't accepted for every possible crate as with the normal allow list
+
exceptions = [
+
    # Technically, the `ring` crate is "ISC AND MIT AND OpenSSL", but the rust
+
    # code is ISC (mostly). The OpenSSL license is not compatible with GPLv3.
+
    # For the sake of sanity, we will need to either get rid of TLS, or
+
    # re-license.
+
    { allow = ["ISC", "MIT", "OpenSSL"], name = "ring" },
+

+
    # The Unicode-DFS-2016 license is necessary for unicode-ident because they
+
    # use data from the unicode tables to generate the tables which are
+
    # included in the application. We do not distribute those data files so
+
    # this is not a problem for us. See
+
    # https://github.com/dtolnay/unicode-ident/pull/9/files for more details.
+
    { allow = ["MIT", "Apache-2.0", "Unicode-DFS-2016"], name = "unicode-ident" },
+
]
+

+
# Some crates don't have (easily) machine readable licensing information,
+
# adding a clarification entry for it allows you to manually specify the
+
# licensing information
+
[[licenses.clarify]]
+
name = "ring"
+
expression = "ISC AND MIT AND OpenSSL"
+
license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }]
+

+
[licenses.private]
+
# If true, ignores workspace crates that aren't published, or are only
+
# published to private registries
+
ignore = false
+
# One or more private registries that you might publish crates to, if a crate
+
# is only published to private registries, and ignore is true, the crate will
+
# not have its license(s) checked
+
registries = [
+
    #"https://sekretz.com/registry"
+
]
+

+
# This section is considered when running `cargo deny check bans`.
+
# More documentation about the 'bans' section can be found here:
+
# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
+
[bans]
+
# Lint level for when multiple versions of the same crate are detected
+
multiple-versions = "warn"
+
# The graph highlighting used when creating dotgraphs for crates
+
# with multiple versions
+
# * lowest-version - The path to the lowest versioned duplicate is highlighted
+
# * simplest-path - The path to the version with the fewest edges is highlighted
+
# * all - Both lowest-version and simplest-path are used
+
highlight = "all"
+
# List of crates that are allowed. Use with care!
+
allow = [
+
    #{ name = "ansi_term", version = "=0.11.0" },
+
]
+
# List of crates to deny
+
deny = [
+
    # Each entry is the name of a crate and a version range. If version is
+
    # not specified, all versions will be matched.
+
    #{ name = "ansi_term", version = "=0.11.0" },
+
    { name = "openssl-probe" },
+
    { name = "openssl-sys" },
+

+
    # Pinned crypto libs
+
    # See radicle-keystore @ 00f8fb6135f8e4cd097a48e6f0700e08ce4abb04
+
    { name = "chacha20poly1305", version = "> 0.9.0" },
+
    { name = "ed25519-zebra", version = "> 3.0.0" },
+
    { name = "curve25519-dalek", version = "> 3.2.1" },
+
    { name = "scrypt", version = "> 0.8.0" },
+
]
+
# Certain crates/versions that will be skipped when doing duplicate detection.
+
skip = [
+
    #{ name = "ansi_term", version = "=0.11.0" },
+
]
+
# Similarly to `skip` allows you to skip certain crates during duplicate
+
# detection. Unlike skip, it also includes the entire tree of transitive
+
# dependencies starting at the specified crate, up to a certain depth, which is
+
# by default infinite
+
skip-tree = [
+
    #{ name = "ansi_term", version = "=0.11.0", depth = 20 },
+
]
+

+
# This section is considered when running `cargo deny check sources`.
+
# More documentation about the 'sources' section can be found here:
+
# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
+
[sources]
+
# Lint level for what to happen when a crate from a crate registry that is not
+
# in the allow list is encountered
+
unknown-registry = "deny"
+
# Lint level for what to happen when a crate from a git repository that is not
+
# in the allow list is encountered
+
unknown-git = "deny"
+
# List of URLs for allowed crate registries. Defaults to the crates.io index
+
# if not specified. If it is specified but empty, no registries are allowed.
+
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
+
# List of URLs for allowed Git repositories
+
allow-git = []
added git-ext/Cargo.toml
@@ -0,0 +1,41 @@
+
[package]
+
name = "radicle-git-ext"
+
version = "0.1.0"
+
authors = ["The Radicle Team <dev@radicle.xyz>"]
+
edition = "2018"
+
license = "GPL-3.0-or-later"
+
description = "Utilities and extensions to the git2 crate"
+

+
[lib]
+
doctest = false
+
test = false
+

+
[dependencies]
+
multihash = "0.11"
+
percent-encoding = "2"
+
thiserror = "1"
+

+
[dependencies.git2]
+
version = "0.13.24"
+
default-features = false
+
features = ["vendored-libgit2"]
+

+
[dependencies.git-ref-format]
+
path = "../git-ref-format"
+

+
[dependencies.link-git]
+
path = "../link-git"
+
optional = true
+

+
[dependencies.minicbor]
+
version = "0.13"
+
features = ["std"]
+
optional = true
+

+
[dependencies.serde]
+
version = "1"
+
features = ["derive"]
+
optional = true
+

+
[dependencies.radicle-std-ext]
+
path = "../std-ext"
added git-ext/src/blob.rs
@@ -0,0 +1,146 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{borrow::Cow, path::Path};
+

+
use radicle_std_ext::result::ResultExt as _;
+
use thiserror::Error;
+

+
use crate::{error::is_not_found_err, revwalk};
+

+
#[derive(Debug, Error)]
+
#[non_exhaustive]
+
pub enum Error {
+
    #[error(transparent)]
+
    NotFound(#[from] NotFound),
+

+
    #[error(transparent)]
+
    Git(#[from] git2::Error),
+
}
+

+
#[derive(Debug, Error)]
+
#[non_exhaustive]
+
pub enum NotFound {
+
    #[error("blob with path {0} not found")]
+
    NoSuchBlob(String),
+

+
    #[error("branch {0} not found")]
+
    NoSuchBranch(String),
+

+
    #[error("object {0} not found")]
+
    NoSuchObject(git2::Oid),
+

+
    #[error("the supplied git2::Reference does not have a target")]
+
    NoRefTarget,
+
}
+

+
pub enum Branch<'a> {
+
    Name(Cow<'a, str>),
+
    Ref(git2::Reference<'a>),
+
}
+

+
impl<'a> From<&'a str> for Branch<'a> {
+
    fn from(s: &'a str) -> Self {
+
        Self::Name(Cow::Borrowed(s))
+
    }
+
}
+

+
impl<'a> From<String> for Branch<'a> {
+
    fn from(s: String) -> Self {
+
        Self::Name(Cow::Owned(s))
+
    }
+
}
+

+
impl<'a> From<git2::Reference<'a>> for Branch<'a> {
+
    fn from(r: git2::Reference<'a>) -> Self {
+
        Self::Ref(r)
+
    }
+
}
+

+
/// Conveniently read a [`git2::Blob`] from a starting point.
+
pub enum Blob<'a> {
+
    /// Look up the tip of the reference specified by [`Branch`], peel until a
+
    /// tree is found, and traverse the tree along the given [`Path`] until
+
    /// the blob is found.
+
    Tip { branch: Branch<'a>, path: &'a Path },
+
    /// Traverse the history from the tip of [`Branch`] along the first parent
+
    /// until a commit without parents is found. Try to get the blob in that
+
    /// commit's tree at [`Path`].
+
    Init { branch: Branch<'a>, path: &'a Path },
+
    /// Look up `object`, peel until a tree is found, and try to get at the blob
+
    /// at [`Path`].
+
    At { object: git2::Oid, path: &'a Path },
+
}
+

+
impl<'a> Blob<'a> {
+
    pub fn get(self, git: &'a git2::Repository) -> Result<git2::Blob<'a>, Error> {
+
        match self {
+
            Self::Tip { branch, path } => {
+
                let reference = match branch {
+
                    Branch::Name(name) => {
+
                        git.find_reference(&name).or_matches(is_not_found_err, || {
+
                            Err(Error::NotFound(NotFound::NoSuchBranch(
+
                                name.to_owned().to_string(),
+
                            )))
+
                        })
+
                    },
+

+
                    Branch::Ref(reference) => Ok(reference),
+
                }?;
+
                let tree = reference.peel_to_tree()?;
+
                blob(git, tree, path)
+
            },
+

+
            Self::Init { branch, path } => {
+
                let start = match branch {
+
                    Branch::Name(name) => Ok(revwalk::Start::Ref(name.to_string())),
+
                    Branch::Ref(reference) => {
+
                        match (reference.target(), reference.symbolic_target()) {
+
                            (Some(oid), _) => Ok(revwalk::Start::Oid(oid)),
+
                            (_, Some(sym)) => Ok(revwalk::Start::Ref(sym.to_string())),
+
                            (_, _) => Err(Error::NotFound(NotFound::NoRefTarget)),
+
                        }
+
                    },
+
                }?;
+

+
                let revwalk = revwalk::FirstParent::new(git, start)?.reverse()?;
+
                match revwalk.into_iter().next() {
+
                    None => Err(Error::NotFound(NotFound::NoSuchBlob(
+
                        path.display().to_string(),
+
                    ))),
+
                    Some(oid) => {
+
                        let oid = oid?;
+
                        let tree = git.find_commit(oid)?.tree()?;
+
                        blob(git, tree, path)
+
                    },
+
                }
+
            },
+

+
            Self::At { object, path } => {
+
                let tree = git
+
                    .find_object(object, None)
+
                    .or_matches(is_not_found_err, || {
+
                        Err(Error::NotFound(NotFound::NoSuchObject(object)))
+
                    })
+
                    .and_then(|obj| Ok(obj.peel_to_tree()?))?;
+
                blob(git, tree, path)
+
            },
+
        }
+
    }
+
}
+

+
fn blob<'a>(
+
    repo: &'a git2::Repository,
+
    tree: git2::Tree<'a>,
+
    path: &'a Path,
+
) -> Result<git2::Blob<'a>, Error> {
+
    let entry = tree.get_path(path).or_matches(is_not_found_err, || {
+
        Err(Error::NotFound(NotFound::NoSuchBlob(
+
            path.display().to_string(),
+
        )))
+
    })?;
+

+
    entry.to_object(repo)?.peel_to_blob().map_err(Error::from)
+
}
added git-ext/src/error.rs
@@ -0,0 +1,22 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{fmt::Display, io};
+

+
pub fn is_not_found_err(e: &git2::Error) -> bool {
+
    e.code() == git2::ErrorCode::NotFound
+
}
+

+
pub fn is_exists_err(e: &git2::Error) -> bool {
+
    e.code() == git2::ErrorCode::Exists
+
}
+

+
pub fn into_git_err<E: Display>(e: E) -> git2::Error {
+
    git2::Error::from_str(&e.to_string())
+
}
+

+
pub fn into_io_err(e: git2::Error) -> io::Error {
+
    io::Error::new(io::ErrorKind::Other, e)
+
}
added git-ext/src/lib.rs
@@ -0,0 +1,22 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
//! Extensions and wrappers for `git2` types
+

+
pub mod blob;
+
pub mod error;
+
pub mod oid;
+
pub mod reference;
+
pub mod revwalk;
+
pub mod transport;
+
pub mod tree;
+

+
pub use blob::*;
+
pub use error::*;
+
pub use oid::*;
+
pub use reference::*;
+
pub use revwalk::*;
+
pub use transport::*;
+
pub use tree::Tree;
added git-ext/src/oid.rs
@@ -0,0 +1,233 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    convert::TryFrom,
+
    fmt::{self, Display},
+
    ops::Deref,
+
    str::FromStr,
+
};
+

+
use multihash::{Multihash, MultihashRef};
+
use thiserror::Error;
+

+
#[cfg(feature = "link-git")]
+
use link_git::hash as git_hash;
+

+
/// Serializable [`git2::Oid`]
+
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+
pub struct Oid(git2::Oid);
+

+
impl Oid {
+
    pub fn into_multihash(self) -> Multihash {
+
        self.into()
+
    }
+
}
+

+
#[cfg(feature = "serde")]
+
mod serde_impls {
+
    use super::*;
+
    use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
+

+
    impl Serialize for Oid {
+
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+
        where
+
            S: Serializer,
+
        {
+
            self.0.to_string().serialize(serializer)
+
        }
+
    }
+

+
    impl<'de> Deserialize<'de> for Oid {
+
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+
        where
+
            D: Deserializer<'de>,
+
        {
+
            struct OidVisitor;
+

+
            impl<'de> Visitor<'de> for OidVisitor {
+
                type Value = Oid;
+

+
                fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
+
                    write!(f, "a hexidecimal git2::Oid")
+
                }
+

+
                fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+
                where
+
                    E: serde::de::Error,
+
                {
+
                    s.parse().map_err(serde::de::Error::custom)
+
                }
+
            }
+

+
            deserializer.deserialize_str(OidVisitor)
+
        }
+
    }
+
}
+

+
#[cfg(feature = "minicbor")]
+
mod minicbor_impls {
+
    use super::*;
+
    use minicbor::{
+
        decode,
+
        encode::{self, Write},
+
        Decode,
+
        Decoder,
+
        Encode,
+
        Encoder,
+
    };
+

+
    impl Encode for Oid {
+
        fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
            e.bytes(Multihash::from(self).as_bytes())?;
+
            Ok(())
+
        }
+
    }
+

+
    impl<'b> Decode<'b> for Oid {
+
        fn decode(d: &mut Decoder) -> Result<Self, decode::Error> {
+
            let bytes = d.bytes()?;
+
            let mhash = MultihashRef::from_slice(bytes)
+
                .or(Err(decode::Error::Message("not a multihash")))?;
+
            Self::try_from(mhash).or(Err(decode::Error::Message("not a git oid")))
+
        }
+
    }
+
}
+

+
impl Deref for Oid {
+
    type Target = git2::Oid;
+

+
    fn deref(&self) -> &Self::Target {
+
        &self.0
+
    }
+
}
+

+
impl AsRef<git2::Oid> for Oid {
+
    fn as_ref(&self) -> &git2::Oid {
+
        self
+
    }
+
}
+

+
impl AsRef<[u8]> for Oid {
+
    fn as_ref(&self) -> &[u8] {
+
        self.as_bytes()
+
    }
+
}
+

+
#[cfg(feature = "link-git")]
+
impl AsRef<git_hash::oid> for Oid {
+
    fn as_ref(&self) -> &git_hash::oid {
+
        // SAFETY: checks the length of the slice, which we know is correct
+
        git_hash::oid::try_from(self.as_bytes()).unwrap()
+
    }
+
}
+

+
impl From<git2::Oid> for Oid {
+
    fn from(oid: git2::Oid) -> Self {
+
        Self(oid)
+
    }
+
}
+

+
impl From<Oid> for git2::Oid {
+
    fn from(oid: Oid) -> Self {
+
        oid.0
+
    }
+
}
+

+
#[cfg(feature = "link-git")]
+
impl From<git_hash::ObjectId> for Oid {
+
    fn from(git_hash::ObjectId::Sha1(bs): git_hash::ObjectId) -> Self {
+
        // SAFETY: checks the length of the slice, which we statically know
+
        Self(git2::Oid::from_bytes(&bs).unwrap())
+
    }
+
}
+

+
#[cfg(feature = "link-git")]
+
impl From<Oid> for git_hash::ObjectId {
+
    fn from(oid: Oid) -> Self {
+
        Self::from_20_bytes(oid.as_ref())
+
    }
+
}
+

+
#[cfg(feature = "link-git")]
+
impl<'a> From<&'a Oid> for &'a git_hash::oid {
+
    fn from(oid: &'a Oid) -> Self {
+
        oid.as_ref()
+
    }
+
}
+

+
impl Display for Oid {
+
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+
        self.0.fmt(f)
+
    }
+
}
+

+
impl TryFrom<&str> for Oid {
+
    type Error = git2::Error;
+

+
    fn try_from(s: &str) -> Result<Self, Self::Error> {
+
        s.parse().map(Self)
+
    }
+
}
+

+
impl FromStr for Oid {
+
    type Err = git2::Error;
+

+
    fn from_str(s: &str) -> Result<Self, Self::Err> {
+
        Self::try_from(s)
+
    }
+
}
+

+
#[derive(Debug, Error)]
+
#[non_exhaustive]
+
pub enum FromMultihashError {
+
    #[error("invalid hash algorithm: expected Sha1, got {actual:?}")]
+
    AlgorithmMismatch { actual: multihash::Code },
+

+
    #[error(transparent)]
+
    Git(#[from] git2::Error),
+
}
+

+
impl TryFrom<Multihash> for Oid {
+
    type Error = FromMultihashError;
+

+
    fn try_from(mhash: Multihash) -> Result<Self, Self::Error> {
+
        Self::try_from(mhash.as_ref())
+
    }
+
}
+

+
impl TryFrom<MultihashRef<'_>> for Oid {
+
    type Error = FromMultihashError;
+

+
    fn try_from(mhash: MultihashRef) -> Result<Self, Self::Error> {
+
        if mhash.algorithm() != multihash::Code::Sha1 {
+
            return Err(Self::Error::AlgorithmMismatch {
+
                actual: mhash.algorithm(),
+
            });
+
        }
+

+
        Self::try_from(mhash.digest()).map_err(Self::Error::from)
+
    }
+
}
+

+
impl TryFrom<&[u8]> for Oid {
+
    type Error = git2::Error;
+

+
    fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
+
        git2::Oid::from_bytes(bytes).map(Self)
+
    }
+
}
+

+
impl From<Oid> for Multihash {
+
    fn from(oid: Oid) -> Self {
+
        Self::from(&oid)
+
    }
+
}
+

+
impl From<&Oid> for Multihash {
+
    fn from(oid: &Oid) -> Self {
+
        multihash::wrap(multihash::Code::Sha1, oid.as_ref())
+
    }
+
}
added git-ext/src/reference.rs
@@ -0,0 +1,26 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::convert::TryFrom as _;
+

+
mod iter;
+
pub use iter::{ReferenceNames, References};
+

+
pub mod name;
+
pub use name::{OneLevel, Qualified, RefLike, RefspecPattern};
+

+
pub mod check {
+
    pub use git_ref_format::{check_ref_format as ref_format, Error, Options};
+
}
+

+
pub fn peeled(head: git2::Reference) -> Option<(String, git2::Oid)> {
+
    head.name()
+
        .and_then(|name| head.target().map(|target| (name.to_owned(), target)))
+
}
+

+
pub fn refined((name, oid): (&str, git2::Oid)) -> Result<(OneLevel, crate::Oid), name::Error> {
+
    let name = RefLike::try_from(name)?;
+
    Ok((OneLevel::from(name), oid.into()))
+
}
added git-ext/src/reference/iter.rs
@@ -0,0 +1,87 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
/// Iterator chaining multiple [`git2::References`]
+
#[must_use = "iterators are lazy and do nothing unless consumed"]
+
pub struct References<'a> {
+
    inner: Vec<git2::References<'a>>,
+
}
+

+
impl<'a> References<'a> {
+
    pub fn new(refs: impl IntoIterator<Item = git2::References<'a>>) -> Self {
+
        Self {
+
            inner: refs.into_iter().collect(),
+
        }
+
    }
+

+
    pub fn from_globs(
+
        repo: &'a git2::Repository,
+
        globs: impl IntoIterator<Item = impl AsRef<str>>,
+
    ) -> Result<Self, git2::Error> {
+
        let globs = globs.into_iter();
+
        let mut iters = globs
+
            .size_hint()
+
            .1
+
            .map(Vec::with_capacity)
+
            .unwrap_or_else(Vec::new);
+
        for glob in globs {
+
            let iter = repo.references_glob(glob.as_ref())?;
+
            iters.push(iter);
+
        }
+

+
        Ok(Self::new(iters))
+
    }
+

+
    pub fn names<'b>(&'b mut self) -> ReferenceNames<'a, 'b> {
+
        ReferenceNames {
+
            inner: self.inner.iter_mut().map(|refs| refs.names()).collect(),
+
        }
+
    }
+

+
    pub fn peeled(self) -> impl Iterator<Item = (String, git2::Oid)> + 'a {
+
        self.filter_map(|reference| {
+
            reference.ok().and_then(|head| {
+
                head.name().and_then(|name| {
+
                    head.target()
+
                        .map(|target| (name.to_owned(), target.to_owned()))
+
                })
+
            })
+
        })
+
    }
+
}
+

+
impl<'a> Iterator for References<'a> {
+
    type Item = Result<git2::Reference<'a>, git2::Error>;
+

+
    fn next(&mut self) -> Option<Self::Item> {
+
        self.inner.pop().and_then(|mut iter| match iter.next() {
+
            None => self.next(),
+
            Some(item) => {
+
                self.inner.push(iter);
+
                Some(item)
+
            },
+
        })
+
    }
+
}
+

+
/// Iterator chaining multiple [`git2::ReferenceNames`]
+
#[must_use = "iterators are lazy and do nothing unless consumed"]
+
pub struct ReferenceNames<'repo, 'references> {
+
    inner: Vec<git2::ReferenceNames<'repo, 'references>>,
+
}
+

+
impl<'a, 'b> Iterator for ReferenceNames<'a, 'b> {
+
    type Item = Result<&'b str, git2::Error>;
+

+
    fn next(&mut self) -> Option<Self::Item> {
+
        self.inner.pop().and_then(|mut iter| match iter.next() {
+
            None => self.next(),
+
            Some(item) => {
+
                self.inner.push(iter);
+
                Some(item)
+
            },
+
        })
+
    }
+
}
added git-ext/src/reference/name.rs
@@ -0,0 +1,711 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    convert::TryFrom,
+
    fmt::{self, Display},
+
    iter::FromIterator,
+
    ops::Deref,
+
    path::Path,
+
    str::{self, FromStr},
+
};
+

+
pub use percent_encoding::PercentEncode;
+
use thiserror::Error;
+

+
use super::check;
+

+
#[derive(Debug, Error)]
+
#[non_exhaustive]
+
pub enum Error {
+
    #[error("invalid utf8")]
+
    Utf8,
+

+
    #[error("not a valid git ref name or pattern")]
+
    RefFormat(#[from] check::Error),
+
}
+

+
impl Error {
+
    pub const fn empty() -> Self {
+
        Self::RefFormat(check::Error::Empty)
+
    }
+
}
+

+
#[derive(Debug, Error)]
+
#[non_exhaustive]
+
pub enum StripPrefixError {
+
    #[error("prefix is equal to path")]
+
    ImproperPrefix,
+

+
    #[error("not prefixed by given path")]
+
    NotPrefix,
+
}
+

+
/// An owned path-like value which is a valid git refname.
+
///
+
/// See [`git-check-ref-format`] for what the rules for refnames are --
+
/// conversion functions behave as if `--allow-onelevel` was given.
+
/// Additionally, we impose the rule that the name must consist of valid utf8.
+
///
+
/// Note that refspec patterns (eg. "refs/heads/*") are not allowed (see
+
/// [`RefspecPattern`]), and that the maximum length of the name is 1024 bytes.
+
///
+
/// [`git-check-ref-format`]: https://git-scm.com/docs/git-check-ref-format
+
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
+
#[cfg_attr(
+
    feature = "serde",
+
    derive(serde::Serialize, serde::Deserialize),
+
    serde(into = "String", try_from = "String")
+
)]
+
pub struct RefLike(String);
+

+
impl RefLike {
+
    /// Append the path in `Other` to `self`.
+
    pub fn join<Other: Into<Self>>(&self, other: Other) -> Self {
+
        Self(format!("{}/{}", self.0, other.into().0))
+
    }
+

+
    /// Append a [`RefspecPattern`], yielding a [`RefspecPattern`]
+
    pub fn with_pattern_suffix<Suf: Into<RefspecPattern>>(&self, suf: Suf) -> RefspecPattern {
+
        RefspecPattern(format!("{}/{}", self.0, suf.into().0))
+
    }
+

+
    /// Returns a [`RefLike`] that, when joined onto `base`, yields `self`.
+
    ///
+
    /// # Errors
+
    ///
+
    /// If `base` is not a prefix of `self`, or `base` equals the path in `self`
+
    /// (ie. the result would be the empty path, which is not a valid
+
    /// [`RefLike`]).
+
    pub fn strip_prefix<P: AsRef<str>>(&self, base: P) -> Result<Self, StripPrefixError> {
+
        let base = base.as_ref();
+
        let base = format!("{}/", base.strip_suffix('/').unwrap_or(base));
+
        self.0
+
            .strip_prefix(&base)
+
            .ok_or(StripPrefixError::NotPrefix)
+
            .and_then(|path| {
+
                if path.is_empty() {
+
                    Err(StripPrefixError::ImproperPrefix)
+
                } else {
+
                    Ok(Self(path.into()))
+
                }
+
            })
+
    }
+

+
    pub fn as_str(&self) -> &str {
+
        self.as_ref()
+
    }
+

+
    pub fn percent_encode(&self) -> PercentEncode {
+
        /// https://url.spec.whatwg.org/#fragment-percent-encode-set
+
        const FRAGMENT_PERCENT_ENCODE_SET: &percent_encoding::AsciiSet =
+
            &percent_encoding::CONTROLS
+
                .add(b' ')
+
                .add(b'"')
+
                .add(b'<')
+
                .add(b'>')
+
                .add(b'`');
+

+
        /// https://url.spec.whatwg.org/#path-percent-encode-set
+
        const PATH_PERCENT_ENCODE_SET: &percent_encoding::AsciiSet = &FRAGMENT_PERCENT_ENCODE_SET
+
            .add(b'#')
+
            .add(b'?')
+
            .add(b'{')
+
            .add(b'}');
+

+
        percent_encoding::utf8_percent_encode(self.as_str(), PATH_PERCENT_ENCODE_SET)
+
    }
+
}
+

+
impl Deref for RefLike {
+
    type Target = str;
+

+
    fn deref(&self) -> &Self::Target {
+
        &self.0
+
    }
+
}
+

+
impl AsRef<str> for RefLike {
+
    fn as_ref(&self) -> &str {
+
        &self.0
+
    }
+
}
+

+
impl TryFrom<&str> for RefLike {
+
    type Error = Error;
+

+
    fn try_from(s: &str) -> Result<Self, Self::Error> {
+
        check::ref_format(
+
            check::Options {
+
                allow_onelevel: true,
+
                allow_pattern: false,
+
            },
+
            s,
+
        )?;
+
        Ok(Self(s.to_owned()))
+
    }
+
}
+

+
impl TryFrom<&[u8]> for RefLike {
+
    type Error = Error;
+

+
    fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
+
        str::from_utf8(bytes)
+
            .or(Err(Error::Utf8))
+
            .and_then(Self::try_from)
+
    }
+
}
+

+
impl FromStr for RefLike {
    type Err = Error;

    /// Delegates to the `TryFrom<&str>` impl, so `"...".parse()` works.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::try_from(s)
    }
}
+

+
impl TryFrom<String> for RefLike {
+
    type Error = Error;
+

+
    fn try_from(s: String) -> Result<Self, Self::Error> {
+
        Self::try_from(s.as_str())
+
    }
+
}
+

+
impl TryFrom<&Path> for RefLike {
    type Error = Error;

    // On Windows, `Path` components are separated by '\', so the path is
    // rebuilt with '/' separators before validation. Only `Normal`
    // components are kept (drive prefixes like `C:` and root markers are
    // dropped).
    #[cfg(target_family = "windows")]
    fn try_from(p: &Path) -> Result<Self, Self::Error> {
        use std::{convert::TryInto as _, path::Component::Normal};

        p.components()
            .filter_map(|comp| match comp {
                Normal(s) => Some(s),
                _ => None,
            })
            .map(|os| os.to_str().ok_or(Error::Utf8))
            .collect::<Result<Vec<_>, Self::Error>>()?
            .join("/")
            .try_into()
    }

    // On Unix the separator is already '/', so the path is validated as-is.
    // It must still be valid UTF-8.
    #[cfg(target_family = "unix")]
    fn try_from(p: &Path) -> Result<Self, Self::Error> {
        Self::try_from(p.to_str().ok_or(Error::Utf8)?)
    }
}
+

+
impl From<&RefLike> for RefLike {
+
    fn from(me: &RefLike) -> Self {
+
        me.clone()
+
    }
+
}
+

+
impl From<git_ref_format::RefString> for RefLike {
+
    #[inline]
+
    fn from(r: git_ref_format::RefString) -> Self {
+
        Self(r.into())
+
    }
+
}
+

+
impl From<&git_ref_format::RefString> for RefLike {
+
    #[inline]
+
    fn from(r: &git_ref_format::RefString) -> Self {
+
        Self::from(r.as_refstr())
+
    }
+
}
+

+
impl From<&git_ref_format::RefStr> for RefLike {
+
    #[inline]
+
    fn from(r: &git_ref_format::RefStr) -> Self {
+
        Self(r.to_owned().into())
+
    }
+
}
+

+
impl From<RefLike> for String {
+
    fn from(RefLike(path): RefLike) -> Self {
+
        path
+
    }
+
}
+

+
impl FromIterator<Self> for RefLike {
    /// Join the components with '/' into a single [`RefLike`].
    ///
    /// NOTE(review): an empty iterator yields `RefLike("")`, which bypasses
    /// the non-empty invariant the `TryFrom` constructors enforce — callers
    /// presumably always supply at least one element. TODO: confirm.
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = Self>,
    {
        Self(iter.into_iter().map(|x| x.0).collect::<Vec<_>>().join("/"))
    }
}
+

+
impl Display for RefLike {
+
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+
        f.write_str(self.as_str())
+
    }
+
}
+

+
/// A [`RefLike`] without a "refs/" prefix.
+
///
+
/// Conversion functions strip the first **two** path components iff the path
+
/// starts with `refs/`.
+
///
+
/// Note that the [`serde::Deserialize`] impl thusly implies that input in
+
/// [`Qualified`] form is accepted, and silently converted.
+
///
+
/// # Examples
+
///
+
/// ```rust
+
/// use std::convert::TryFrom;
+
/// use radicle_git_ext::reference::name::*;
+
///
+
/// assert_eq!(
+
///     &*OneLevel::from(RefLike::try_from("refs/heads/next").unwrap()),
+
///     "next"
+
/// );
+
///
+
/// assert_eq!(
+
///     &*OneLevel::from(RefLike::try_from("refs/remotes/origin/it").unwrap()),
+
///     "origin/it"
+
/// );
+
///
+
/// assert_eq!(
+
///     &*OneLevel::from(RefLike::try_from("mistress").unwrap()),
+
///     "mistress"
+
/// );
+
///
+
/// assert_eq!(
+
///     OneLevel::from_qualified(Qualified::from(RefLike::try_from("refs/tags/grace").unwrap())),
+
///     (
+
///         OneLevel::from(RefLike::try_from("grace").unwrap()),
+
///         Some(RefLike::try_from("tags").unwrap())
+
///     ),
+
/// );
+
///
+
/// assert_eq!(
+
///     OneLevel::from_qualified(Qualified::from(RefLike::try_from("refs/remotes/origin/hopper").unwrap())),
+
///     (
+
///         OneLevel::from(RefLike::try_from("origin/hopper").unwrap()),
+
///         Some(RefLike::try_from("remotes").unwrap())
+
///     ),
+
/// );
+
///
+
/// assert_eq!(
+
///     OneLevel::from_qualified(Qualified::from(RefLike::try_from("refs/HEAD").unwrap())),
+
///     (OneLevel::from(RefLike::try_from("HEAD").unwrap()), None)
+
/// );
+
///
+
/// assert_eq!(
+
///     &*OneLevel::from(RefLike::try_from("origin/hopper").unwrap()).into_qualified(
+
///         RefLike::try_from("remotes").unwrap()
+
///     ),
+
///     "refs/remotes/origin/hopper",
+
/// );
+
/// ```
+
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
+
#[cfg_attr(
+
    feature = "serde",
+
    derive(serde::Serialize, serde::Deserialize),
+
    serde(into = "String", try_from = "RefLike")
+
)]
+
pub struct OneLevel(String);
+

+
impl OneLevel {
    pub fn as_str(&self) -> &str {
        self.as_ref()
    }

    /// Split a [`Qualified`] ref into its [`OneLevel`] remainder and its
    /// category — the component right after "refs/" (e.g. "heads", "tags",
    /// "remotes") — if any.
    ///
    /// `refs/heads/next` -> (`next`, Some(`heads`));
    /// `refs/HEAD` -> (`HEAD`, None).
    pub fn from_qualified(Qualified(path): Qualified) -> (Self, Option<RefLike>) {
        // Drop the leading "refs/" (if present) and walk the remaining
        // components.
        let mut path = path.strip_prefix("refs/").unwrap_or(&path).split('/');
        match path.next() {
            Some(category) => {
                let category = RefLike(category.into());
                // check that the "category" is not the only component of the path
                match path.next() {
                    Some(head) => (
                        // Re-join everything after the category.
                        Self(
                            std::iter::once(head)
                                .chain(path)
                                .collect::<Vec<_>>()
                                .join("/"),
                        ),
                        Some(category),
                    ),
                    // Only one component after "refs/": that component *is*
                    // the one-level name, and there is no category.
                    None => (Self::from(category), None),
                }
            },
            // `str::split` always yields at least one item, even for "".
            None => unreachable!(),
        }
    }

    /// Re-qualify as `refs/<category>/<self>`.
    pub fn into_qualified(self, category: RefLike) -> Qualified {
        Qualified(format!("refs/{}/{}", category, self))
    }
}
+

+
impl Deref for OneLevel {
+
    type Target = str;
+

+
    fn deref(&self) -> &Self::Target {
+
        &self.0
+
    }
+
}
+

+
impl AsRef<str> for OneLevel {
+
    fn as_ref(&self) -> &str {
+
        self
+
    }
+
}
+

+
impl From<RefLike> for OneLevel {
+
    fn from(RefLike(path): RefLike) -> Self {
+
        if path.starts_with("refs/") {
+
            Self(path.split('/').skip(2).collect::<Vec<_>>().join("/"))
+
        } else {
+
            Self(path)
+
        }
+
    }
+
}
+

+
impl From<Qualified> for OneLevel {
+
    fn from(Qualified(path): Qualified) -> Self {
+
        Self::from(RefLike(path))
+
    }
+
}
+

+
impl From<OneLevel> for RefLike {
+
    fn from(OneLevel(path): OneLevel) -> Self {
+
        Self(path)
+
    }
+
}
+

+
impl From<OneLevel> for String {
+
    fn from(OneLevel(path): OneLevel) -> Self {
+
        path
+
    }
+
}
+

+
impl Display for OneLevel {
+
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+
        f.write_str(self.as_str())
+
    }
+
}
+

+
/// A [`RefLike`] **with** a "refs/" prefix.
+
///
+
/// Conversion functions will assume `refs/heads/` if the input was not
+
/// qualified.
+
///
+
/// Note that the [`serde::Deserialize`] impl thusly implies that input in
+
/// [`OneLevel`] form is accepted, and silently converted.
+
///
+
/// # Examples
+
///
+
/// ```rust
+
/// use std::convert::TryFrom;
+
/// use radicle_git_ext::reference::name::*;
+
///
+
/// assert_eq!(
+
///     &*Qualified::from(RefLike::try_from("laplace").unwrap()),
+
///     "refs/heads/laplace"
+
/// );
+
///
+
/// assert_eq!(
+
///     &*Qualified::from(RefLike::try_from("refs/heads/pu").unwrap()),
+
///     "refs/heads/pu"
+
/// );
+
///
+
/// assert_eq!(
+
///     &*Qualified::from(RefLike::try_from("refs/tags/v6.6.6").unwrap()),
+
///     "refs/tags/v6.6.6"
+
/// );
+
/// ```
+
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
+
#[cfg_attr(
+
    feature = "serde",
+
    derive(serde::Serialize, serde::Deserialize),
+
    serde(into = "String", try_from = "RefLike")
+
)]
+
pub struct Qualified(String);
+

+
impl Qualified {
+
    pub fn as_str(&self) -> &str {
+
        &self.0
+
    }
+
}
+

+
impl Deref for Qualified {
+
    type Target = str;
+

+
    fn deref(&self) -> &Self::Target {
+
        &self.0
+
    }
+
}
+

+
impl AsRef<str> for Qualified {
+
    fn as_ref(&self) -> &str {
+
        &self.0
+
    }
+
}
+

+
impl From<RefLike> for Qualified {
+
    fn from(RefLike(path): RefLike) -> Self {
+
        if path.starts_with("refs/") {
+
            Self(path)
+
        } else {
+
            Self(format!("refs/heads/{}", path))
+
        }
+
    }
+
}
+

+
impl From<OneLevel> for Qualified {
+
    fn from(OneLevel(path): OneLevel) -> Self {
+
        Self::from(RefLike(path))
+
    }
+
}
+

+
impl From<Qualified> for RefLike {
+
    fn from(Qualified(path): Qualified) -> Self {
+
        Self(path)
+
    }
+
}
+

+
impl From<Qualified> for String {
+
    fn from(Qualified(path): Qualified) -> Self {
+
        path
+
    }
+
}
+

+
impl Display for Qualified {
+
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+
        f.write_str(self)
+
    }
+
}
+

+
/// An owned, path-like value which is a valid refspec pattern.
+
///
+
/// Conversion functions behave as if `--allow-onelevel --refspec-pattern` where
+
/// given to [`git-check-ref-format`]. That is, most of the rules of [`RefLike`]
+
/// apply, but the path _may_ contain exactly one `*` character.
+
///
+
/// [`git-check-ref-format`]: https://git-scm.com/docs/git-check-ref-format
+
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
+
#[cfg_attr(
+
    feature = "serde",
+
    derive(serde::Serialize, serde::Deserialize),
+
    serde(into = "String", try_from = "String")
+
)]
+
pub struct RefspecPattern(String);
+

+
impl RefspecPattern {
    /// Append the `RefLike` to the `RefspecPattern`. This allows the creation
    /// of patterns where the `*` appears in the middle of the path, e.g.
    /// `refs/remotes/*/mfdoom`
    ///
    /// Since a `RefLike` cannot itself contain '*', the result still has at
    /// most one glob and so remains a valid pattern.
    pub fn append(&self, refl: impl Into<RefLike>) -> Self {
        RefspecPattern(format!("{}/{}", self.0, refl.into()))
    }

    pub fn as_str(&self) -> &str {
        self.as_ref()
    }
}
+

+
impl From<&RefspecPattern> for RefspecPattern {
+
    fn from(pat: &RefspecPattern) -> Self {
+
        pat.clone()
+
    }
+
}
+

+
impl Deref for RefspecPattern {
+
    type Target = str;
+

+
    fn deref(&self) -> &Self::Target {
+
        &self.0
+
    }
+
}
+

+
impl AsRef<str> for RefspecPattern {
+
    fn as_ref(&self) -> &str {
+
        self
+
    }
+
}
+

+
impl TryFrom<&str> for RefspecPattern {
+
    type Error = Error;
+

+
    fn try_from(s: &str) -> Result<Self, Self::Error> {
+
        check::ref_format(
+
            check::Options {
+
                allow_onelevel: true,
+
                allow_pattern: true,
+
            },
+
            s,
+
        )?;
+
        Ok(Self(s.to_owned()))
+
    }
+
}
+

+
impl TryFrom<&[u8]> for RefspecPattern {
+
    type Error = Error;
+

+
    fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
+
        str::from_utf8(bytes)
+
            .or(Err(Error::Utf8))
+
            .and_then(Self::try_from)
+
    }
+
}
+

+
impl FromStr for RefspecPattern {
+
    type Err = Error;
+

+
    fn from_str(s: &str) -> Result<Self, Self::Err> {
+
        Self::try_from(s)
+
    }
+
}
+

+
impl TryFrom<String> for RefspecPattern {
+
    type Error = Error;
+

+
    fn try_from(s: String) -> Result<Self, Self::Error> {
+
        Self::try_from(s.as_str())
+
    }
+
}
+

+
impl From<RefspecPattern> for String {
+
    fn from(RefspecPattern(path): RefspecPattern) -> Self {
+
        path
+
    }
+
}
+

+
impl Display for RefspecPattern {
+
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+
        f.write_str(self.as_str())
+
    }
+
}
+

+
// `RefLike`-likes can be coerced into `RefspecPattern`s
+

+
impl From<RefLike> for RefspecPattern {
+
    fn from(RefLike(path): RefLike) -> Self {
+
        Self(path)
+
    }
+
}
+

+
impl From<&RefLike> for RefspecPattern {
+
    fn from(RefLike(path): &RefLike) -> Self {
+
        Self(path.to_owned())
+
    }
+
}
+

+
impl From<OneLevel> for RefspecPattern {
+
    fn from(OneLevel(path): OneLevel) -> Self {
+
        Self(path)
+
    }
+
}
+

+
impl From<&OneLevel> for RefspecPattern {
+
    fn from(OneLevel(path): &OneLevel) -> Self {
+
        Self(path.to_owned())
+
    }
+
}
+

+
impl From<Qualified> for RefspecPattern {
+
    fn from(Qualified(path): Qualified) -> Self {
+
        Self(path)
+
    }
+
}
+

+
impl From<&Qualified> for RefspecPattern {
+
    fn from(Qualified(path): &Qualified) -> Self {
+
        Self(path.to_owned())
+
    }
+
}
+

+
impl From<git_ref_format::refspec::PatternString> for RefspecPattern {
+
    #[inline]
+
    fn from(r: git_ref_format::refspec::PatternString) -> Self {
+
        Self(r.into())
+
    }
+
}
+

+
impl From<&git_ref_format::refspec::PatternStr> for RefspecPattern {
+
    #[inline]
+
    fn from(r: &git_ref_format::refspec::PatternStr) -> Self {
+
        Self(r.to_owned().into())
+
    }
+
}
+

+
#[cfg(feature = "minicbor")]
+
mod minicbor_impls {
+
    use super::*;
+
    use minicbor::{
+
        decode,
+
        encode::{self, Write},
+
        Decode,
+
        Decoder,
+
        Encode,
+
        Encoder,
+
    };
+

+
    impl Encode for RefLike {
+
        fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
            e.str(self.as_str())?;
+
            Ok(())
+
        }
+
    }
+

+
    impl<'b> Decode<'b> for RefLike {
+
        fn decode(d: &mut Decoder) -> Result<Self, decode::Error> {
+
            let path = d.str()?;
+
            Self::try_from(path).or(Err(decode::Error::Message("invalid reflike")))
+
        }
+
    }
+

+
    impl minicbor::Encode for OneLevel {
+
        fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
            e.str(self.as_str())?;
+
            Ok(())
+
        }
+
    }
+

+
    impl<'b> Decode<'b> for OneLevel {
+
        fn decode(d: &mut Decoder) -> Result<Self, decode::Error> {
+
            let refl: RefLike = Decode::decode(d)?;
+
            Ok(Self::from(refl))
+
        }
+
    }
+

+
    impl Encode for Qualified {
+
        fn encode<W: encode::Write>(
+
            &self,
+
            e: &mut Encoder<W>,
+
        ) -> Result<(), encode::Error<W::Error>> {
+
            e.str(self.as_str())?;
+
            Ok(())
+
        }
+
    }
+

+
    impl<'b> Decode<'b> for Qualified {
+
        fn decode(d: &mut Decoder) -> Result<Self, decode::Error> {
+
            let refl: RefLike = Decode::decode(d)?;
+
            Ok(Self::from(refl))
+
        }
+
    }
+

+
    impl Encode for RefspecPattern {
+
        fn encode<W: encode::Write>(
+
            &self,
+
            e: &mut Encoder<W>,
+
        ) -> Result<(), encode::Error<W::Error>> {
+
            e.str(self.as_str())?;
+
            Ok(())
+
        }
+
    }
+

+
    impl<'b> Decode<'b> for RefspecPattern {
+
        fn decode(d: &mut Decoder) -> Result<Self, decode::Error> {
+
            Self::try_from(d.str()?).or(Err(decode::Error::Message("invalid refspec pattern")))
+
        }
+
    }
+
}
added git-ext/src/revwalk.rs
@@ -0,0 +1,44 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
/// Where a [`FirstParent`] revwalk starts: either a concrete commit id, or
/// a ref name to be resolved by libgit2.
pub enum Start {
    Oid(git2::Oid),
    Ref(String),
}
+

+
pub struct FirstParent<'a> {
+
    inner: git2::Revwalk<'a>,
+
}
+

+
impl<'a> FirstParent<'a> {
    /// Set up a topologically-sorted revwalk from `start` which follows only
    /// the first parent of each commit (cf. `git log --first-parent`).
    pub fn new(repo: &'a git2::Repository, start: Start) -> Result<Self, git2::Error> {
        let mut revwalk = repo.revwalk()?;
        revwalk.set_sorting(git2::Sort::TOPOLOGICAL)?;
        revwalk.simplify_first_parent()?;

        match start {
            Start::Oid(oid) => revwalk.push(oid),
            Start::Ref(name) => revwalk.push_ref(&name),
        }?;

        Ok(Self { inner: revwalk })
    }

    /// Yield commits oldest-first instead of newest-first.
    pub fn reverse(mut self) -> Result<Self, git2::Error> {
        // `set_sorting` replaces the flag set wholesale, so re-set
        // TOPOLOGICAL together with REVERSE rather than REVERSE alone.
        let mut sort = git2::Sort::TOPOLOGICAL;
        sort.insert(git2::Sort::REVERSE);
        self.inner.set_sorting(sort)?;
        Ok(self)
    }
}
+

+
impl<'a> IntoIterator for FirstParent<'a> {
    type Item = Result<git2::Oid, git2::Error>;
    type IntoIter = git2::Revwalk<'a>;

    /// Hand out the underlying revwalk, which iterates commit ids.
    fn into_iter(self) -> Self::IntoIter {
        self.inner
    }
}
added git-ext/src/transport.rs
@@ -0,0 +1,7 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
// Smart-protocol service announcement headers in pkt-line framing: a
// 4-hex-digit length (covering the length digits plus payload), the
// payload, then the "0000" flush-pkt.
// 0x001e == 30 == 4 + len("# service=git-upload-pack\n");
// 0x001f == 31 == 4 + len("# service=git-receive-pack\n").
pub const UPLOAD_PACK_HEADER: &[u8] = b"001e# service=git-upload-pack\n0000";
pub const RECEIVE_PACK_HEADER: &[u8] = b"001f# service=git-receive-pack\n0000";
added git-ext/src/tree.rs
@@ -0,0 +1,147 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{borrow::Cow, collections::BTreeMap, iter::FromIterator};
+

+
/// A simplified representation of a git tree, intended mainly to be created
+
/// from literals.
+
///
+
/// # Example
+
///
+
/// ```
+
/// use radicle_git_ext::tree::{Tree, blob};
+
///
+
/// let my_tree = vec![
+
///     ("README", blob(b"awe")),
+
///     ("src", vec![("main.rs", blob(b"fn main() {}"))].into_iter().collect()),
+
/// ]
+
/// .into_iter()
+
/// .collect::<Tree>();
+
///
+
/// assert_eq!(
+
///     format!("{:?}", my_tree),
+
///     "Tree(\
+
///         {\
+
///             \"README\": Blob(\
+
///                 [\
+
///                     97, \
+
///                     119, \
+
///                     101\
+
///                 ]\
+
///             ), \
+
///             \"src\": Tree(\
+
///                 Tree(\
+
///                     {\
+
///                         \"main.rs\": Blob(\
+
///                             [\
+
///                                 102, \
+
///                                 110, \
+
///                                 32, \
+
///                                 109, \
+
///                                 97, \
+
///                                 105, \
+
///                                 110, \
+
///                                 40, \
+
///                                 41, \
+
///                                 32, \
+
///                                 123, \
+
///                                 125\
+
///                             ]\
+
///                         )\
+
///                     }\
+
///                 )\
+
///             )\
+
///         }\
+
///     )"
+
/// )
+
/// ```
+
#[derive(Clone, Debug)]
+
pub struct Tree<'a>(BTreeMap<Cow<'a, str>, Node<'a>>);
+

+
impl Tree<'_> {
    /// Persist this tree — and, recursively, all subtrees and blobs — into
    /// `repo`'s object database, returning the oid of the root tree.
    pub fn write(&self, repo: &git2::Repository) -> Result<git2::Oid, git2::Error> {
        use Node::*;

        let mut builder = repo.treebuilder(None)?;
        for (name, node) in &self.0 {
            match node {
                Blob(data) => {
                    // Write the blob first, then reference it from the tree.
                    let oid = repo.blob(data)?;
                    builder.insert(name.as_ref(), oid, git2::FileMode::Blob.into())?;
                },
                Tree(sub) => {
                    // Depth-first: a subtree must exist before being linked.
                    let oid = sub.write(repo)?;
                    builder.insert(name.as_ref(), oid, git2::FileMode::Tree.into())?;
                },
            }
        }

        builder.write()
    }
}
+

+
impl<'a> From<BTreeMap<Cow<'a, str>, Node<'a>>> for Tree<'a> {
+
    fn from(map: BTreeMap<Cow<'a, str>, Node<'a>>) -> Self {
+
        Self(map)
+
    }
+
}
+

+
impl<'a, K, N> FromIterator<(K, N)> for Tree<'a>
+
where
+
    K: Into<Cow<'a, str>>,
+
    N: Into<Node<'a>>,
+
{
+
    fn from_iter<T>(iter: T) -> Self
+
    where
+
        T: IntoIterator<Item = (K, N)>,
+
    {
+
        Self(
+
            iter.into_iter()
+
                .map(|(k, v)| (k.into(), v.into()))
+
                .collect(),
+
        )
+
    }
+
}
+

+
#[derive(Clone, Debug)]
+
pub enum Node<'a> {
+
    Blob(Cow<'a, [u8]>),
+
    Tree(Tree<'a>),
+
}
+

+
pub fn blob(slice: &[u8]) -> Node {
+
    Node::from(slice)
+
}
+

+
impl<'a> From<&'a [u8]> for Node<'a> {
+
    fn from(slice: &'a [u8]) -> Self {
+
        Self::from(Cow::Borrowed(slice))
+
    }
+
}
+

+
impl<'a> From<Cow<'a, [u8]>> for Node<'a> {
+
    fn from(bytes: Cow<'a, [u8]>) -> Self {
+
        Self::Blob(bytes)
+
    }
+
}
+

+
impl<'a> From<Tree<'a>> for Node<'a> {
+
    fn from(tree: Tree<'a>) -> Self {
+
        Self::Tree(tree)
+
    }
+
}
+

+
impl<'a, K, N> FromIterator<(K, N)> for Node<'a>
+
where
+
    K: Into<Cow<'a, str>>,
+
    N: Into<Node<'a>>,
+
{
+
    fn from_iter<T>(iter: T) -> Self
+
    where
+
        T: IntoIterator<Item = (K, N)>,
+
    {
+
        Self::Tree(iter.into_iter().collect())
+
    }
+
}
added git-ext/t/Cargo.toml
@@ -0,0 +1,28 @@
+
[package]
+
name = "git-ext-test"
+
version = "0.1.0"
+
edition = "2021"
+
license = "GPL-3.0-or-later"
+

+
publish = false
+

+
[lib]
+
doctest = false
+
test = true
+
doc = false
+

+
[features]
+
test = []
+

+
[dev-dependencies]
+
assert_matches = "1.5"
+
minicbor = "0.13"
+
serde = "1"
+
serde_json = "1"
+

+
[dev-dependencies.radicle-git-ext]
+
path = ".."
+
features = ["minicbor", "serde"]
+

+
[dev-dependencies.test-helpers]
+
path = "../../test/test-helpers"
added git-ext/t/src/lib.rs
@@ -0,0 +1,9 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
#[cfg(test)]
+
#[macro_use]
+
extern crate assert_matches;
+

+
#[cfg(test)]
+
mod tests;
added git-ext/t/src/tests.rs
@@ -0,0 +1,255 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
use std::convert::TryFrom;
+

+
use radicle_git_ext::reference::{check, name::*};
+
use test_helpers::roundtrip;
+

+
mod common {
+
    use super::*;
+
    use std::fmt::Debug;
+

+
    pub fn invalid<T>()
+
    where
+
        T: TryFrom<&'static str, Error = Error> + Debug,
+
    {
+
        const INVALID: [&str; 16] = [
+
            ".hidden",
+
            "/etc/shadow",
+
            "@",
+
            "@{",
+
            "C:",
+
            "\\WORKGROUP",
+
            "foo.lock",
+
            "head^",
+
            "here/../../etc/shadow",
+
            "refs//heads/main",
+
            "refs/heads/",
+
            "shawn/ white",
+
            "the/dotted./quad",
+
            "wh?t",
+
            "x[a-z]",
+
            "~ommij",
+
        ];
+

+
        for v in INVALID {
+
            assert_matches!(T::try_from(v), Err(Error::RefFormat(_)), "input: {}", v)
+
        }
+
    }
+

+
    pub fn valid<T>()
+
    where
+
        T: TryFrom<&'static str, Error = Error> + AsRef<str> + Debug,
+
    {
+
        const VALID: [&str; 5] = [
+
            "\u{1F32F}",
+
            "cl@wn",
+
            "foo/bar",
+
            "master",
+
            "refs/heads/mistress",
+
        ];
+

+
        for v in VALID {
+
            assert_matches!(T::try_from(v), Ok(x) if x.as_ref() == v, "input: {}", v)
+
        }
+
    }
+

+
    pub fn empty<T>()
+
    where
+
        T: TryFrom<&'static str, Error = Error> + Debug,
+
    {
+
        assert_matches!(T::try_from(""), Err(Error::RefFormat(check::Error::Empty)))
+
    }
+

+
    pub fn nulsafe<T>()
+
    where
+
        T: TryFrom<&'static str, Error = Error> + Debug,
+
    {
+
        assert_matches!(
+
            T::try_from("jeff\0"),
+
            Err(Error::RefFormat(check::Error::InvalidChar('\0')))
+
        )
+
    }
+
}
+

+
mod reflike {
+
    use super::*;
+

+
    #[test]
+
    fn empty() {
+
        common::empty::<RefLike>()
+
    }
+

+
    #[test]
+
    fn valid() {
+
        common::valid::<RefLike>()
+
    }
+

+
    #[test]
+
    fn invalid() {
+
        common::invalid::<RefLike>()
+
    }
+

+
    #[test]
+
    fn nulsafe() {
+
        common::nulsafe::<RefLike>()
+
    }
+

+
    #[test]
+
    fn globstar_invalid() {
+
        assert_matches!(
+
            RefLike::try_from("refs/heads/*"),
+
            Err(Error::RefFormat(check::Error::InvalidChar('*')))
+
        )
+
    }
+

+
    #[test]
+
    fn into_onelevel() {
+
        assert_eq!(
+
            &*OneLevel::from(RefLike::try_from("refs/heads/next").unwrap()),
+
            "next"
+
        )
+
    }
+

+
    #[test]
+
    fn into_heads() {
+
        assert_eq!(
+
            &*Qualified::from(RefLike::try_from("pu").unwrap()),
+
            "refs/heads/pu"
+
        )
+
    }
+

+
    #[test]
+
    fn serde() {
+
        let refl = RefLike::try_from("pu").unwrap();
+
        roundtrip::json(refl.clone());
+
        roundtrip::json(OneLevel::from(refl.clone()));
+
        roundtrip::json(Qualified::from(refl))
+
    }
+

+
    #[test]
+
    fn serde_invalid() {
+
        let json = serde_json::to_string("HEAD^").unwrap();
+
        assert!(serde_json::from_str::<RefLike>(&json).is_err());
+
        assert!(serde_json::from_str::<OneLevel>(&json).is_err());
+
        assert!(serde_json::from_str::<Qualified>(&json).is_err())
+
    }
+

+
    #[test]
+
    fn cbor() {
+
        let refl = RefLike::try_from("pu").unwrap();
+
        roundtrip::cbor(refl.clone());
+
        roundtrip::cbor(OneLevel::from(refl.clone()));
+
        roundtrip::cbor(Qualified::from(refl))
+
    }
+

+
    #[test]
+
    fn cbor_invalid() {
+
        let cbor = minicbor::to_vec("HEAD^").unwrap();
+
        assert!(minicbor::decode::<RefLike>(&cbor).is_err());
+
        assert!(minicbor::decode::<OneLevel>(&cbor).is_err());
+
        assert!(minicbor::decode::<Qualified>(&cbor).is_err())
+
    }
+
}
+

+
mod pattern {
+
    use super::*;
+

+
    #[test]
+
    fn empty() {
+
        common::empty::<RefspecPattern>()
+
    }
+

+
    #[test]
+
    fn valid() {
+
        common::valid::<RefspecPattern>()
+
    }
+

+
    #[test]
+
    fn invalid() {
+
        common::invalid::<RefspecPattern>()
+
    }
+

+
    #[test]
+
    fn nulsafe() {
+
        common::nulsafe::<RefspecPattern>()
+
    }
+

+
    #[test]
+
    fn globstar_ok() {
+
        const GLOBBED: [&str; 7] = [
+
            "*",
+
            "fo*",
+
            "fo*/bar",
+
            "foo/*/bar",
+
            "foo/ba*",
+
            "foo/bar/*",
+
            "foo/b*r",
+
        ];
+

+
        for v in GLOBBED {
+
            assert_matches!(
+
                RefspecPattern::try_from(v),
+
                Ok(ref x) if x.as_str() == v,
+
                "input: {}", v
+
            )
+
        }
+
    }
+

+
    #[test]
+
    fn globstar_invalid() {
+
        const GLOBBED: [&str; 12] = [
+
            "**",
+
            "***",
+
            "*/*",
+
            "*/L/*",
+
            "fo*/*/bar",
+
            "fo*/ba*",
+
            "fo*/ba*/baz",
+
            "fo*/ba*/ba*",
+
            "fo*/bar/*",
+
            "foo/*/bar/*",
+
            "foo/*/bar/*/baz*",
+
            "foo/*/bar/*/baz/*",
+
        ];
+

+
        for v in GLOBBED {
+
            assert_matches!(
+
                RefspecPattern::try_from(v),
+
                Err(Error::RefFormat(check::Error::Pattern))
+
            )
+
        }
+
    }
+

+
    #[test]
+
    fn serde() {
+
        roundtrip::json(RefspecPattern::try_from("refs/heads/*").unwrap())
+
    }
+

+
    #[test]
+
    fn serde_invalid() {
+
        let json = serde_json::to_string("HEAD^").unwrap();
+
        assert!(serde_json::from_str::<RefspecPattern>(&json).is_err())
+
    }
+

+
    #[test]
+
    fn cbor() {
+
        roundtrip::cbor(RefspecPattern::try_from("refs/heads/*").unwrap())
+
    }
+

+
    #[test]
+
    fn cbor_invalid() {
+
        let cbor = minicbor::to_vec("HEAD^").unwrap();
+
        assert!(minicbor::decode::<RefspecPattern>(&cbor).is_err())
+
    }
+

+
    #[test]
+
    fn strip_prefix_works_for_different_ends() {
+
        let refl = RefLike::try_from("refs/heads/next").unwrap();
+
        assert_eq!(
+
            refl.strip_prefix("refs/heads").unwrap(),
+
            refl.strip_prefix("refs/heads/").unwrap()
+
        );
+
    }
+
}
added git-ref-format/Cargo.toml
@@ -0,0 +1,25 @@
+
[package]
+
name = "git-ref-format"
+
version = "0.1.0"
+
authors = ["Kim Altintop <kim@eagain.st>"]
+
edition = "2018"
+
license = "GPL-3.0-or-later"
+

+
[lib]
+
doctest = false
+
test = false
+

+
[features]
+
bstr = ["git-ref-format-core/bstr"]
+
link-literals = ["git-ref-format-core/link-literals"]
+
macro = ["git-ref-format-macro"]
+
minicbor = ["git-ref-format-core/minicbor"]
+
percent-encoding = ["git-ref-format-core/percent-encoding"]
+
serde = ["git-ref-format-core/serde"]
+

+
[dependencies.git-ref-format-core]
+
path = "./core"
+

+
[dependencies.git-ref-format-macro]
+
path = "./macro"
+
optional = true
added git-ref-format/core/Cargo.toml
@@ -0,0 +1,34 @@
+
[package]
+
name = "git-ref-format-core"
+
version = "0.1.0"
+
authors = ["Kim Altintop <kim@eagain.st>"]
+
edition = "2018"
+
license = "GPL-3.0-or-later"
+

+
[lib]
+
doctest = false
+
test = false
+

+
[features]
+
link-literals = []
+

+
[dependencies]
+
thiserror = "1.0"
+

+
[dependencies.bstr]
+
version = "0.2"
+
optional = true
+

+
[dependencies.minicbor]
+
version = "0.13"
+
features = ["std"]
+
optional = true
+

+
[dependencies.percent-encoding]
+
version = "2.1.0"
+
optional = true
+

+
[dependencies.serde]
+
version = "1.0"
+
features = ["derive"]
+
optional = true
added git-ref-format/core/src/cbor.rs
@@ -0,0 +1,117 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::convert::TryFrom;
+

+
use minicbor::{
+
    decode,
+
    encode::{self, Write},
+
    Decode,
+
    Decoder,
+
    Encode,
+
    Encoder,
+
};
+

+
use crate::{
+
    refspec::{PatternStr, PatternString},
+
    Namespaced,
+
    Qualified,
+
    RefStr,
+
    RefString,
+
};
+

+
impl<'de: 'a, 'a> Decode<'de> for &'a RefStr {
+
    #[inline]
+
    fn decode(d: &mut Decoder<'de>) -> Result<Self, decode::Error> {
+
        d.str()
+
            .and_then(|s| Self::try_from(s).map_err(|e| decode::Error::Custom(Box::new(e))))
+
    }
+
}
+

+
impl<'a> Encode for &'a RefStr {
+
    #[inline]
+
    fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
        e.str(self.as_str())?;
+
        Ok(())
+
    }
+
}
+

+
impl<'de> Decode<'de> for RefString {
+
    #[inline]
+
    fn decode(d: &mut Decoder<'de>) -> Result<Self, decode::Error> {
+
        Decode::decode(d).map(|s: &RefStr| s.to_owned())
+
    }
+
}
+

+
impl Encode for RefString {
+
    #[inline]
+
    fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
        self.as_refstr().encode(e)
+
    }
+
}
+

+
impl<'de: 'a, 'a> Decode<'de> for &'a PatternStr {
+
    #[inline]
+
    fn decode(d: &mut Decoder<'de>) -> Result<Self, decode::Error> {
+
        d.str()
+
            .and_then(|s| Self::try_from(s).map_err(|e| decode::Error::Custom(Box::new(e))))
+
    }
+
}
+

+
impl<'a> Encode for &'a PatternStr {
+
    #[inline]
+
    fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
        e.str(self.as_str())?;
+
        Ok(())
+
    }
+
}
+

+
impl<'de> Decode<'de> for PatternString {
+
    #[inline]
+
    fn decode(d: &mut Decoder<'de>) -> Result<Self, decode::Error> {
+
        Decode::decode(d).map(|s: &PatternStr| s.to_owned())
+
    }
+
}
+

+
impl Encode for PatternString {
+
    #[inline]
+
    fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
        self.as_pattern_str().encode(e)
+
    }
+
}
+

+
impl<'de: 'a, 'a> Decode<'de> for Qualified<'a> {
+
    #[inline]
+
    fn decode(d: &mut Decoder<'de>) -> Result<Self, decode::Error> {
+
        Decode::decode(d).and_then(|s: &RefStr| {
+
            s.qualified()
+
                .ok_or(decode::Error::Message("not a qualified ref"))
+
        })
+
    }
+
}
+

+
impl<'a> Encode for Qualified<'a> {
+
    #[inline]
+
    fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
        self.as_str().encode(e)
+
    }
+
}
+

+
impl<'de: 'a, 'a> Decode<'de> for Namespaced<'a> {
+
    #[inline]
+
    fn decode(d: &mut Decoder<'de>) -> Result<Self, decode::Error> {
+
        Decode::decode(d).and_then(|s: &RefStr| {
+
            s.namespaced()
+
                .ok_or(decode::Error::Message("not a namespaced ref"))
+
        })
+
    }
+
}
+

+
impl<'a> Encode for Namespaced<'a> {
+
    #[inline]
+
    fn encode<W: Write>(&self, e: &mut Encoder<W>) -> Result<(), encode::Error<W::Error>> {
+
        self.as_str().encode(e)
+
    }
+
}
added git-ref-format/core/src/check.rs
@@ -0,0 +1,106 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use thiserror::Error;
+

+
/// Options controlling refname validation in [`ref_format`].
///
/// Derives are additive and backward-compatible: the struct is a pair of
/// plain `bool`s, so `Copy` is free and callers can reuse one value across
/// multiple checks.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Options {
    /// If `false`, the refname must contain at least one `/`.
    pub allow_onelevel: bool,
    /// If `true`, the refname may contain exactly one `*` character.
    pub allow_pattern: bool,
}
+

+
/// The reasons a string can fail refname validation.
///
/// Marked `#[non_exhaustive]` so additional rules can be added without a
/// breaking change; callers must keep a catch-all match arm.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum Error {
    #[error("empty input")]
    Empty,
    #[error("lone '@' character")]
    LoneAt,
    #[error("consecutive or trailing slash")]
    Slash,
    #[error("ends with '.lock'")]
    DotLock,
    #[error("consecutive dots ('..')")]
    DotDot,
    #[error("at-open-brace ('@{{')")]
    AtOpenBrace,
    #[error("invalid character {0:?}")]
    InvalidChar(char),
    #[error("component starts with '.'")]
    StartsDot,
    #[error("component ends with '.'")]
    EndsDot,
    #[error("control character")]
    Control,
    #[error("whitespace")]
    Space,
    #[error("must contain at most one '*'")]
    Pattern,
    #[error("must contain at least one '/'")]
    OneLevel,
}
+

+
/// Validate that a string slice is a valid refname.
+
pub fn ref_format(opts: Options, s: &str) -> Result<(), Error> {
+
    match s {
+
        "" => Err(Error::Empty),
+
        "@" => Err(Error::LoneAt),
+
        "." => Err(Error::StartsDot),
+
        _ => {
+
            let mut globs = 0usize;
+
            let mut parts = 0usize;
+

+
            for x in s.split('/') {
+
                if x.is_empty() {
+
                    return Err(Error::Slash);
+
                }
+

+
                parts += 1;
+

+
                if x.ends_with(".lock") {
+
                    return Err(Error::DotLock);
+
                }
+

+
                let last_char = x.chars().count() - 1;
+
                for (i, y) in x.chars().zip(x.chars().cycle().skip(1)).enumerate() {
+
                    match y {
+
                        ('.', '.') => return Err(Error::DotDot),
+
                        ('@', '{') => return Err(Error::AtOpenBrace),
+

+
                        ('\0', _) => return Err(Error::InvalidChar('\0')),
+
                        ('\\', _) => return Err(Error::InvalidChar('\\')),
+
                        ('~', _) => return Err(Error::InvalidChar('~')),
+
                        ('^', _) => return Err(Error::InvalidChar('^')),
+
                        (':', _) => return Err(Error::InvalidChar(':')),
+
                        ('?', _) => return Err(Error::InvalidChar('?')),
+
                        ('[', _) => return Err(Error::InvalidChar('[')),
+

+
                        ('*', _) => globs += 1,
+

+
                        ('.', _) if i == 0 => return Err(Error::StartsDot),
+
                        ('.', _) if i == last_char => return Err(Error::EndsDot),
+

+
                        (' ', _) => return Err(Error::Space),
+

+
                        (z, _) if z.is_ascii_control() => return Err(Error::Control),
+

+
                        _ => continue,
+
                    }
+
                }
+
            }
+

+
            if parts < 2 && !opts.allow_onelevel {
+
                Err(Error::OneLevel)
+
            } else if globs > 1 && opts.allow_pattern {
+
                Err(Error::Pattern)
+
            } else if globs > 0 && !opts.allow_pattern {
+
                Err(Error::InvalidChar('*'))
+
            } else {
+
                Ok(())
+
            }
+
        },
+
    }
+
}
added git-ref-format/core/src/deriv.rs
@@ -0,0 +1,437 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    borrow::Cow,
+
    fmt::{self, Display},
+
    ops::Deref,
+
};
+

+
use crate::{lit, name, Component, RefStr, RefString};
+

+
/// A fully-qualified refname.
///
/// A refname is qualified _iff_ it starts with "refs/" and has at least three
/// components. This implies that a [`Qualified`] ref has a category, such as
/// "refs/heads/main".
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
// Invariant (enforced at every construction site): the inner refname starts
// with "refs" and has >= 3 components. `Cow` lets both borrowed and owned
// refnames be wrapped without copying; `pub(crate)` so sibling modules (eg.
// the cbor codec) can construct it once validated.
pub struct Qualified<'a>(pub(crate) Cow<'a, RefStr>);
+

+
impl<'a> Qualified<'a> {
    /// Infallibly create a [`Qualified`] from components.
    ///
    /// Note that the "refs/" prefix is implicitly added, so `a` is the second
    /// [`Component`]. Mirroring [`Self::non_empty_components`], providing
    /// two [`Component`]s guarantees well-formedness of the [`Qualified`].
    /// `tail` may be empty.
    ///
    /// # Example
    ///
    /// ```no_run
    /// use git_ref_format::{component, Qualified};
    ///
    /// assert_eq!(
    ///     "refs/heads/main",
    ///     Qualified::from_components(component::HEADS, component::MAIN, None).as_str()
    /// )
    /// ```
    pub fn from_components<'b, 'c, 'd, A, B, C>(a: A, b: B, tail: C) -> Self
    where
        A: Into<Component<'b>>,
        B: Into<Component<'c>>,
        C: IntoIterator<Item = Component<'d>>,
    {
        // "refs" + `a` + `b` yields the minimum three components required by
        // the invariant; `tail` only ever appends more.
        let mut inner = name::REFS.join(a.into()).and(b.into());
        inner.extend(tail);

        Self(inner.into())
    }

    /// Create a [`Qualified`] from an arbitrary refname, or `None` if the
    /// name is not actually qualified (fewer than three components, or not
    /// starting with "refs").
    pub fn from_refstr(r: impl Into<Cow<'a, RefStr>>) -> Option<Self> {
        Self::_from_refstr(r.into())
    }

    // Monomorphic worker for `from_refstr`, keeping the generic shim tiny.
    fn _from_refstr(r: Cow<'a, RefStr>) -> Option<Self> {
        let mut iter = r.iter();
        // Three successful `next()` calls witness the >= 3 components
        // invariant; the first component must be exactly "refs".
        match (iter.next()?, iter.next()?, iter.next()?) {
            ("refs", _, _) => Some(Qualified(r)),
            _ => None,
        }
    }

    /// View this refname as a string slice.
    #[inline]
    pub fn as_str(&self) -> &str {
        self.as_ref()
    }

    /// Append `other`, yielding a new [`Qualified`].
    ///
    /// The result keeps the "refs/..." prefix of `self`, so it remains
    /// qualified.
    #[inline]
    pub fn join<'b, R>(&self, other: R) -> Qualified<'b>
    where
        R: AsRef<RefStr>,
    {
        Qualified(self.0.join(other).into())
    }

    /// Interpret this ref as namespaced ("refs/namespaces/..."), if it has
    /// that shape.
    #[inline]
    pub fn namespaced(&self) -> Option<Namespaced> {
        self.0.as_ref().into()
    }

    /// Add a namespace.
    ///
    /// Creates a new [`Namespaced`] by prefixing `self` with
    /// "refs/namespaces/<ns>".
    pub fn add_namespace<'b>(&self, ns: Component<'b>) -> Namespaced<'a> {
        Namespaced(Cow::Owned(
            IntoIterator::into_iter([lit::Refs.into(), lit::Namespaces.into(), ns])
                .chain(self.0.components())
                .collect(),
        ))
    }

    /// Like [`Self::non_empty_components`], but with string slices.
    pub fn non_empty_iter(&self) -> (&str, &str, &str, name::Iter) {
        let mut iter = self.iter();
        // Unwraps are safe by construction: a `Qualified` always has at
        // least three components.
        (
            iter.next().unwrap(),
            iter.next().unwrap(),
            iter.next().unwrap(),
            iter,
        )
    }

    /// Return the first three [`Component`]s, and a possibly empty iterator
    /// over the remaining ones.
    ///
    /// A qualified ref is guaranteed to have at least three components, which
    /// this method provides a witness of. This is useful eg. for pattern
    /// matching on the prefix.
    pub fn non_empty_components(&self) -> (Component, Component, Component, name::Components) {
        let mut cs = self.components();
        // Safe by the >= 3 components invariant.
        (
            cs.next().unwrap(),
            cs.next().unwrap(),
            cs.next().unwrap(),
            cs,
        )
    }

    /// Clone into an owned [`Qualified`] with an arbitrary lifetime.
    #[inline]
    pub fn to_owned<'b>(&self) -> Qualified<'b> {
        Qualified(Cow::Owned(self.0.clone().into_owned()))
    }

    /// Consume `self`, yielding an owned [`Qualified`] with an arbitrary
    /// lifetime (no copy if the inner `Cow` was already owned).
    #[inline]
    pub fn into_owned<'b>(self) -> Qualified<'b> {
        Qualified(Cow::Owned(self.0.into_owned()))
    }

    /// Consume `self`, yielding the underlying [`RefString`].
    #[inline]
    pub fn into_refstring(self) -> RefString {
        self.into()
    }
}
+

+
// Deref to the underlying `RefStr`, making all of its methods available on
// `Qualified` directly.
impl Deref for Qualified<'_> {
    type Target = RefStr;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl AsRef<RefStr> for Qualified<'_> {
    #[inline]
    fn as_ref(&self) -> &RefStr {
        self
    }
}

impl AsRef<str> for Qualified<'_> {
    #[inline]
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}

impl AsRef<Self> for Qualified<'_> {
    #[inline]
    fn as_ref(&self) -> &Self {
        self
    }
}

// Unwrap into the inner `Cow` without copying.
impl<'a> From<Qualified<'a>> for Cow<'a, RefStr> {
    #[inline]
    fn from(q: Qualified<'a>) -> Self {
        q.0
    }
}

// Forget qualification, yielding an owned refname.
impl From<Qualified<'_>> for RefString {
    #[inline]
    fn from(q: Qualified) -> Self {
        q.0.into_owned()
    }
}

// Assemble "refs/<cat>/<name>" from the `Refs` literal plus two refname
// fragments: well-formed by construction (three components, leading "refs").
impl<T, U> From<(lit::Refs, T, U)> for Qualified<'_>
where
    T: AsRef<RefStr>,
    U: AsRef<RefStr>,
{
    #[inline]
    fn from((refs, cat, name): (lit::Refs, T, U)) -> Self {
        let refs: &RefStr = refs.into();
        Self(Cow::Owned(refs.join(cat).and(name)))
    }
}

// "refs/heads/<name>"
impl<T> From<lit::RefsHeads<T>> for Qualified<'_>
where
    T: AsRef<RefStr>,
{
    #[inline]
    fn from((refs, heads, name): lit::RefsHeads<T>) -> Self {
        Self(Cow::Owned(
            IntoIterator::into_iter([Component::from(refs), heads.into()])
                .collect::<RefString>()
                .and(name),
        ))
    }
}

// "refs/tags/<name>"
impl<T> From<lit::RefsTags<T>> for Qualified<'_>
where
    T: AsRef<RefStr>,
{
    #[inline]
    fn from((refs, tags, name): lit::RefsTags<T>) -> Self {
        Self(Cow::Owned(
            IntoIterator::into_iter([Component::from(refs), tags.into()])
                .collect::<RefString>()
                .and(name),
        ))
    }
}

// "refs/notes/<name>"
impl<T> From<lit::RefsNotes<T>> for Qualified<'_>
where
    T: AsRef<RefStr>,
{
    #[inline]
    fn from((refs, notes, name): lit::RefsNotes<T>) -> Self {
        Self(Cow::Owned(
            IntoIterator::into_iter([Component::from(refs), notes.into()])
                .collect::<RefString>()
                .and(name),
        ))
    }
}

// "refs/remotes/<name>"
impl<T> From<lit::RefsRemotes<T>> for Qualified<'_>
where
    T: AsRef<RefStr>,
{
    #[inline]
    fn from((refs, remotes, name): lit::RefsRemotes<T>) -> Self {
        Self(Cow::Owned(
            IntoIterator::into_iter([Component::from(refs), remotes.into()])
                .collect::<RefString>()
                .and(name),
        ))
    }
}

// Display the bare refname string.
impl Display for Qualified<'_> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}
+

+
// Conversions from the radicle-link specific literal tuples into `Qualified`
// refs; each impl simply concatenates the tuple's components. Only compiled
// with the "link-literals" feature.
#[cfg(feature = "link-literals")]
mod link {
    use super::*;

    // "refs/rad/id"
    impl From<lit::RefsRadId> for Qualified<'_> {
        #[inline]
        fn from((refs, rad, id): lit::RefsRadId) -> Self {
            Self(Cow::Owned(
                IntoIterator::into_iter([Component::from(refs), rad.into(), id.into()]).collect(),
            ))
        }
    }

    // "refs/rad/self"
    impl From<lit::RefsRadSelf> for Qualified<'_> {
        #[inline]
        fn from((refs, rad, selv): lit::RefsRadSelf) -> Self {
            Self(Cow::Owned(
                IntoIterator::into_iter([Component::from(refs), rad.into(), selv.into()]).collect(),
            ))
        }
    }

    // "refs/rad/signed_refs"
    impl From<lit::RefsRadSignedRefs> for Qualified<'_> {
        #[inline]
        fn from((refs, rad, sig): lit::RefsRadSignedRefs) -> Self {
            Self(Cow::Owned(
                IntoIterator::into_iter([Component::from(refs), rad.into(), sig.into()]).collect(),
            ))
        }
    }

    // "refs/rad/ids/<id>"
    impl<'a, T: Into<Component<'a>>> From<lit::RefsRadIds<T>> for Qualified<'_> {
        #[inline]
        fn from((refs, rad, ids, id): lit::RefsRadIds<T>) -> Self {
            Self(Cow::Owned(
                IntoIterator::into_iter([Component::from(refs), rad.into(), ids.into(), id.into()])
                    .collect(),
            ))
        }
    }

    // "refs/cobs/<type>/<id>"
    impl<'a, T: Into<Component<'a>>, I: Into<Component<'a>>> From<lit::RefsCobs<T, I>>
        for Qualified<'_>
    {
        #[inline]
        fn from((refs, cobs, ty, id): lit::RefsCobs<T, I>) -> Self {
            Self(Cow::Owned(
                IntoIterator::into_iter([Component::from(refs), cobs.into(), ty.into(), id.into()])
                    .collect(),
            ))
        }
    }
}
+

+
/// A [`Qualified`] ref under a git namespace.
///
/// A ref is namespaced if it starts with "refs/namespaces/", another path
/// component, and "refs/". Eg.
///
///     refs/namespaces/xyz/refs/heads/main
///
/// Note that namespaces can be nested, so the result of
/// [`Namespaced::strip_namespace`] may be convertible to a [`Namespaced`]
/// again. For example:
///
/// ```no_run
/// let full = refname!("refs/namespaces/a/refs/namespaces/b/refs/heads/main");
/// let namespaced = full.namespaced().unwrap();
/// let strip_first = namespaced.strip_namespace();
/// let nested = strip_first.namespaced().unwrap();
/// let strip_second = nested.strip_namespace();
///
/// assert_eq!("a", namespaced.namespace().as_str());
/// assert_eq!("b", nested.namespace().as_str());
/// assert_eq!("refs/namespaces/b/refs/heads/main", strip_first.as_str());
/// assert_eq!("refs/heads/main", strip_second.as_str());
/// ```
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
// Invariant (checked by `From<&RefStr> for Option<Namespaced>`): the inner
// refname has the shape "refs/namespaces/<ns>/refs/...".
pub struct Namespaced<'a>(Cow<'a, RefStr>);
+

+
impl<'a> Namespaced<'a> {
    /// The outermost namespace, ie. the component directly following
    /// "refs/namespaces".
    pub fn namespace(&self) -> Component {
        // Safe: by the shape invariant, component index 2 always exists.
        self.components().nth(2).unwrap()
    }

    /// Remove the outermost namespace, yielding a [`Qualified`] ref.
    ///
    /// The result may itself still be namespaced, since namespaces can nest.
    pub fn strip_namespace<'b>(&self) -> Qualified<'b> {
        const REFS_NAMESPACES: &RefStr = RefStr::from_str("refs/namespaces");

        // Drop the "refs/namespaces" prefix, then skip the namespace
        // component itself; by the invariant the remainder starts with
        // "refs/" again, so it is qualified.
        Qualified(Cow::Owned(
            self.strip_prefix(REFS_NAMESPACES)
                .unwrap()
                .components()
                .skip(1)
                .collect(),
        ))
    }

    /// Strip namespaces until the result is no longer namespaced.
    pub fn strip_namespace_recursive<'b>(&self) -> Qualified<'b> {
        let mut strip = self.strip_namespace();
        while let Some(ns) = strip.namespaced() {
            strip = ns.strip_namespace();
        }
        strip
    }

    /// Clone into an owned [`Namespaced`] with an arbitrary lifetime.
    #[inline]
    pub fn to_owned<'b>(&self) -> Namespaced<'b> {
        Namespaced(Cow::Owned(self.0.clone().into_owned()))
    }

    /// Consume `self`, yielding an owned value with an arbitrary lifetime
    /// (no copy if the inner `Cow` was already owned).
    #[inline]
    pub fn into_owned<'b>(self) -> Namespaced<'b> {
        Namespaced(Cow::Owned(self.0.into_owned()))
    }

    /// Forget the namespacing, viewing this as a plain [`Qualified`] ref
    /// (the "refs/namespaces/<ns>/" prefix is retained in the name).
    #[inline]
    pub fn into_qualified(self) -> Qualified<'a> {
        self.into()
    }
}
+

+
// Deref to the underlying `RefStr`, making all of its methods available on
// `Namespaced` directly.
impl Deref for Namespaced<'_> {
    type Target = RefStr;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl AsRef<RefStr> for Namespaced<'_> {
    #[inline]
    fn as_ref(&self) -> &RefStr {
        self
    }
}

impl AsRef<str> for Namespaced<'_> {
    #[inline]
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}

// Every namespaced ref is also qualified (it starts with "refs/" and has
// more than three components), so this conversion is infallible.
impl<'a> From<Namespaced<'a>> for Qualified<'a> {
    #[inline]
    fn from(ns: Namespaced<'a>) -> Self {
        Self(ns.0)
    }
}

// Recognise the "refs/namespaces/<ns>/refs/..." shape; this is the single
// place where the `Namespaced` invariant is established from arbitrary
// input.
impl<'a> From<&'a RefStr> for Option<Namespaced<'a>> {
    fn from(rs: &'a RefStr) -> Self {
        let mut cs = rs.iter();
        match (cs.next()?, cs.next()?, cs.next()?, cs.next()?) {
            ("refs", "namespaces", _, "refs") => Some(Namespaced(Cow::from(rs))),

            _ => None,
        }
    }
}

// Assemble "refs/namespaces/<namespace>/<name>" from a literal tuple; the
// trailing `Qualified` guarantees the result is well-formed.
impl<'a, T> From<lit::RefsNamespaces<'_, T>> for Namespaced<'static>
where
    T: Into<Component<'a>>,
{
    #[inline]
    fn from((refs, namespaces, namespace, name): lit::RefsNamespaces<T>) -> Self {
        Self(Cow::Owned(
            IntoIterator::into_iter([refs.into(), namespaces.into(), namespace.into()])
                .collect::<RefString>()
                .and(name),
        ))
    }
}

// Display the bare refname string.
impl Display for Namespaced<'_> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}
added git-ref-format/core/src/lib.rs
@@ -0,0 +1,25 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
// Refname syntax validation (see `check::ref_format`).
mod check;
pub use check::{ref_format as check_ref_format, Error, Options};

// Derived refname views: fully-qualified and namespaced refs.
mod deriv;
pub use deriv::{Namespaced, Qualified};

pub mod lit;

pub mod name;
#[cfg(feature = "percent-encoding")]
pub use name::PercentEncode;
pub use name::{Component, RefStr, RefString};

pub mod refspec;
pub use refspec::DuplicateGlob;

// Optional serialization backends; gated so the core crate stays
// dependency-light.
#[cfg(feature = "minicbor")]
mod cbor;
#[cfg(feature = "serde")]
mod serde;
added git-ref-format/core/src/lit.rs
@@ -0,0 +1,313 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use crate::{name, Qualified, RefStr};
+

+
/// A literal [`RefStr`].
///
/// Types implementing [`Lit`] must be [`name::Component`]s, and provide a
/// conversion from a component _iff_ the component's [`RefStr`] representation
/// is equal to [`Lit::NAME`]. Because these morphisms can only be guaranteed
/// axiomatically, the trait can not currently be implemented by types outside
/// of this crate.
///
/// [`Lit`] types are useful for efficiently creating known-valid [`Qualified`]
/// refs, and sometimes for pattern matching.
pub trait Lit: Sized + sealed::Sealed {
    /// The (only) value of this unit type.
    const SELF: Self;
    /// The refname this literal stands for.
    const NAME: &'static RefStr;

    /// Recognise `c` as this literal, by string equality with [`Lit::NAME`].
    #[inline]
    fn from_component(c: &name::Component) -> Option<Self> {
        (c.as_ref() == Self::NAME).then(|| Self::SELF)
    }
}

// Every literal converts to its (static) refname.
impl<T: Lit> From<T> for &'static RefStr {
    #[inline]
    fn from(_: T) -> Self {
        T::NAME
    }
}

// Prevents downstream crates from implementing `Lit` (see the trait docs).
mod sealed {
    pub trait Sealed {}
}
+

+
/// All known literal [`RefStr`]s.
///
/// A closed enumeration over the unit literal types in this module; the
/// radicle-link specific variants only exist with the "link-literals"
/// feature.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub enum KnownLit {
    Refs,
    Heads,
    Namespaces,
    Remotes,
    Tags,
    Notes,

    #[cfg(feature = "link-literals")]
    Rad,
    #[cfg(feature = "link-literals")]
    Id,
    #[cfg(feature = "link-literals")]
    Ids,
    #[cfg(feature = "link-literals")]
    Selv,
    #[cfg(feature = "link-literals")]
    SignedRefs,
    #[cfg(feature = "link-literals")]
    Cobs,
}
+

+
impl KnownLit {
    /// Recognise a [`name::Component`] as one of the known literals, or
    /// `None` if it matches none of them.
    #[inline]
    pub fn from_component(c: &name::Component) -> Option<Self> {
        let c: &RefStr = c.as_ref();
        if c == Refs::NAME {
            Some(Self::Refs)
        } else if c == Heads::NAME {
            Some(Self::Heads)
        } else if c == Namespaces::NAME {
            Some(Self::Namespaces)
        } else if c == Remotes::NAME {
            Some(Self::Remotes)
        } else if c == Tags::NAME {
            Some(Self::Tags)
        } else if c == Notes::NAME {
            Some(Self::Notes)
        } else {
            // Feature-gated literals are tried last; without the feature
            // the fallthrough is simply `None`.
            #[cfg(feature = "link-literals")]
            {
                if c == Rad::NAME {
                    Some(Self::Rad)
                } else if c == Id::NAME {
                    Some(Self::Id)
                } else if c == Ids::NAME {
                    Some(Self::Ids)
                } else if c == Selv::NAME {
                    Some(Self::Selv)
                } else if c == SignedRefs::NAME {
                    Some(Self::SignedRefs)
                } else if c == Cobs::NAME {
                    Some(Self::Cobs)
                } else {
                    None
                }
            }
            #[cfg(not(feature = "link-literals"))]
            None
        }
    }
}
+

+
// Each arm round-trips through the corresponding unit literal's
// `Into<Component>` conversion, so variant and component always agree.
impl From<KnownLit> for name::Component<'_> {
    #[inline]
    fn from(k: KnownLit) -> Self {
        match k {
            KnownLit::Refs => Refs.into(),
            KnownLit::Heads => Heads.into(),
            KnownLit::Namespaces => Namespaces.into(),
            KnownLit::Remotes => Remotes.into(),
            KnownLit::Tags => Tags.into(),
            KnownLit::Notes => Notes.into(),
            #[cfg(feature = "link-literals")]
            KnownLit::Rad => Rad.into(),
            #[cfg(feature = "link-literals")]
            KnownLit::Id => Id.into(),
            #[cfg(feature = "link-literals")]
            KnownLit::Ids => Ids.into(),
            #[cfg(feature = "link-literals")]
            KnownLit::Selv => Selv.into(),
            #[cfg(feature = "link-literals")]
            KnownLit::SignedRefs => SignedRefs.into(),
            #[cfg(feature = "link-literals")]
            KnownLit::Cobs => Cobs.into(),
        }
    }
}
+

+
/// Either a [`KnownLit`] or a [`name::Component`]
+
pub enum SomeLit<'a> {
+
    Known(KnownLit),
+
    Any(name::Component<'a>),
+
}
+

+
impl SomeLit<'_> {
+
    pub fn known(self) -> Option<KnownLit> {
+
        match self {
+
            Self::Known(k) => Some(k),
+
            _ => None,
+
        }
+
    }
+
}
+

+
impl<'a> From<name::Component<'a>> for SomeLit<'a> {
+
    #[inline]
+
    fn from(c: name::Component<'a>) -> Self {
+
        match KnownLit::from_component(&c) {
+
            Some(k) => Self::Known(k),
+
            None => Self::Any(c),
+
        }
+
    }
+
}
+

+
// Tuple shapes pairing literal prefixes with a name; consumed by the `From`
// impls on `Qualified` / `Namespaced` in `deriv`.
pub type RefsHeads<T> = (Refs, Heads, T);
pub type RefsTags<T> = (Refs, Tags, T);
pub type RefsNotes<T> = (Refs, Notes, T);
pub type RefsRemotes<T> = (Refs, Remotes, T);
pub type RefsNamespaces<'a, T> = (Refs, Namespaces, T, Qualified<'a>);

/// Pair `name` with the "refs/heads" prefix.
#[inline]
pub fn refs_heads<T: AsRef<RefStr>>(name: T) -> RefsHeads<T> {
    (Refs, Heads, name)
}

/// Pair `name` with the "refs/tags" prefix.
#[inline]
pub fn refs_tags<T: AsRef<RefStr>>(name: T) -> RefsTags<T> {
    (Refs, Tags, name)
}

/// Pair `name` with the "refs/notes" prefix.
#[inline]
pub fn refs_notes<T: AsRef<RefStr>>(name: T) -> RefsNotes<T> {
    (Refs, Notes, name)
}

/// Pair `name` with the "refs/remotes" prefix.
#[inline]
pub fn refs_remotes<T: AsRef<RefStr>>(name: T) -> RefsRemotes<T> {
    (Refs, Remotes, name)
}

/// Pair a qualified `name` with the "refs/namespaces/<namespace>" prefix.
#[inline]
pub fn refs_namespaces<'a, 'b, T>(namespace: T, name: Qualified<'b>) -> RefsNamespaces<'b, T>
where
    T: Into<name::Component<'a>>,
{
    (Refs, Namespaces, namespace, name)
}
+

+
// Unit types for the well-known git literals. Each wires one of the `name`
// module's constants in via `Lit::NAME`, and is sealed so the
// literal <-> name correspondence can't be broken outside this crate.

/// The literal "refs".
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Refs;

impl Lit for Refs {
    const SELF: Self = Self;
    const NAME: &'static RefStr = name::REFS;
}
impl sealed::Sealed for Refs {}

/// The literal "heads".
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Heads;

impl Lit for Heads {
    const SELF: Self = Self;
    const NAME: &'static RefStr = name::HEADS;
}
impl sealed::Sealed for Heads {}

/// The literal "namespaces".
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Namespaces;

impl Lit for Namespaces {
    const SELF: Self = Self;
    const NAME: &'static RefStr = name::NAMESPACES;
}
impl sealed::Sealed for Namespaces {}

/// The literal "remotes".
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Remotes;

impl Lit for Remotes {
    const SELF: Self = Self;
    const NAME: &'static RefStr = name::REMOTES;
}
impl sealed::Sealed for Remotes {}

/// The literal "tags".
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Tags;

impl Lit for Tags {
    const SELF: Self = Self;
    const NAME: &'static RefStr = name::TAGS;
}
impl sealed::Sealed for Tags {}

/// The literal "notes".
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Notes;

impl Lit for Notes {
    const SELF: Self = Self;
    const NAME: &'static RefStr = name::NOTES;
}
impl sealed::Sealed for Notes {}
+

+
// radicle-link specific literals and their tuple shapes; mirrors the
// structure of the git literals above. Only compiled with the
// "link-literals" feature.
#[cfg(feature = "link-literals")]
mod link {
    use super::*;

    pub type RefsRadId = (Refs, Rad, Id);
    pub type RefsRadSelf = (Refs, Rad, Selv);
    pub type RefsRadSignedRefs = (Refs, Rad, SignedRefs);
    pub type RefsRadIds<T> = (Refs, Rad, Ids, T);
    pub type RefsCobs<T, I> = (Refs, Cobs, T, I);

    // Pre-built tuples for the fixed "refs/rad/..." names.
    pub const REFS_RAD_ID: (Refs, Rad, Id) = (Refs, Rad, Id);
    pub const REFS_RAD_SELF: (Refs, Rad, Selv) = (Refs, Rad, Selv);
    pub const REFS_RAD_SIGNED_REFS: (Refs, Rad, SignedRefs) = (Refs, Rad, SignedRefs);

    /// The literal "rad".
    #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
    pub struct Rad;

    impl Lit for Rad {
        const SELF: Self = Self;
        const NAME: &'static RefStr = RefStr::from_str("rad");
    }
    impl sealed::Sealed for Rad {}

    /// The literal "id".
    #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
    pub struct Id;

    impl Lit for Id {
        const SELF: Self = Self;
        const NAME: &'static RefStr = RefStr::from_str("id");
    }
    impl sealed::Sealed for Id {}

    /// The literal "ids".
    #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
    pub struct Ids;

    impl Lit for Ids {
        const SELF: Self = Self;
        const NAME: &'static RefStr = RefStr::from_str("ids");
    }
    impl sealed::Sealed for Ids {}

    /// The literal "self" (named `Selv` since `Self` is a keyword).
    #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
    pub struct Selv;

    impl Lit for Selv {
        const SELF: Self = Self;
        const NAME: &'static RefStr = RefStr::from_str("self");
    }
    impl sealed::Sealed for Selv {}

    /// The literal "signed_refs".
    #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
    pub struct SignedRefs;

    impl Lit for SignedRefs {
        const SELF: Self = Self;
        const NAME: &'static RefStr = RefStr::from_str("signed_refs");
    }
    impl sealed::Sealed for SignedRefs {}

    /// The literal "cobs".
    #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
    pub struct Cobs;

    impl Lit for Cobs {
        const SELF: Self = Self;
        const NAME: &'static RefStr = RefStr::from_str("cobs");
    }
    impl sealed::Sealed for Cobs {}
}
+

+
#[cfg(feature = "link-literals")]
+
pub use link::*;
added git-ref-format/core/src/name.rs
@@ -0,0 +1,522 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    borrow::{Borrow, Cow},
+
    convert::TryFrom,
+
    fmt::{self, Display},
+
    iter::{Extend, FromIterator},
+
    ops::Deref,
+
};
+

+
use crate::{
+
    check,
+
    refspec::{PatternStr, PatternString},
+
    Namespaced,
+
    Qualified,
+
};
+

+
mod iter;
+
pub use iter::{component, Component, Components, Iter};
+

+
#[cfg(feature = "percent-encoding")]
+
pub use percent_encoding::PercentEncode;
+

+
// Well-known refname fragments as pre-validated `RefStr` constants; the
// plain-string and byte-slice counterparts live in the `str` and `bytes`
// submodules below.
pub const HEADS: &RefStr = RefStr::from_str(str::HEADS);
pub const MAIN: &RefStr = RefStr::from_str(str::MAIN);
pub const MASTER: &RefStr = RefStr::from_str(str::MASTER);
pub const NAMESPACES: &RefStr = RefStr::from_str(str::NAMESPACES);
pub const NOTES: &RefStr = RefStr::from_str(str::NOTES);
pub const ORIGIN: &RefStr = RefStr::from_str(str::ORIGIN);
pub const REFS: &RefStr = RefStr::from_str(str::REFS);
pub const REMOTES: &RefStr = RefStr::from_str(str::REMOTES);
pub const TAGS: &RefStr = RefStr::from_str(str::TAGS);

pub const REFS_HEADS_MAIN: &RefStr = RefStr::from_str(str::REFS_HEADS_MAIN);
pub const REFS_HEADS_MASTER: &RefStr = RefStr::from_str(str::REFS_HEADS_MASTER);
+

+
/// The well-known refname fragments as plain string constants.
pub mod str {
    pub const HEADS: &str = "heads";
    pub const MAIN: &str = "main";
    pub const MASTER: &str = "master";
    pub const NAMESPACES: &str = "namespaces";
    pub const NOTES: &str = "notes";
    pub const ORIGIN: &str = "origin";
    pub const REFS: &str = "refs";
    pub const REMOTES: &str = "remotes";
    pub const TAGS: &str = "tags";

    pub const REFS_HEADS_MAIN: &str = "refs/heads/main";
    pub const REFS_HEADS_MASTER: &str = "refs/heads/master";

    // radicle-link specific names, feature-gated like everywhere else.
    #[cfg(feature = "link-literals")]
    mod link {
        pub const RAD: &str = "rad";
        pub const ID: &str = "id";
        pub const IDS: &str = "ids";
        pub const SELF: &str = "self";
        pub const SIGNED_REFS: &str = "signed_refs";
        pub const COBS: &str = "cobs";

        pub const REFS_RAD_ID: &str = "refs/rad/id";
        pub const REFS_RAD_SELF: &str = "refs/rad/self";
        pub const REFS_RAD_SIGNED_REFS: &str = "refs/rad/signed_refs";
    }
    #[cfg(feature = "link-literals")]
    pub use link::*;
}
+

+
/// Byte-slice views of the constants in [`self::str`].
pub mod bytes {
    use super::str;

    pub const HEADS: &[u8] = str::HEADS.as_bytes();
    pub const MAIN: &[u8] = str::MAIN.as_bytes();
    pub const MASTER: &[u8] = str::MASTER.as_bytes();
    pub const NAMESPACES: &[u8] = str::NAMESPACES.as_bytes();
    pub const NOTES: &[u8] = str::NOTES.as_bytes();
    pub const ORIGIN: &[u8] = str::ORIGIN.as_bytes();
    pub const REFS: &[u8] = str::REFS.as_bytes();
    pub const REMOTES: &[u8] = str::REMOTES.as_bytes();
    pub const TAGS: &[u8] = str::TAGS.as_bytes();

    pub const REFS_HEADS_MAIN: &[u8] = str::REFS_HEADS_MAIN.as_bytes();
    pub const REFS_HEADS_MASTER: &[u8] = str::REFS_HEADS_MASTER.as_bytes();

    // radicle-link specific names, feature-gated like everywhere else.
    #[cfg(feature = "link-literals")]
    mod link {
        use super::str;

        pub const RAD: &[u8] = str::RAD.as_bytes();
        pub const ID: &[u8] = str::ID.as_bytes();
        pub const IDS: &[u8] = str::IDS.as_bytes();
        pub const SELF: &[u8] = str::SELF.as_bytes();
        pub const SIGNED_REFS: &[u8] = str::SIGNED_REFS.as_bytes();
        pub const COBS: &[u8] = str::COBS.as_bytes();

        pub const REFS_RAD_ID: &[u8] = str::REFS_RAD_ID.as_bytes();
        pub const REFS_RAD_SELF: &[u8] = str::REFS_RAD_SELF.as_bytes();
        pub const REFS_RAD_SIGNED_REFS: &[u8] = str::REFS_RAD_SIGNED_REFS.as_bytes();
    }
    #[cfg(feature = "link-literals")]
    pub use link::*;
}
+

+
// radicle-link specific names as pre-validated `RefStr` constants, mirroring
// the git constants at the top of this module.
#[cfg(feature = "link-literals")]
mod link {
    use super::{str, RefStr};

    pub const RAD: &RefStr = RefStr::from_str(str::RAD);
    pub const ID: &RefStr = RefStr::from_str(str::ID);
    pub const IDS: &RefStr = RefStr::from_str(str::IDS);
    pub const SELF: &RefStr = RefStr::from_str(str::SELF);
    pub const SIGNED_REFS: &RefStr = RefStr::from_str(str::SIGNED_REFS);
    pub const COBS: &RefStr = RefStr::from_str(str::COBS);

    pub const REFS_RAD_ID: &RefStr = RefStr::from_str(str::REFS_RAD_ID);
    pub const REFS_RAD_SELF: &RefStr = RefStr::from_str(str::REFS_RAD_SELF);
    pub const REFS_RAD_SIGNED_REFS: &RefStr = RefStr::from_str(str::REFS_RAD_SIGNED_REFS);
}
+

+
// Validation options used for `RefStr` names: glob patterns ('*') are
// rejected, while one-level names (no '/') are accepted.
const CHECK_OPTS: check::Options = check::Options {
    allow_pattern: false,
    allow_onelevel: true,
};
+

+
/// A borrowed refname slice, validated via [`Self::try_from_str`] with
/// [`CHECK_OPTS`] (no globs, one-level names allowed).
// `#[repr(transparent)]` guarantees the same layout as `str` — presumably
// relied upon by the `&str` -> `&RefStr` cast in `from_str`; confirm there.
#[repr(transparent)]
#[derive(Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct RefStr(str);
+

+
impl RefStr {
    /// Validate `s` and borrow it as a [`RefStr`].
    ///
    /// # Errors
    ///
    /// Returns a [`check::Error`] if `s` is not a valid ref name under
    /// [`CHECK_OPTS`].
    pub fn try_from_str(s: &str) -> Result<&RefStr, check::Error> {
        TryFrom::try_from(s)
    }

    /// View as a plain `&str`.
    #[inline]
    pub fn as_str(&self) -> &str {
        self
    }

    /// Copy into an owned [`RefString`].
    #[inline]
    pub fn to_ref_string(&self) -> RefString {
        self.to_owned()
    }

    /// Strip the leading components `base` and the '/' separator following
    /// them.
    ///
    /// Returns `None` if `self` does not start with `base` followed by '/'.
    pub fn strip_prefix<P>(&self, base: P) -> Option<&RefStr>
    where
        P: AsRef<RefStr>,
    {
        self._strip_prefix(base.as_ref())
    }

    // Monomorphic worker for `strip_prefix`, keeping the generic shim small.
    fn _strip_prefix(&self, base: &RefStr) -> Option<&RefStr> {
        self.0
            .strip_prefix(base.as_str())
            .and_then(|s| s.strip_prefix('/'))
            .map(Self::from_str)
    }

    /// Join `other` onto `self`, yielding a new [`RefString`].
    ///
    /// Consider using [`RefString::and`] when chaining multiple fragments
    /// together, and the intermediate values are not needed.
    pub fn join<R>(&self, other: R) -> RefString
    where
        R: AsRef<RefStr>,
    {
        self._join(other.as_ref())
    }

    // Monomorphic worker for `join`.
    fn _join(&self, other: &RefStr) -> RefString {
        let mut buf = self.to_ref_string();
        buf.push(other);
        buf
    }

    /// Append the refspec pattern `pattern`, yielding a [`PatternString`].
    pub fn to_pattern<P>(&self, pattern: P) -> PatternString
    where
        P: AsRef<PatternStr>,
    {
        self._to_pattern(pattern.as_ref())
    }

    // Monomorphic worker for `to_pattern`.
    fn _to_pattern(&self, pattern: &PatternStr) -> PatternString {
        self.to_owned().with_pattern(pattern)
    }

    /// Interpret as a [`Qualified`] ref, if possible.
    #[inline]
    pub fn qualified(&self) -> Option<Qualified> {
        Qualified::from_refstr(self)
    }

    /// Interpret as a [`Namespaced`] ref, if possible.
    #[inline]
    pub fn namespaced(&self) -> Option<Namespaced> {
        self.into()
    }

    /// Iterate over the '/'-separated segments as `&str`.
    pub fn iter(&self) -> Iter {
        self.0.split('/')
    }

    /// Iterate over the '/'-separated segments as [`Component`]s.
    pub fn components(&self) -> Components {
        Components::from(self)
    }

    /// The first [`Component`] of this ref name.
    pub fn head(&self) -> Component {
        self.components().next().expect("`RefStr` cannot be empty")
    }

    /// Percent-encode this ref name for use in a URL path.
    #[cfg(feature = "percent-encoding")]
    pub fn percent_encode(&self) -> PercentEncode {
        /// https://url.spec.whatwg.org/#fragment-percent-encode-set
        const FRAGMENT_PERCENT_ENCODE_SET: &percent_encoding::AsciiSet =
            &percent_encoding::CONTROLS
                .add(b' ')
                .add(b'"')
                .add(b'<')
                .add(b'>')
                .add(b'`');

        /// https://url.spec.whatwg.org/#path-percent-encode-set
        const PATH_PERCENT_ENCODE_SET: &percent_encoding::AsciiSet = &FRAGMENT_PERCENT_ENCODE_SET
            .add(b'#')
            .add(b'?')
            .add(b'{')
            .add(b'}');

        percent_encoding::utf8_percent_encode(self.as_str(), PATH_PERCENT_ENCODE_SET)
    }

    /// View as a byte string.
    #[cfg(feature = "bstr")]
    #[inline]
    pub fn as_bstr(&self) -> &bstr::BStr {
        self.as_ref()
    }

    /// Reinterpret `s` as a [`RefStr`] **without validation**.
    ///
    /// Callers must ensure `s` is a valid ref name.
    pub(crate) const fn from_str(s: &str) -> &RefStr {
        // SAFETY: `RefStr` is `repr(transparent)` over `str`, so the pointer
        // cast preserves layout and validity.
        unsafe { &*(s as *const str as *const RefStr) }
    }
}
+

+
// `RefStr` dereferences to the underlying `str`.
impl Deref for RefStr {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl AsRef<str> for RefStr {
    #[inline]
    fn as_ref(&self) -> &str {
        self
    }
}

// Byte-string view, for interop with `bstr`-based git libraries.
#[cfg(feature = "bstr")]
impl AsRef<bstr::BStr> for RefStr {
    #[inline]
    fn as_ref(&self) -> &bstr::BStr {
        use bstr::ByteSlice as _;
        self.as_str().as_bytes().as_bstr()
    }
}

// Identity `AsRef`, so generic `P: AsRef<RefStr>` parameters accept `&RefStr`.
impl<'a> AsRef<RefStr> for &'a RefStr {
    #[inline]
    fn as_ref(&self) -> &RefStr {
        self
    }
}

// Fallible, validating conversion from a plain `&str`.
impl<'a> TryFrom<&'a str> for &'a RefStr {
    type Error = check::Error;

    #[inline]
    fn try_from(s: &'a str) -> Result<Self, Self::Error> {
        check::ref_format(CHECK_OPTS, s).map(|()| RefStr::from_str(s))
    }
}

impl<'a> From<&'a RefStr> for Cow<'a, RefStr> {
    #[inline]
    fn from(rs: &'a RefStr) -> Cow<'a, RefStr> {
        Cow::Borrowed(rs)
    }
}

// Displays as the raw ref name.
impl Display for RefStr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(self)
    }
}
+

+
/// An owned ref name, guaranteed to be valid (see [`RefStr`]).
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct RefString(String);

impl RefString {
    /// Borrow as a [`RefStr`].
    #[inline]
    pub fn as_refstr(&self) -> &RefStr {
        self
    }

    /// Join `other` onto `self` in place.
    ///
    /// This is a consuming version of [`RefString::push`] which can be chained.
    /// Prefer this over chaining calls to [`RefStr::join`] if the
    /// intermediate values are not needed.
    pub fn and<R>(self, other: R) -> Self
    where
        R: AsRef<RefStr>,
    {
        self._and(other.as_ref())
    }

    // Monomorphic worker for `and`.
    fn _and(mut self, other: &RefStr) -> Self {
        self.push(other);
        self
    }

    /// Append a '/' separator followed by `other`.
    pub fn push<R>(&mut self, other: R)
    where
        R: AsRef<RefStr>,
    {
        self.0.push('/');
        self.0.push_str(other.as_ref().as_str());
    }

    /// Remove the last component (everything from the final '/' onwards).
    ///
    /// Returns `false`, leaving `self` unchanged, if there is no '/' (i.e.
    /// only a single component remains).
    #[inline]
    pub fn pop(&mut self) -> bool {
        match self.0.rfind('/') {
            None => false,
            Some(idx) => {
                self.0.truncate(idx);
                true
            },
        }
    }

    /// Append a [`PatternStr`], turning self into a new [`PatternString`].
    pub fn with_pattern<P>(self, pattern: P) -> PatternString
    where
        P: AsRef<PatternStr>,
    {
        self._with_pattern(pattern.as_ref())
    }

    // Monomorphic worker for `with_pattern`.
    fn _with_pattern(self, pattern: &PatternStr) -> PatternString {
        let mut buf = self.0;
        buf.push('/');
        buf.push_str(pattern.as_str());

        PatternString(buf)
    }

    /// Convert into a [`Qualified`] ref, if possible.
    #[inline]
    pub fn into_qualified<'a>(self) -> Option<Qualified<'a>> {
        Qualified::from_refstr(self)
    }

    /// Reserve additional capacity in the underlying buffer.
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        self.0.reserve(additional)
    }

    /// Shrink the underlying buffer to fit its contents.
    #[inline]
    pub fn shrink_to_fit(&mut self) {
        self.0.shrink_to_fit()
    }

    /// Convert into an owned byte string.
    #[cfg(feature = "bstr")]
    #[inline]
    pub fn into_bstring(self) -> bstr::BString {
        self.into()
    }

    /// View as a byte string.
    #[cfg(feature = "bstr")]
    #[inline]
    pub fn as_bstr(&self) -> &bstr::BStr {
        self.as_ref()
    }
}
+

+
// `RefString` dereferences to a borrowed `RefStr`.
impl Deref for RefString {
    type Target = RefStr;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.borrow()
    }
}

impl AsRef<RefStr> for RefString {
    #[inline]
    fn as_ref(&self) -> &RefStr {
        self
    }
}

impl AsRef<str> for RefString {
    #[inline]
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}

// Byte-string view, for interop with `bstr`-based git libraries.
#[cfg(feature = "bstr")]
impl AsRef<bstr::BStr> for RefString {
    #[inline]
    fn as_ref(&self) -> &bstr::BStr {
        use bstr::ByteSlice as _;
        self.as_str().as_bytes().as_bstr()
    }
}

// `Borrow`/`ToOwned` pair: enables `Cow<RefStr>` and map lookups keyed by
// `RefStr` against `RefString` values. `from_str` is safe here because the
// contents were validated on construction.
impl Borrow<RefStr> for RefString {
    #[inline]
    fn borrow(&self) -> &RefStr {
        RefStr::from_str(self.0.as_str())
    }
}

impl ToOwned for RefStr {
    type Owned = RefString;

    #[inline]
    fn to_owned(&self) -> Self::Owned {
        RefString(self.0.to_owned())
    }
}
+

+
// Fallible, validating conversion from a borrowed string (allocates).
impl TryFrom<&str> for RefString {
    type Error = check::Error;

    #[inline]
    fn try_from(s: &str) -> Result<Self, Self::Error> {
        RefStr::try_from_str(s).map(ToOwned::to_owned)
    }
}

// Fallible, validating conversion from an owned string (reuses the buffer).
impl TryFrom<String> for RefString {
    type Error = check::Error;

    #[inline]
    fn try_from(s: String) -> Result<Self, Self::Error> {
        check::ref_format(CHECK_OPTS, s.as_str()).map(|()| RefString(s))
    }
}
+

+
impl<'a> From<&'a RefString> for Cow<'a, RefStr> {
    #[inline]
    fn from(rs: &'a RefString) -> Cow<'a, RefStr> {
        Cow::Borrowed(rs.as_refstr())
    }
}

impl<'a> From<RefString> for Cow<'a, RefStr> {
    #[inline]
    fn from(rs: RefString) -> Cow<'a, RefStr> {
        Cow::Owned(rs)
    }
}

// Unwrap into the underlying `String` (infallible; validity is dropped).
impl From<RefString> for String {
    #[inline]
    fn from(rs: RefString) -> Self {
        rs.0
    }
}

#[cfg(feature = "bstr")]
impl From<RefString> for bstr::BString {
    #[inline]
    fn from(rs: RefString) -> Self {
        bstr::BString::from(rs.0.into_bytes())
    }
}

// Displays as the raw ref name.
impl Display for RefString {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.0)
    }
}
+

+
impl<A> FromIterator<A> for RefString
+
where
+
    A: AsRef<RefStr>,
+
{
+
    fn from_iter<T>(iter: T) -> Self
+
    where
+
        T: IntoIterator<Item = A>,
+
    {
+
        let mut buf = String::new();
+
        for c in iter {
+
            buf.push_str(c.as_ref().as_str());
+
            buf.push('/');
+
        }
+
        assert!(!buf.is_empty(), "empty iterator");
+
        buf.truncate(buf.len() - 1);
+

+
        Self(buf)
+
    }
+
}
+

+
impl<A> Extend<A> for RefString
+
where
+
    A: AsRef<RefStr>,
+
{
+
    fn extend<T>(&mut self, iter: T)
+
    where
+
        T: IntoIterator<Item = A>,
+
    {
+
        for x in iter {
+
            self.push(x)
+
        }
+
    }
+
}
added git-ref-format/core/src/name/iter.rs
@@ -0,0 +1,162 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    borrow::Cow,
+
    fmt::{self, Display},
+
    ops::Deref,
+
};
+

+
use super::{RefStr, RefString};
+
use crate::lit;
+

+
/// Iterator over the '/'-separated segments of a ref name, yielding `&str`.
pub type Iter<'a> = std::str::Split<'a, char>;

/// Iterator created by the [`RefStr::components`] method.
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct Components<'a> {
    inner: std::str::Split<'a, char>,
}
+

+
impl<'a> Iterator for Components<'a> {
+
    type Item = Component<'a>;
+

+
    #[inline]
+
    fn next(&mut self) -> Option<Self::Item> {
+
        self.inner
+
            .next()
+
            .map(RefStr::from_str)
+
            .map(Cow::from)
+
            .map(Component)
+
    }
+
}
+

+
// Entry point used by `RefStr::components`.
impl<'a> From<&'a RefStr> for Components<'a> {
    #[inline]
    fn from(rs: &'a RefStr) -> Self {
        Self {
            inner: rs.as_str().split('/'),
        }
    }
}
+

+
/// A path component of a [`RefStr`].
///
/// A [`Component`] is a valid [`RefStr`] which does not contain any '/'
/// separators. It may be borrowed or owned (see [`Component::into_inner`]).
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Component<'a>(Cow<'a, RefStr>);
+

+
impl<'a> Component<'a> {
+
    #[inline]
+
    pub fn from_refstring(r: RefString) -> Option<Self> {
+
        if !r.contains('/') {
+
            Some(Self(Cow::Owned(r)))
+
        } else {
+
            None
+
        }
+
    }
+

+
    #[inline]
+
    pub fn as_lit<T: lit::Lit>(&self) -> Option<T> {
+
        T::from_component(self)
+
    }
+

+
    #[inline]
+
    pub fn into_inner(self) -> Cow<'a, RefStr> {
+
        self.0
+
    }
+
}
+

+
// A `Component` dereferences to the `RefStr` it wraps.
impl<'a> Deref for Component<'a> {
    type Target = RefStr;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl AsRef<RefStr> for Component<'_> {
    #[inline]
    fn as_ref(&self) -> &RefStr {
        self
    }
}
+

+
impl<'a> From<&'a RefStr> for Option<Component<'a>> {
+
    #[inline]
+
    fn from(r: &'a RefStr) -> Self {
+
        if !r.contains('/') {
+
            Some(Component(Cow::from(r)))
+
        } else {
+
            None
+
        }
+
    }
+
}
+

+
impl<'a> From<Component<'a>> for Cow<'a, RefStr> {
    #[inline]
    fn from(c: Component<'a>) -> Self {
        c.0
    }
}

// Known literals (see the `lit` module) convert into components directly,
// borrowing the literal's static name.
impl<T: lit::Lit> From<T> for Component<'static> {
    #[inline]
    fn from(_: T) -> Self {
        Component(Cow::from(T::NAME))
    }
}

impl<'a> From<lit::SomeLit<'a>> for Component<'a> {
    #[inline]
    fn from(s: lit::SomeLit<'a>) -> Self {
        use lit::SomeLit::*;

        match s {
            Known(k) => k.into(),
            Any(c) => c,
        }
    }
}

// Displays as the raw segment text.
impl Display for Component<'_> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(self.0.as_str())
    }
}
+

+
/// Pre-built [`Component`] constants for commonly used ref name segments.
pub mod component {
    use super::Component;
    use crate::name;
    use std::borrow::Cow;

    pub const HEADS: Component = Component(Cow::Borrowed(name::HEADS));
    pub const MAIN: Component = Component(Cow::Borrowed(name::MAIN));
    pub const MASTER: Component = Component(Cow::Borrowed(name::MASTER));
    pub const NAMESPACES: Component = Component(Cow::Borrowed(name::NAMESPACES));
    pub const NOTES: Component = Component(Cow::Borrowed(name::NOTES));
    pub const ORIGIN: Component = Component(Cow::Borrowed(name::ORIGIN));
    pub const REFS: Component = Component(Cow::Borrowed(name::REFS));
    pub const REMOTES: Component = Component(Cow::Borrowed(name::REMOTES));
    pub const TAGS: Component = Component(Cow::Borrowed(name::TAGS));

    // Radicle Link-specific components, mirroring the gated `name` constants.
    #[cfg(feature = "link-literals")]
    mod link {
        use super::*;

        pub const RAD: Component = Component(Cow::Borrowed(name::RAD));
        pub const ID: Component = Component(Cow::Borrowed(name::ID));
        pub const IDS: Component = Component(Cow::Borrowed(name::IDS));
        pub const SELF: Component = Component(Cow::Borrowed(name::SELF));
        pub const SIGNED_REFS: Component = Component(Cow::Borrowed(name::SIGNED_REFS));
        pub const COBS: Component = Component(Cow::Borrowed(name::COBS));
    }
    // Re-export so the feature gate is transparent to users.
    #[cfg(feature = "link-literals")]
    pub use link::*;
}
added git-ref-format/core/src/refspec.rs
@@ -0,0 +1,294 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    borrow::{Borrow, Cow},
+
    convert::TryFrom,
+
    fmt::{self, Display},
+
    iter::FromIterator,
+
    ops::Deref,
+
};
+

+
use thiserror::Error;
+

+
use crate::{check, RefStr, RefString};
+

+
mod iter;
+
pub use iter::{Component, Components, Iter};
+

+
/// The lone `"*"` glob, as a [`PatternStr`] constant.
pub const STAR: &PatternStr = PatternStr::from_str("*");

/// Validation options for refspec patterns: "onelevel" names and a '*'
/// glob are both allowed.
const CHECK_OPTS: check::Options = check::Options {
    allow_onelevel: true,
    allow_pattern: true,
};
+

+
/// A borrowed refspec pattern: a valid ref string which may additionally
/// contain at most one '*' glob.
///
/// `repr(transparent)` over `str` so a `&str` can be reinterpreted as a
/// `&PatternStr` (see [`PatternStr::from_str`]).
#[repr(transparent)]
#[derive(Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct PatternStr(str);
+

+
impl PatternStr {
    /// Validate `s` and borrow it as a [`PatternStr`].
    ///
    /// # Errors
    ///
    /// Returns a [`check::Error`] if `s` is not a valid refspec pattern
    /// under [`CHECK_OPTS`].
    #[inline]
    pub fn try_from_str(s: &str) -> Result<&Self, check::Error> {
        TryFrom::try_from(s)
    }

    /// View as a plain `&str`.
    #[inline]
    pub fn as_str(&self) -> &str {
        self
    }

    /// Join the glob-free `other` onto `self`, yielding a new
    /// [`PatternString`].
    pub fn join<R>(&self, other: R) -> PatternString
    where
        R: AsRef<RefStr>,
    {
        self._join(other.as_ref())
    }

    // Monomorphic worker for `join`.
    fn _join(&self, other: &RefStr) -> PatternString {
        let mut buf = self.to_owned();
        buf.push(other);
        buf
    }

    /// Iterate over the '/'-separated segments as `&str`.
    #[inline]
    pub fn iter(&self) -> Iter {
        self.0.split('/')
    }

    /// Iterate over the '/'-separated segments as [`Component`]s.
    #[inline]
    pub fn components(&self) -> Components {
        Components::from(self)
    }

    /// Reinterpret `s` as a [`PatternStr`] **without validation**.
    ///
    /// Callers must ensure `s` is a valid refspec pattern.
    pub(crate) const fn from_str(s: &str) -> &PatternStr {
        // SAFETY: `PatternStr` is `repr(transparent)` over `str`, so the
        // pointer cast preserves layout and validity.
        unsafe { &*(s as *const str as *const PatternStr) }
    }
}
+

+
// `PatternStr` dereferences to the underlying `str`.
impl Deref for PatternStr {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl AsRef<str> for PatternStr {
    #[inline]
    fn as_ref(&self) -> &str {
        self
    }
}

// Identity `AsRef`, so generic `P: AsRef<PatternStr>` parameters accept
// `&PatternStr`.
impl AsRef<Self> for PatternStr {
    #[inline]
    fn as_ref(&self) -> &Self {
        self
    }
}

// Fallible, validating conversion from a plain `&str`.
impl<'a> TryFrom<&'a str> for &'a PatternStr {
    type Error = check::Error;

    #[inline]
    fn try_from(s: &'a str) -> Result<Self, Self::Error> {
        check::ref_format(CHECK_OPTS, s).map(|()| PatternStr::from_str(s))
    }
}

// Infallible widening: every valid ref name is also a valid pattern.
impl<'a> From<&'a RefStr> for &'a PatternStr {
    #[inline]
    fn from(rs: &'a RefStr) -> Self {
        PatternStr::from_str(rs.as_str())
    }
}

impl<'a> From<&'a PatternStr> for Cow<'a, PatternStr> {
    #[inline]
    fn from(p: &'a PatternStr) -> Cow<'a, PatternStr> {
        Cow::Borrowed(p)
    }
}

// Displays as the raw pattern text.
impl Display for PatternStr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(self)
    }
}
+

+
/// An owned refspec pattern, guaranteed to be valid (see [`PatternStr`]).
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct PatternString(pub(crate) String);
+

+
impl PatternString {
    /// View as a plain `&str`.
    #[inline]
    pub fn as_str(&self) -> &str {
        self.as_ref()
    }

    /// Borrow as a [`PatternStr`].
    #[inline]
    pub fn as_pattern_str(&self) -> &PatternStr {
        self.as_ref()
    }

    /// Assemble a pattern from components, separated by '/'.
    ///
    /// # Errors
    ///
    /// Returns [`DuplicateGlob`] if more than one glob component is present.
    #[inline]
    pub fn from_components<'a, T>(iter: T) -> Result<Self, DuplicateGlob>
    where
        T: IntoIterator<Item = Component<'a>>,
    {
        iter.into_iter().collect()
    }

    /// Append the glob-free `other`; consuming version of
    /// [`PatternString::push`] which can be chained.
    #[inline]
    pub fn and<R>(mut self, other: R) -> Self
    where
        R: AsRef<RefStr>,
    {
        self._push(other.as_ref());
        self
    }

    /// Append a '/' separator followed by the glob-free `other`.
    #[inline]
    pub fn push<R>(&mut self, other: R)
    where
        R: AsRef<RefStr>,
    {
        self._push(other.as_ref())
    }

    // Monomorphic worker for `and` / `push`.
    fn _push(&mut self, other: &RefStr) {
        self.0.push('/');
        self.0.push_str(other.as_str());
    }

    /// Remove the last component (everything from the final '/' onwards).
    ///
    /// Returns `false`, leaving `self` unchanged, if there is no '/' (i.e.
    /// only a single component remains).
    #[inline]
    pub fn pop(&mut self) -> bool {
        match self.0.rfind('/') {
            None => false,
            Some(idx) => {
                self.0.truncate(idx);
                true
            },
        }
    }
}
+

+
// `PatternString` dereferences to a borrowed `PatternStr`.
impl Deref for PatternString {
    type Target = PatternStr;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.borrow()
    }
}

impl AsRef<PatternStr> for PatternString {
    #[inline]
    fn as_ref(&self) -> &PatternStr {
        self
    }
}

impl AsRef<str> for PatternString {
    #[inline]
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}

// `Borrow`/`ToOwned` pair: enables `Cow<PatternStr>`. `from_str` is safe
// here because the contents were validated on construction.
impl Borrow<PatternStr> for PatternString {
    #[inline]
    fn borrow(&self) -> &PatternStr {
        PatternStr::from_str(self.0.as_str())
    }
}

impl ToOwned for PatternStr {
    type Owned = PatternString;

    #[inline]
    fn to_owned(&self) -> Self::Owned {
        PatternString(self.0.to_owned())
    }
}
+

+
// Infallible widening: every valid ref name is also a valid pattern.
impl From<RefString> for PatternString {
    #[inline]
    fn from(rs: RefString) -> Self {
        Self(rs.into())
    }
}

impl<'a> From<&'a PatternString> for Cow<'a, PatternStr> {
    #[inline]
    fn from(p: &'a PatternString) -> Cow<'a, PatternStr> {
        Cow::Borrowed(p.as_ref())
    }
}

// Unwrap into the underlying `String` (infallible; validity is dropped).
impl From<PatternString> for String {
    #[inline]
    fn from(p: PatternString) -> Self {
        p.0
    }
}

// Fallible, validating conversion from a borrowed string (allocates).
impl TryFrom<&str> for PatternString {
    type Error = check::Error;

    #[inline]
    fn try_from(s: &str) -> Result<Self, Self::Error> {
        PatternStr::try_from_str(s).map(ToOwned::to_owned)
    }
}

// Fallible, validating conversion from an owned string (reuses the buffer).
impl TryFrom<String> for PatternString {
    type Error = check::Error;

    #[inline]
    fn try_from(s: String) -> Result<Self, Self::Error> {
        check::ref_format(CHECK_OPTS, s.as_str()).map(|()| PatternString(s))
    }
}
+

+
/// Error returned when assembling a pattern from components would yield
/// more than one '*' glob, which is not a valid refspec pattern.
#[derive(Debug, Error)]
#[error("more than one '*' encountered")]
pub struct DuplicateGlob;
+

+
impl<'a> FromIterator<Component<'a>> for Result<PatternString, DuplicateGlob> {
+
    fn from_iter<T>(iter: T) -> Self
+
    where
+
        T: IntoIterator<Item = Component<'a>>,
+
    {
+
        use Component::*;
+

+
        let mut buf = String::new();
+
        let mut seen_glob = false;
+
        for c in iter {
+
            if let Glob(_) = c {
+
                if seen_glob {
+
                    return Err(DuplicateGlob);
+
                }
+

+
                seen_glob = true;
+
            }
+

+
            buf.push_str(c.as_str());
+
            buf.push('/');
+
        }
+
        buf.truncate(buf.len() - 1);
+

+
        Ok(PatternString(buf))
+
    }
+
}
+

+
// Displays as the raw pattern text.
impl Display for PatternString {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.0)
    }
}
added git-ref-format/core/src/refspec/iter.rs
@@ -0,0 +1,69 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::fmt::{self, Display};
+

+
use super::PatternStr;
+
use crate::RefStr;
+

+
/// Iterator over the '/'-separated segments of a pattern, yielding `&str`.
pub type Iter<'a> = std::str::Split<'a, char>;

/// A path component of a [`PatternStr`].
pub enum Component<'a> {
    /// A segment containing a '*' glob; `None` if the segment is exactly "*".
    Glob(Option<&'a PatternStr>),
    /// A plain, glob-free segment.
    Normal(&'a RefStr),
}
+

+
impl Component<'_> {
    /// View the segment as a plain `&str`.
    #[inline]
    pub fn as_str(&self) -> &str {
        self.as_ref()
    }
}

impl AsRef<str> for Component<'_> {
    #[inline]
    fn as_ref(&self) -> &str {
        match self {
            // A bare glob renders as its literal "*".
            Self::Glob(None) => "*",
            Self::Glob(Some(x)) => x.as_str(),
            Self::Normal(x) => x.as_str(),
        }
    }
}

// Displays as the raw segment text.
impl Display for Component<'_> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
+

+
/// Iterator over the segments of a [`PatternStr`], classifying each as a
/// glob or a normal segment. Created by [`PatternStr::components`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct Components<'a> {
    inner: Iter<'a>,
}
+

+
impl<'a> Iterator for Components<'a> {
+
    type Item = Component<'a>;
+

+
    #[inline]
+
    fn next(&mut self) -> Option<Self::Item> {
+
        self.inner.next().map(|next| match next {
+
            "*" => Component::Glob(None),
+
            x if x.contains('*') => Component::Glob(Some(PatternStr::from_str(x))),
+
            x => Component::Normal(RefStr::from_str(x)),
+
        })
+
    }
+
}
+

+
// Entry point used by `PatternStr::components`.
impl<'a> From<&'a PatternStr> for Components<'a> {
    #[inline]
    fn from(p: &'a PatternStr) -> Self {
        Self {
            inner: p.as_str().split('/'),
        }
    }
}
added git-ref-format/core/src/serde.rs
@@ -0,0 +1,144 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::convert::TryFrom;
+

+
use ::serde::{de, Deserialize, Deserializer, Serialize, Serializer};
+

+
use crate::{
+
    refspec::{PatternStr, PatternString},
+
    Namespaced,
+
    Qualified,
+
    RefStr,
+
    RefString,
+
};
+

+
// Borrowed deserialization: requires a deserializer that can hand out a
// `&str` borrowed from its input (hence the `'de: 'a` bound); the string is
// then validated as a ref name.
impl<'de: 'a, 'a> Deserialize<'de> for &'a RefStr {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer)
            .and_then(|s: &str| Self::try_from(s).map_err(de::Error::custom))
    }
}

// Ref names serialize as plain strings.
impl<'a> Serialize for &'a RefStr {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

// Owned deserialization goes through the validating borrowed impl above.
impl<'de> Deserialize<'de> for RefString {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer).map(|x: &RefStr| x.to_owned())
    }
}

impl Serialize for RefString {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}
+

+
// Borrowed deserialization with pattern validation (see the `&RefStr` impl
// above for the borrowing requirement).
impl<'de: 'a, 'a> Deserialize<'de> for &'a PatternStr {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer)
            .and_then(|s: &str| Self::try_from(s).map_err(de::Error::custom))
    }
}

// Patterns serialize as plain strings.
impl<'a> Serialize for &'a PatternStr {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

// Owned deserialization goes through the validating borrowed impl above.
impl<'de> Deserialize<'de> for PatternString {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer).map(|x: &PatternStr| x.to_owned())
    }
}

impl Serialize for PatternString {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}
+

+
// Deserialize a borrowed ref name, then additionally require that it is a
// qualified ref (see `RefStr::qualified`).
impl<'de: 'a, 'a> Deserialize<'de> for Qualified<'a> {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer).and_then(|s: &RefStr| {
            s.qualified()
                .ok_or_else(|| de::Error::custom("not a qualified ref"))
        })
    }
}

impl Serialize for Qualified<'_> {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

// Deserialize a borrowed ref name, then additionally require that it is a
// namespaced ref (see `RefStr::namespaced`).
impl<'de: 'a, 'a> Deserialize<'de> for Namespaced<'a> {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer).and_then(|s: &RefStr| {
            s.namespaced()
                .ok_or_else(|| de::Error::custom("not a namespaced ref"))
        })
    }
}

impl Serialize for Namespaced<'_> {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}
added git-ref-format/macro/Cargo.toml
@@ -0,0 +1,19 @@
+
[package]
+
name = "git-ref-format-macro"
+
version = "0.1.0"
+
authors = ["Kim Altintop <kim@eagain.st>"]
+
edition = "2018"
+
license = "GPL-3.0-or-later"
+

+
[lib]
+
doctest = false
+
proc-macro = true
+
test = false
+

+
[dependencies]
+
proc-macro-error = "1.0.4"
+
quote = "1"
+
syn = "1"
+

+
[dependencies.git-ref-format-core]
+
path = "../core"
added git-ref-format/macro/src/lib.rs
@@ -0,0 +1,122 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
#[macro_use]
+
extern crate proc_macro_error;
+

+
use std::convert::TryInto;
+

+
use proc_macro::TokenStream;
+
use proc_macro_error::abort;
+
use quote::quote;
+
use syn::{parse_macro_input, LitStr};
+

+
use git_ref_format_core::{refspec::PatternStr, Component, Error, RefStr};
+

+
/// Create a [`git_ref_format_core::RefString`] from a string literal.
///
/// The string is validated at compile time, and an unsafe conversion is
/// emitted.
#[proc_macro_error]
#[proc_macro]
pub fn refname(input: TokenStream) -> TokenStream {
    let lit = parse_macro_input!(input as LitStr);
    let val = lit.value();

    // Validate the literal at macro-expansion time using the same checks
    // the runtime conversion would apply.
    let parsed: Result<&RefStr, Error> = val.as_str().try_into();
    match parsed {
        Ok(safe) => {
            let safe: &str = safe.as_str();
            // The emitted code skips runtime validation via transmute.
            // NOTE(review): this relies on `RefString` being a newtype over
            // `String`; it is not declared `repr(transparent)`, so layout
            // identity is assumed rather than guaranteed — worth confirming.
            let expand = quote! {
                unsafe {
                    use ::std::mem::transmute;
                    use ::git_ref_format::RefString;

                    transmute::<_, RefString>(#safe.to_owned())
                }
            };
            TokenStream::from(expand)
        },

        Err(e) => {
            // Surface the validation failure as a compile error at the
            // literal's span.
            abort!(lit.span(), "invalid refname literal: {}", e);
        },
    }
}
+

+
/// Create a [`git_ref_format_core::Component`] from a string literal.
///
/// The string is validated at compile time, and an unsafe conversion is
/// emitted.
#[proc_macro_error]
#[proc_macro]
pub fn component(input: TokenStream) -> TokenStream {
    let lit = parse_macro_input!(input as LitStr);
    let val = lit.value();

    // First validate as a ref name, then require that it is a single
    // segment (a `Component` must not contain '/').
    let name: Result<&RefStr, Error> = val.as_str().try_into();
    match name {
        Ok(name) => {
            let comp: Option<Component> = name.into();
            match comp {
                Some(safe) => {
                    let safe: &str = safe.as_ref().as_str();
                    // The emitted code skips runtime validation via
                    // transmute. NOTE(review): relies on `RefString` /
                    // `Component` being newtypes (over `String` and
                    // `Cow<RefStr>` respectively) without
                    // `repr(transparent)` — layout identity is assumed.
                    let expand = quote! {
                        unsafe {
                            use ::std::{borrow::Cow, mem::transmute};
                            use ::git_ref_format::{Component, RefStr, RefString};

                            let inner: RefString = transmute(#safe.to_owned());
                            let cow: Cow<'static, RefStr> = Cow::Owned(inner);
                            transmute::<_, Component>(cow)
                        }
                    };

                    TokenStream::from(expand)
                },

                None => {
                    abort!(lit.span(), "component contains a '/'");
                },
            }
        },

        Err(e) => {
            abort!(lit.span(), "invalid refname literal: {}", e);
        },
    }
}
+

+
/// Create a [`git_ref_format_core::refspec::PatternString`] from a string
/// literal.
///
/// The string is validated at compile time, and an unsafe conversion is
/// emitted.
#[proc_macro_error]
#[proc_macro]
pub fn pattern(input: TokenStream) -> TokenStream {
    let lit = parse_macro_input!(input as LitStr);
    let val = lit.value();

    // Validate the literal at macro-expansion time as a refspec pattern
    // (at most one '*' glob).
    let parsed: Result<&PatternStr, Error> = val.as_str().try_into();
    match parsed {
        Ok(safe) => {
            let safe: &str = safe.as_str();
            // The emitted code skips runtime validation via transmute.
            // NOTE(review): relies on `PatternString` being a newtype over
            // `String` without `repr(transparent)` — layout identity is
            // assumed rather than guaranteed.
            let expand = quote! {
                unsafe {
                    use ::std::mem::transmute;
                    use ::git_ref_format::refspec::PatternString;

                    transmute::<_, PatternString>(#safe.to_owned())
                }
            };
            TokenStream::from(expand)
        },

        Err(e) => {
            abort!(lit.span(), "invalid refspec pattern literal: {}", e);
        },
    }
}
added git-ref-format/src/lib.rs
@@ -0,0 +1,155 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
//! Everything you never knew you wanted for handling git ref names.
+
//!
+
//! # Overview
+
//!
+
//! This crate provides a number of types which allow you to validate git ref names,
+
//! create new ones which are valid by construction, make assertions about their
+
//! structure, and deconstruct them into their components.
+
//!
+
//! ## Basic Types
+
//!
+
//! The basic types are:
+
//!
+
//! * [`RefStr`]
+
//! * [`RefString`]
+
//!
+
//! They are wrappers around [`str`] and [`String`] respectively, with the
+
//! additional guarantee that they are also valid ref names as per
+
//! [`git-check-ref-format`] (which is also exposed directly as
+
//! [`check_ref_format`]). Both types are referred to as "ref strings".
+
//!
+
//! Note that this implies that ref names must be valid UTF-8, which git itself
+
//! doesn't require.
+
//!
+
//! Ref strings can be iterated over, either yielding `&str` or [`Component`]. A
+
//! [`Component`] is guaranteed to not contain a '/' separator, and can thus
+
//! also be used to conveniently construct known-valid ref strings. The [`lit`]
+
//! module contains a number of types (and `const` values thereof) which can be
+
//! coerced into [`Component`], and thus can be used to construct known-valid
+
//! ref strings.
+
//!
+
//! The [`name`] module also provides a number of constant values of commonly
+
//! used ref strings / components, which are useful for pattern matching.
+
//!
+
//! The `"macro"` feature enables the `refstring!` and `component!` macros,
+
//! which can be convenient to construct compile-time validated [`RefString`]s
+
//! respectively [`Component`]s.
+
//!
+
//! ## Refspec Patterns
+
//!
+
//! The types
+
//!
+
//! * [`refspec::PatternStr`]
+
//! * [`refspec::PatternString`]
+
//!
+
//! guarantee that their values are valid ref strings but additionally _may_
+
//! contain at most one "*" character. It is thus possible to convert a ref
+
//! string to a refspec pattern, but not the other way round. Refspec patterns
+
//! are commonly used for mapping remote to local refs (cf. [`git-fetch`]).
+
//!
+
//! The `"macro"` feature enables the `refspec::pattern!` macro, which
+
//! constructs a compile-time validated [`refspec::PatternString`].
+
//!
+
//! ## Structured Ref Strings
+
//!
+
//! Ref strings may be [`Qualified`], which essentially means that they start
+
//! with "refs/". [`Qualified`] ref strings also require at least three
+
//! components (eg. "refs/heads/main"), which makes it easier to deal with
+
//! common naming conventions.
+
//!
+
//! [`Qualified`] refs may be [`Namespaced`], or can be given a namespace
+
//! (namespaces can be nested). [`Namespaced`] refs are also [`Qualified`], and
+
//! can have their namespace(s) stripped.
+
//!
+
//! # On Git Ref Name Conventions
+
//!
+
//! Git references are essentially path names pointing to their traditional
+
//! storage location in the repository (`$GIT_DIR/refs`). Unlike (UNIX) file
+
//! paths, they are subject to a few restrictions, as described in
+
//! [`git-check-ref-format`].
+
//!
+
//! On top of that, there are a number of conventions around the hierarchical
+
//! naming, _some_ of which are treated specially by tools such as the `git`
+
//! CLI. For example:
+
//!
+
//! * `refs/heads/..` are also called "branches".
+
//!
+
//!   Omitting the "refs/heads/" prefix is typically accepted. Such a branch
+
//!   name is also referred to as a "shorthand" ref.
+
//!
+
//! * `refs/tags/..` are assumed to contain tags.
+
//!
+
//!   `git` treats tags specially, specifically it insists that they be globally
+
//!   unique across all copies of the repository.
+
//!
+
//! * `refs/remotes/../..` is where "remote tracking branches" are stored.
+
//!
+
//!   In `git`, the first element after "remotes" is considered the name of the
+
//!   [remote][git-remote] (as it appears in the config file), while everything
+
//!   after that is considered a shorthand branch. Note, however, that the
+
//!   remote name may itself contain '/' separators, so it is not generally
+
//!   possible to extract the branch name without access to the config.
+
//!
+
//! * `refs/namespaces/..` is hidden unless [`gitnamespaces`] are in effect.
+
//!
+
//!   The structure of namespaces is recursive: they contain full refs, which
+
//!   can themselves be namespaces (eg.
+
//!   `refs/namespaces/a/refs/namespaces/b/refs/heads/branch`). Note that,
+
//!   unlike remote names, namespace names can **not** contain forward slashes
+
//!   but there is no tooling which would enforce that.
+
//!
+
//! There are also other such ref hierarchies `git` knows about, and this crate
+
//! doesn't attempt to cover all of them. More importantly, `git` does not
+
//! impose any restrictions on ref hierarchies: as long as they don't collide
+
//! with conventional ones, applications can introduce any hierarchies they want.
+
//!
+
//! This restricts the transformations between conventional refs which can be
+
//! made without additional information besides the ref name: for example, it is
+
//! not generally possible to turn a remote tracking branch into a branch (or a
+
//! shorthand) without knowing about all possible remote names.
+
//!
+
//! Therefore, this crate doesn't attempt to interpret all possible semantics
+
//! associated with refs, and instead tries to make it easy for library
+
//! consumers to do so.
+
//!
+
//! [`git-check-ref-format`]: https://git-scm.com/docs/git-check-ref-format
+
//! [`git-fetch`]: https://git-scm.com/docs/git-fetch
+
//! [git-remote]: https://git-scm.com/docs/git-remote
+
//! [`gitnamespaces`]: https://git-scm.com/docs/gitnamespaces
+
#[cfg(feature = "percent-encoding")]
+
pub use git_ref_format_core::PercentEncode;
+
pub use git_ref_format_core::{
+
    check_ref_format,
+
    lit,
+
    name::component,
+
    Component,
+
    DuplicateGlob,
+
    Error,
+
    Namespaced,
+
    Options,
+
    Qualified,
+
    RefStr,
+
    RefString,
+
};
+

+
pub mod name {
+
    pub use git_ref_format_core::name::*;
+

+
    #[cfg(any(feature = "macro", feature = "git-ref-format-macro"))]
+
    pub use git_ref_format_macro::component;
+
}
+

+
#[cfg(any(feature = "macro", feature = "git-ref-format-macro"))]
+
pub use git_ref_format_macro::refname;
+

+
pub mod refspec {
+
    pub use git_ref_format_core::refspec::*;
+

+
    #[cfg(any(feature = "macro", feature = "git-ref-format-macro"))]
+
    pub use git_ref_format_macro::pattern;
+
}
added git-ref-format/t/Cargo.toml
@@ -0,0 +1,28 @@
+
[package]
+
name = "git-ref-format-test"
+
version = "0.1.0"
+
edition = "2021"
+
license = "GPL-3.0-or-later"
+

+
publish = false
+

+
[lib]
+
doctest = false
+
test = true
+
doc = false
+

+
[features]
+
test = []
+

+
[dependencies]
+
proptest = "1"
+

+
[dev-dependencies]
+
assert_matches = "1.5"
+

+
[dev-dependencies.git-ref-format]
+
path = ".."
+
features = ["macro", "minicbor", "serde"]
+

+
[dev-dependencies.test-helpers]
+
path = "../../test/test-helpers"
added git-ref-format/t/src/gen.rs
@@ -0,0 +1,101 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
use proptest::prelude::*;
+

+
/// Any unicode "word" is trivially a valid refname.
///
/// Uses the regex strategy `\w+` (one or more Unicode word characters).
pub fn trivial() -> impl Strategy<Value = String> {
    "\\w+"
}
+

+
/// A valid multi-component refname: 1..20 trivial components joined by '/'.
pub fn valid() -> impl Strategy<Value = String> {
    prop::collection::vec(trivial(), 1..20).prop_map(|xs| xs.join("/"))
}
+

+
/// One of the single characters rejected in refnames (cf. the `InvalidChar`
/// assertions in the property tests).
pub fn invalid_char() -> impl Strategy<Value = char> {
    prop_oneof![
        Just('\0'),
        Just('\\'),
        Just('~'),
        Just('^'),
        Just(':'),
        Just('?'),
        Just('[')
    ]
}
+

+
pub fn with_invalid_char() -> impl Strategy<Value = String> {
+
    ("\\w*", invalid_char(), "\\w*").prop_map(|(mut pre, invalid, suf)| {
+
        pre.push(invalid);
+
        pre.push_str(&suf);
+
        pre
+
    })
+
}
+

+
/// A component carrying the forbidden `.lock` suffix.
pub fn ends_with_dot_lock() -> impl Strategy<Value = String> {
    "\\w*\\.lock"
}
+

+
/// A component containing the forbidden `..` sequence.
pub fn with_double_dot() -> impl Strategy<Value = String> {
    "\\w*\\.\\.\\w*"
}
+

+
/// A component starting with a '.', which is forbidden.
pub fn starts_with_dot() -> impl Strategy<Value = String> {
    "\\.\\w*"
}
+

+
/// A component ending with a '.', which is forbidden.
pub fn ends_with_dot() -> impl Strategy<Value = String> {
    "\\w+\\."
}
+

+
/// A component containing ASCII control characters (0x01-0x1F or 0x7F).
pub fn with_control_char() -> impl Strategy<Value = String> {
    "\\w*[\x01-\x1F\x7F]+\\w*"
}
+

+
/// A component containing one or more spaces, which are forbidden.
pub fn with_space() -> impl Strategy<Value = String> {
    "\\w* +\\w*"
}
+

+
/// A string containing consecutive '/' separators, which are forbidden.
pub fn with_consecutive_slashes() -> impl Strategy<Value = String> {
    "\\w*//\\w*"
}
+

+
/// A component containing a single '*' glob (valid only in refspec patterns).
pub fn with_glob() -> impl Strategy<Value = String> {
    "\\w*\\*\\w*"
}
+

+
/// A refname containing at least two '*' globs (invalid as a refspec
/// pattern, which allows at most one), shuffled together with 0..5 valid
/// components and joined by '/'.
pub fn multi_glob() -> impl Strategy<Value = String> {
    (
        prop::collection::vec(with_glob(), 2..5),
        prop::collection::vec(trivial(), 0..5),
    )
        .prop_map(|(mut globs, mut valids)| {
            globs.append(&mut valids);
            globs
        })
        .prop_shuffle()
        .prop_map(|xs| xs.join("/"))
}
+

+
/// Some refname which is invalid for (at least) one reason.
pub fn invalid() -> impl Strategy<Value = String> {
    // Lift a per-component generator to a multi-component path.
    fn path(s: impl Strategy<Value = String>) -> impl Strategy<Value = String> {
        prop::collection::vec(s, 1..20).prop_map(|xs| xs.join("/"))
    }

    prop_oneof![
        Just(String::from("")),
        Just(String::from("@")),
        path(with_invalid_char()),
        path(ends_with_dot_lock()),
        path(with_double_dot()),
        path(starts_with_dot()),
        path(ends_with_dot()),
        path(with_control_char()),
        path(with_space()),
        path(with_consecutive_slashes()),
        // An otherwise valid path with a trailing '/'.
        path(trivial()).prop_map(|mut p| {
            p.push('/');
            p
        }),
    ]
}
added git-ref-format/t/src/lib.rs
@@ -0,0 +1,13 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
#[cfg(test)]
+
#[macro_use]
+
extern crate assert_matches;
+

+
#[cfg(any(test, feature = "test"))]
+
pub mod gen;
+
#[cfg(test)]
+
mod properties;
+
#[cfg(test)]
+
mod tests;
added git-ref-format/t/src/properties.rs
@@ -0,0 +1,25 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
use git_ref_format::{check_ref_format, Error, Options};
+
use proptest::prelude::*;
+

+
use crate::gen;
+

+
mod name;
+
mod pattern;
+

+
proptest! {
    // With `allow_onelevel: false`, any single-component ("one level") name
    // must be rejected, regardless of whether patterns are allowed.
    #[test]
    fn disallow_onelevel(input in gen::trivial(), allow_pattern in any::<bool>()) {
        assert_matches!(
            check_ref_format(Options {
                    allow_onelevel: false,
                    allow_pattern,
                },
                &input
            ),
            Err(Error::OneLevel)
        )
    }
}
added git-ref-format/t/src/properties/name.rs
@@ -0,0 +1,85 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
use std::convert::TryFrom;
+

+
use git_ref_format::{Error, RefStr, RefString};
+
use proptest::prelude::*;
+
use test_helpers::roundtrip;
+

+
use crate::gen;
+

+
proptest! {
+
    #[test]
+
    fn valid(input in gen::valid()) {
+
        assert_eq!(input.as_str(), RefStr::try_from_str(&input).unwrap().as_str())
+
    }
+

+
    #[test]
+
    fn invalid_char(input in gen::with_invalid_char()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::InvalidChar(_)))
+
    }
+

+
    #[test]
+
    fn dot_lock(input in gen::ends_with_dot_lock()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::DotLock))
+
    }
+

+
    #[test]
+
    fn double_dot(input in gen::with_double_dot()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::DotDot))
+
    }
+

+
    #[test]
+
    fn starts_dot(input in gen::starts_with_dot()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::StartsDot))
+
    }
+

+
    #[test]
+
    fn ends_dot(input in gen::ends_with_dot()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::EndsDot))
+
    }
+

+
    #[test]
+
    fn control_char(input in gen::with_control_char()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::Control))
+
    }
+

+
    #[test]
+
    fn space(input in gen::with_space()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::Space))
+
    }
+

+
    #[test]
+
    fn consecutive_slashes(input in gen::with_consecutive_slashes()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::Slash))
+
    }
+

+
    #[test]
+
    fn glob(input in gen::with_glob()) {
+
        assert_matches!(RefString::try_from(input), Err(Error::InvalidChar('*')))
+
    }
+

+
    #[test]
+
    fn invalid(input in gen::invalid()) {
+
        assert_matches!(RefString::try_from(input), Err(_))
+
    }
+

+
    #[test]
+
    fn roundtrip_components(input in gen::valid()) {
+
        assert_eq!(
+
            input.as_str(),
+
            RefStr::try_from_str(&input).unwrap().components().collect::<RefString>().as_str()
+
        )
+
    }
+

+
    #[test]
+
    fn json(input in gen::valid()) {
+
       roundtrip::json(RefString::try_from(input).unwrap())
+
    }
+

+
    #[test]
+
    fn cbor(input in gen::valid()) {
+
        roundtrip::cbor(RefString::try_from(input).unwrap())
+
    }
+
}
added git-ref-format/t/src/properties/pattern.rs
@@ -0,0 +1,55 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
use std::convert::TryFrom;
+

+
use git_ref_format::{refspec, Error};
+
use proptest::prelude::*;
+
use test_helpers::roundtrip;
+

+
use crate::gen;
+

+
proptest! {
+
    #[test]
+
    fn valid(input in gen::with_glob()) {
+
        assert_eq!(input.as_str(), refspec::PatternStr::try_from_str(&input).unwrap().as_str())
+
    }
+

+
    #[test]
+
    fn refname_is_pattern(input in gen::valid()) {
+
        assert_eq!(input.as_str(), refspec::PatternStr::try_from_str(&input).unwrap().as_str())
+
    }
+

+
    #[test]
+
    fn no_more_than_one_star(input in gen::multi_glob()) {
+
        assert_matches!(refspec::PatternString::try_from(input), Err(Error::Pattern))
+
    }
+

+
    #[test]
+
    fn invalid_refname_is_invalid_pattern(input in gen::invalid()) {
+
        assert_matches!(refspec::PatternString::try_from(input), Err(_))
+
    }
+

+
    #[test]
+
    fn roundtrip_components(input in gen::with_glob()) {
+
        assert_eq!(
+
            input.as_str(),
+
            refspec::PatternStr::try_from_str(&input)
+
                .unwrap()
+
                .components()
+
                .collect::<Result<refspec::PatternString, _>>()
+
                .unwrap()
+
                .as_str()
+
        )
+
    }
+

+
    #[test]
+
    fn json(input in gen::with_glob()) {
+
        roundtrip::json(refspec::PatternString::try_from(input).unwrap())
+
    }
+

+
    #[test]
+
    fn cbor(input in gen::with_glob()) {
+
        roundtrip::cbor(refspec::PatternString::try_from(input).unwrap())
+
    }
+
}
added git-ref-format/t/src/tests.rs
@@ -0,0 +1,281 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
use git_ref_format::{component, name, refname, refspec, Error, Qualified, RefStr, RefString};
+

+
#[test]
+
fn refname_macro_works() {
+
    assert_eq!("refs/heads/main", refname!("refs/heads/main").as_str())
+
}
+

+
#[test]
+
fn component_macro_works() {
+
    assert_eq!("self", name::component!("self").as_str())
+
}
+

+
#[test]
+
fn pattern_macro_works() {
+
    assert_eq!("refs/heads/*", refspec::pattern!("refs/heads/*").as_str())
+
}
+

+
#[test]
+
fn empty() {
+
    assert_matches!(RefStr::try_from_str(""), Err(Error::Empty));
+
    assert_matches!(RefString::try_from("".to_owned()), Err(Error::Empty));
+
}
+

+
#[test]
+
fn join() {
+
    let s = name::REFS.join(name::HEADS);
+
    let t = s.join(name::MAIN);
+
    assert_eq!("refs/heads", s.as_str());
+
    assert_eq!("refs/heads/main", t.as_str());
+
}
+

+
#[test]
+
fn join_and() {
+
    assert_eq!(
+
        "refs/heads/this/that",
+
        name::REFS
+
            .join(name::HEADS)
+
            .and(refname!("this"))
+
            .and(refname!("that"))
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn strip_prefix() {
+
    assert_eq!(
+
        "main",
+
        name::REFS_HEADS_MAIN
+
            .strip_prefix(refname!("refs/heads"))
+
            .unwrap()
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn strip_prefix_not_prefix() {
+
    assert!(name::REFS_HEADS_MAIN
+
        .strip_prefix(refname!("refs/tags"))
+
        .is_none())
+
}
+

+
#[test]
+
fn qualified() {
+
    assert_eq!(
+
        "refs/heads/main",
+
        name::REFS_HEADS_MAIN.qualified().unwrap().as_str()
+
    )
+
}
+

+
#[test]
+
fn qualified_tag() {
+
    assert_eq!(
+
        "refs/tags/v1",
+
        refname!("refs/tags/v1").qualified().unwrap().as_str()
+
    )
+
}
+

+
#[test]
+
fn qualified_remote_tracking() {
+
    assert_eq!(
+
        "refs/remotes/origin/master",
+
        refname!("refs/remotes/origin/master")
+
            .qualified()
+
            .unwrap()
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn not_qualified() {
+
    assert!(name::MAIN.qualified().is_none())
+
}
+

+
#[test]
+
fn qualified_from_components() {
+
    assert_eq!(
+
        "refs/heads/main",
+
        Qualified::from_components(component::HEADS, component::MAIN, None).as_str()
+
    )
+
}
+

+
#[test]
+
fn qualified_from_components_with_iter() {
+
    assert_eq!(
+
        "refs/heads/foo/bar/baz",
+
        Qualified::from_components(
+
            component::HEADS,
+
            name::component!("foo"),
+
            [name::component!("bar"), name::component!("baz")]
+
        )
+
        .as_str()
+
    )
+
}
+

+
#[test]
+
fn qualified_from_components_non_empty_iter() {
+
    let q = Qualified::from_components(component::HEADS, component::MAIN, None);
+
    let (refs, heads, main, mut empty) = q.non_empty_iter();
+
    assert!(empty.next().is_none());
+
    assert_eq!(("refs", "heads", "main"), (refs, heads, main))
+
}
+

+
#[test]
+
fn qualified_from_components_non_empty_components() {
+
    let q = Qualified::from_components(component::HEADS, component::MAIN, Some(component::MASTER));
+
    let (refs, heads, main, mut master) = q.non_empty_components();
+
    assert_eq!(
+
        (
+
            component::REFS,
+
            component::HEADS,
+
            component::MAIN,
+
            component::MASTER
+
        ),
+
        (refs, heads, main, master.next().unwrap())
+
    )
+
}
+

+
#[test]
+
fn namespaced() {
+
    assert_eq!(
+
        "refs/namespaces/foo/refs/heads/main",
+
        refname!("refs/namespaces/foo/refs/heads/main")
+
            .namespaced()
+
            .unwrap()
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn not_namespaced() {
+
    assert!(name::REFS_HEADS_MAIN.namespaced().is_none())
+
}
+

+
#[test]
+
fn not_namespaced_because_not_qualified() {
+
    assert!(refname!("refs/namespaces/foo/banana")
+
        .namespaced()
+
        .is_none())
+
}
+

+
#[test]
+
fn strip_namespace() {
+
    assert_eq!(
+
        "refs/rad/id",
+
        refname!("refs/namespaces/xyz/refs/rad/id")
+
            .namespaced()
+
            .unwrap()
+
            .strip_namespace()
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn strip_nested_namespaces() {
+
    let full = refname!("refs/namespaces/a/refs/namespaces/b/refs/heads/main");
+
    let namespaced = full.namespaced().unwrap();
+
    let strip_first = namespaced.strip_namespace();
+
    let nested = strip_first.namespaced().unwrap();
+
    let strip_second = nested.strip_namespace();
+

+
    assert_eq!("a", namespaced.namespace().as_str());
+
    assert_eq!("b", nested.namespace().as_str());
+
    assert_eq!("refs/namespaces/b/refs/heads/main", strip_first.as_str());
+
    assert_eq!("refs/heads/main", strip_second.as_str());
+
}
+

+
#[test]
+
fn add_namespace() {
+
    assert_eq!(
+
        "refs/namespaces/foo/refs/heads/main",
+
        name::REFS_HEADS_MAIN
+
            .qualified()
+
            .unwrap()
+
            .add_namespace(refname!("foo").head())
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn iter() {
+
    assert_eq!(
+
        vec!["refs", "heads", "main"],
+
        name::REFS_HEADS_MAIN.iter().collect::<Vec<_>>()
+
    )
+
}
+

+
#[test]
+
fn push_pop() {
+
    let mut s = name::REFS.to_owned();
+
    s.push(name::HEADS);
+
    s.push(name::MAIN);
+

+
    assert_eq!("refs/heads/main", s.as_str());
+
    assert!(s.pop());
+
    assert!(s.pop());
+
    assert_eq!("refs", s.as_str());
+
    assert!(!s.pop());
+
    assert_eq!("refs", s.as_str());
+
}
+

+
#[test]
+
fn to_pattern() {
+
    assert_eq!(
+
        "refs/heads/*",
+
        refname!("refs/heads")
+
            .to_pattern(refspec::pattern!("*"))
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn with_pattern() {
+
    assert_eq!(
+
        "refs/heads/*",
+
        refname!("refs/heads").with_pattern(refspec::STAR).as_str()
+
    )
+
}
+

+
#[test]
+
fn with_pattern_and() {
+
    assert_eq!(
+
        "refs/*/heads",
+
        refname!("refs")
+
            .with_pattern(refspec::STAR)
+
            .and(name::HEADS)
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn collect() {
+
    assert_eq!(
+
        "refs/heads/main",
+
        IntoIterator::into_iter([name::REFS, name::HEADS, name::MAIN])
+
            .collect::<RefString>()
+
            .as_str()
+
    )
+
}
+

+
#[test]
+
fn collect_components() {
+
    let a = name::REFS_HEADS_MAIN.to_owned();
+
    let b = a.components().collect();
+
    assert_eq!(a, b)
+
}
+

+
#[test]
+
fn collect_pattern_duplicate_glob() {
+
    assert_matches!(
+
        IntoIterator::into_iter([
+
            refspec::Component::Normal(name::REFS),
+
            refspec::Component::Glob(None),
+
            refspec::Component::Glob(Some(refspec::pattern!("fo*").as_ref()))
+
        ])
+
        .collect::<Result<_, _>>(),
+
        Err(refspec::DuplicateGlob)
+
    )
+
}
added git-trailers/Cargo.toml
@@ -0,0 +1,14 @@
+
[package]
+
name = "git-trailers"
+
version = "0.1.0"
+
authors = ["Nuno Alexandre <hi@nunoalexandre.com>", "Kim Altintop <kim@eagain.st>"]
+
edition = "2018"
+
license = "GPL-3.0-or-later"
+

+
[lib]
+
doctest = false
+
test = false
+

+
[dependencies]
+
nom = "7.1"
+
thiserror = "1.0"
added git-trailers/src/lib.rs
@@ -0,0 +1,290 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{borrow::Cow, convert::TryFrom, fmt, ops::Deref};
+

+
use thiserror::Error;
+

+
#[derive(Debug, Error)]
+
#[non_exhaustive]
+
pub enum Error {
+
    #[error("the trailers paragraph is missing in the given message")]
+
    MissingParagraph,
+

+
    #[error("trailing data after trailers section: '{0}")]
+
    Trailing(String),
+

+
    #[error(transparent)]
+
    Parse(#[from] nom::Err<nom::error::Error<String>>),
+
}
+

+
/// A parsed commit-message trailer, e.g. `Signed-off-by: Alice <a@example.com>`.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Trailer<'a> {
    /// The trailer's key, left of the separator (e.g. `Signed-off-by`).
    pub token: Token<'a>,
    /// The trailer's values: the inline value (if any) followed by any
    /// indented continuation-line values.
    pub values: Vec<Cow<'a, str>>,
}
+

+
impl<'a> Trailer<'a> {
    /// Render this trailer with the given token/value `separator`.
    ///
    /// Multiline values are joined with a fixed two-space indent by the
    /// returned [`Display`], so rendering is not layout-preserving.
    pub fn display(&'a self, separator: &'a str) -> Display<'a> {
        Display {
            trailer: self,
            separator,
        }
    }

    /// Copy this trailer into an [`OwnedTrailer`], cloning token and values.
    pub fn to_owned(&self) -> OwnedTrailer {
        OwnedTrailer::from(self)
    }
}
+

+
/// The key of a [`Trailer`]: one or more alphanumeric (per
/// `char::is_alphanumeric`) or '-' characters.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Token<'a>(&'a str);
+

+
/// A version of the [`Trailer`] which owns its token and values. Useful for
/// when you need to carry trailers around in a long lived data structure.
pub struct OwnedTrailer {
    // Owned copy of the trailer's key.
    token: OwnedToken,
    // Owned copies of the trailer's values.
    values: Vec<String>,
}

/// Owned counterpart of [`Token`].
pub struct OwnedToken(String);
+

+
impl<'a> From<&Trailer<'a>> for OwnedTrailer {
+
    fn from(t: &Trailer<'a>) -> Self {
+
        OwnedTrailer {
+
            token: OwnedToken(t.token.0.to_string()),
+
            values: t.values.iter().map(|v| v.to_string()).collect(),
+
        }
+
    }
+
}
+

+
impl<'a> From<Trailer<'a>> for OwnedTrailer {
    /// Delegates to the borrowing conversion.
    fn from(t: Trailer<'a>) -> Self {
        Self::from(&t)
    }
}
+

+
impl<'a> From<&'a OwnedTrailer> for Trailer<'a> {
+
    fn from(t: &'a OwnedTrailer) -> Self {
+
        Trailer {
+
            token: Token(t.token.0.as_str()),
+
            values: t.values.iter().map(Cow::from).collect(),
+
        }
+
    }
+
}
+

+
/// Errors which can occur when validating a [`Token`] via `TryFrom<&str>`.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum InvalidToken {
    /// The input started with a valid token, but was not consumed entirely.
    /// Carries the unconsumed remainder.
    #[error("trailing characters: '{0}'")]
    Trailing(String),

    /// The input did not start with a valid token at all.
    #[error(transparent)]
    Parse(#[from] nom::Err<nom::error::Error<String>>),
}
+

+
impl<'a> TryFrom<&'a str> for Token<'a> {
    type Error = InvalidToken;

    /// Validate `s` as a trailer token: the entire input must consist of
    /// alphanumeric or '-' characters; any leftover input is an error.
    fn try_from(s: &'a str) -> Result<Self, Self::Error> {
        match parser::token(s) {
            Ok((rest, token)) if rest.is_empty() => Ok(token),
            Ok((trailing, _)) => Err(InvalidToken::Trailing(trailing.to_owned())),
            Err(e) => Err(e.to_owned().into()),
        }
    }
}
+

+
impl Deref for Token<'_> {
    type Target = str;

    /// A [`Token`] dereferences to its underlying string slice.
    fn deref(&self) -> &Self::Target {
        self.0
    }
}
+

+
/// Display adaptor rendering a single [`Trailer`] with a chosen separator.
/// Created via [`Trailer::display`].
pub struct Display<'a> {
    trailer: &'a Trailer<'a>,
    separator: &'a str,
}
+

+
impl<'a> fmt::Display for Display<'a> {
+
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+
        write!(
+
            f,
+
            "{}{}{}",
+
            self.trailer.token.deref(),
+
            self.separator,
+
            self.trailer.values.join("\n  ")
+
        )
+
    }
+
}
+

+
/// Strategy for choosing the separator string used when rendering trailers.
pub trait Separator<'a> {
    /// The separator to place between `token` and its values.
    fn sep_for(&self, token: &Token) -> &'a str;
}
+

+
/// A plain string slice acts as a fixed separator for every token.
impl<'a> Separator<'a> for &'a str {
    fn sep_for(&self, _: &Token) -> &'a str {
        self
    }
}
+

+
/// A closure may choose a different separator per token.
impl<'a, F> Separator<'a> for F
where
    F: Fn(&Token) -> &'a str,
{
    fn sep_for(&self, token: &Token) -> &'a str {
        self(token)
    }
}
+

+
/// Display adaptor rendering a slice of [`Trailer`]s, one per line.
/// Created via [`display`].
pub struct DisplayMany<'a, S> {
    separator: S,
    trailers: &'a [Trailer<'a>],
}
+

+
impl<'a, S> fmt::Display for DisplayMany<'a, S>
+
where
+
    S: Separator<'a>,
+
{
+
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+
        for (i, trailer) in self.trailers.iter().enumerate() {
+
            if i > 0 {
+
                writeln!(f)?
+
            }
+

+
            write!(
+
                f,
+
                "{}",
+
                trailer.display(self.separator.sep_for(&trailer.token))
+
            )?
+
        }
+

+
        Ok(())
+
    }
+
}
+

+
/// Parse the trailers of the given message. It looks up the last paragraph
/// of the message and attempts to parse each of its lines as a [Trailer].
/// Fails if no trailers paragraph is found or if at least one trailer
/// fails to be parsed.
pub fn parse<'a>(message: &'a str, separators: &'a str) -> Result<Vec<Trailer<'a>>, Error> {
    // Trailers, if any, live in the last paragraph. `trim_end` ensures a
    // trailing blank line doesn't make the "last paragraph" empty.
    let trailers_paragraph =
        match parser::paragraphs(message.trim_end()).map(|(_, ps)| ps.last().cloned()) {
            Ok(None) | Err(_) => return Err(Error::MissingParagraph),
            Ok(Some(p)) => {
                if p.is_empty() {
                    return Err(Error::MissingParagraph);
                }
                p
            },
        };

    // The whole paragraph must consist of trailers; any unparsed remainder
    // is reported as `Error::Trailing`.
    match parser::trailers(trailers_paragraph, separators) {
        Ok((rest, trailers)) if rest.is_empty() => Ok(trailers),
        Ok((unparseable, _)) => Err(Error::Trailing(unparseable.to_owned())),
        Err(e) => Err(e.to_owned().into()),
    }
}
+

+
/// Render a slice of trailers.
///
/// The `separator` can be either a string slice, or a closure which may choose
/// a different separator for each [`Token`] encountered. Note that multiline
/// trailers are rendered with a fixed indent, so the result is not
/// layout-preserving.
///
/// Trailers are separated by a single newline; no trailing newline is
/// emitted.
pub fn display<'a, S>(separator: S, trailers: &'a [Trailer<'a>]) -> DisplayMany<'a, S>
where
    S: Separator<'a>,
{
    DisplayMany {
        separator,
        trailers,
    }
}
+

+
/// `nom` parsers for commit-message paragraphs and trailers.
pub mod parser {
    use std::borrow::Cow;

    use super::{Token, Trailer};
    use nom::{
        branch::alt,
        bytes::complete::{tag, take_until, take_while1},
        character::complete::{line_ending, not_line_ending, one_of, space0, space1},
        combinator::{map, rest},
        multi::{many0, separated_list1},
        sequence::{delimited, preceded, separated_pair, terminated},
        IResult,
    };

    // Paragraph separator: exactly one blank line.
    const EMPTY_LINE: &str = "\n\n";

    /// Split the input into paragraphs (runs of text separated by a blank
    /// line). Fails if no paragraph can be parsed at all.
    pub fn paragraphs(s: &str) -> IResult<&str, Vec<&str>> {
        separated_list1(tag(EMPTY_LINE), paragraph)(s)
    }

    /// A single paragraph: everything up to the next blank line, or the
    /// remainder of the input if there is none.
    pub fn paragraph(s: &str) -> IResult<&str, &str> {
        alt((take_until(EMPTY_LINE), rest))(s)
    }

    /// Parse all the possible trailers.
    /// It stops when it can no longer parse valid trailers.
    pub fn trailers<'a>(s: &'a str, separators: &'a str) -> IResult<&'a str, Vec<Trailer<'a>>> {
        many0(|s| trailer(s, separators))(s)
    }

    /// Parse a trailer, which can have an inlined or multilined value.
    pub fn trailer<'a>(s: &'a str, separators: &'a str) -> IResult<&'a str, Trailer<'a>> {
        // token, then one of `separators`, then the value(s).
        let mut parser = separated_pair(token, |s| separator(separators, s), values);
        let (rest, (token, values)) = parser(s)?;
        Ok((rest, Trailer { token, values }))
    }

    /// Parse a trailer token: one or more alphanumeric or '-' characters.
    pub(super) fn token(s: &str) -> IResult<&str, Token> {
        take_while1(|c: char| c.is_alphanumeric() || c == '-')(s)
            .map(|(i, token_str)| (i, Token(token_str)))
    }

    /// Parse the trailer separator, which can be delimited by spaces.
    /// `separators` is the set of accepted separator characters.
    fn separator<'a>(separators: &'a str, s: &'a str) -> IResult<&'a str, char> {
        delimited(space0, one_of(separators), space0)(s)
    }

    /// Parse the trailer values, which gathers the value after the separator
    /// (if any) and possible following multilined values, indented by a
    /// space.
    fn values(s: &str) -> IResult<&str, Vec<Cow<'_, str>>> {
        let (r, opt_inline_value) = until_eol_or_eof(s)?;
        let (r, mut values) = multiline_values(r)?;
        // An empty inline value (separator at end of line) contributes no
        // entry; the continuation lines alone make up the result.
        if !opt_inline_value.is_empty() {
            values.insert(0, opt_inline_value.into())
        }
        Ok((r, values))
    }

    /// Zero or more indented continuation-line values.
    fn multiline_values(s: &str) -> IResult<&str, Vec<Cow<'_, str>>> {
        many0(map(indented_line_contents, Cow::from))(s)
    }

    /// Consume up to the end of the line, or the rest of the input if there
    /// is no line ending.
    fn until_eol_or_eof(s: &str) -> IResult<&str, &str> {
        alt((until_eol, rest))(s)
    }

    /// Parse an indented line, i.e, a line that starts with a space.
    /// Extracts the line contents, ignoring the indentation and the
    /// new line character.
    fn indented_line_contents(s: &str) -> IResult<&str, &str> {
        preceded(space1, until_eol_or_eof)(s)
    }

    /// Consume the input until the end of the line, ignoring the new line
    /// character.
    fn until_eol(s: &str) -> IResult<&str, &str> {
        terminated(not_line_ending, line_ending)(s)
    }
}
added git-trailers/t/Cargo.toml
@@ -0,0 +1,22 @@
+
[package]
+
name = "git-trailers-test"
+
version = "0.1.0"
+
edition = "2021"
+
license = "GPL-3.0-or-later"
+

+
publish = false
+

+
[lib]
+
doctest = false
+
test = true
+
doc = false
+

+
[features]
+
test = []
+

+
[dev-dependencies]
+
assert_matches = "1.5"
+
pretty_assertions = "1.1"
+

+
[dev-dependencies.git-trailers]
+
path = ".."
added git-trailers/t/src/lib.rs
@@ -0,0 +1,2 @@
+
#[cfg(test)]
+
mod tests;
added git-trailers/t/src/tests.rs
@@ -0,0 +1,155 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{borrow::Cow, convert::TryFrom as _, ops::Deref as _};
+

+
use assert_matches::assert_matches;
+
use git_trailers::{display, parse, Error, Token, Trailer};
+
use pretty_assertions::assert_eq;
+

+
#[test]
+
fn parse_message_with_valid_trailers() {
+
    let msg = r#"Subject
+

+
A multiline
+
description.
+

+
Co-authored-by: John Doe <john.doe@test.com>
+
Ticket: #42
+
Tested-by:
+
    John <john@test.com>
+
    Jane <jane@test.com>
+
Just-a-token:
+

+
"#;
+
    assert_eq!(
+
        parse(msg, ":").unwrap(),
+
        vec![
+
            new_trailer("Co-authored-by", &["John Doe <john.doe@test.com>"]),
+
            new_trailer("Ticket", &["#42"]),
+
            new_trailer(
+
                "Tested-by",
+
                &["John <john@test.com>", "Jane <jane@test.com>"]
+
            ),
+
            new_trailer("Just-a-token", &[]),
+
        ]
+
    )
+
}
+

+
#[test]
+
fn parse_message_trailers_with_custom_separators() {
+
    let separators = ":=$";
+
    let msg = r#"Subject
+

+
A multiline
+
description.
+

+
Co-authored-by: John Doe <john.doe@test.com>
+
Ticket = #42
+
Tested-by $User <user@test.com>
+
    John <john@test.com>
+
    Jane <jane@test.com>
+
"#;
+
    assert_eq!(
+
        parse(msg, separators).unwrap(),
+
        vec![
+
            new_trailer("Co-authored-by", &["John Doe <john.doe@test.com>"]),
+
            new_trailer("Ticket", &["#42"]),
+
            new_trailer(
+
                "Tested-by",
+
                &[
+
                    "User <user@test.com>",
+
                    "John <john@test.com>",
+
                    "Jane <jane@test.com>"
+
                ]
+
            ),
+
        ]
+
    )
+
}
+

+
#[test]
+
fn parse_message_trailers_with_missing_token() {
+
    let msg = r#"Subject
+

+
Good-trailer: true
+
John Doe <john.doe@test.com> # Unparsable token due to missing token"#;
+
    assert_matches!(
+
        parse(msg, ":"),
+
        Err(Error::Trailing(s))
+
            if s == "John Doe <john.doe@test.com> # Unparsable token due to missing token"
+
    )
+
}
+

+
#[test]
+
fn parse_message_trailers_with_invalid_token() {
+
    let msg = r#"Subject
+

+
Good-trailer: true
+
&!#: John Doe <john.doe@test.com> # Unparsable token due to invalid token"#;
+
    assert_matches!(
+
        parse(msg, ":"),
+
        Err(Error::Trailing(s))
+
            if s == "&!#: John Doe <john.doe@test.com> # Unparsable token due to invalid token"
+
    )
+
}
+

+
#[test]
+
fn parse_message_with_only_trailers() {
+
    let msg = r#"Co-authored-by: John Doe <john.doe@test.com>
+
Ticket: #42
+
Tested-by: Tester <tester@test.com>
+
"#;
+
    assert_eq!(
+
        parse(msg, ":").unwrap(),
+
        vec![
+
            new_trailer("Co-authored-by", &["John Doe <john.doe@test.com>"]),
+
            new_trailer("Ticket", &["#42"]),
+
            new_trailer("Tested-by", &["Tester <tester@test.com>"]),
+
        ]
+
    )
+
}
+

+
#[test]
+
fn parse_empty_message() {
+
    let msg = "";
+
    assert_matches!(parse(msg, ":"), Err(Error::MissingParagraph))
+
}
+

+
#[test]
+
fn display_static() {
+
    let msg = r#"Tested-by: Alice
+
  Bob
+
  Carol
+
  Dylan
+
Acked-by: Eve"#;
+

+
    let parsed = parse(msg, ":").unwrap();
+
    let rendered = format!("{}", display(": ", &parsed));
+
    assert_eq!(&rendered, msg);
+
}
+

+
#[test]
+
fn display_dynamic() {
+
    let msg = r#"Co-authored-by: John Doe <john.doe@test.com>
+
Tested-by: Tester <tester@test.com>
+
Fixes #42"#;
+

+
    let parsed = parse(msg, ":#").unwrap();
+
    let rendered = format!(
+
        "{}",
+
        display(
+
            |t: &Token| if t.deref() == "Fixes" { " #" } else { ": " },
+
            &parsed
+
        )
+
    );
+
    assert_eq!(rendered, msg)
+
}
+

+
fn new_trailer<'a>(token: &'a str, values: &[&'a str]) -> Trailer<'a> {
+
    Trailer {
+
        token: Token::try_from(token).unwrap(),
+
        values: values.iter().map(|s| Cow::from(*s)).collect(),
+
    }
+
}
added link-git/Cargo.toml
@@ -0,0 +1,64 @@
+
[package]
+
name = "link-git"
+
version = "0.1.0"
+
authors = ["Kim Altintop <kim@eagain.st>"]
+
edition = "2018"
+
license = "GPL-3.0-or-later"
+

+
description = "Core git types and functionality"
+

+
[lib]
+
doctest = false
+
test = false
+

+
[dependencies]
+
arc-swap = "1.4.0"
+
async-process = "1.1.0"
+
async-trait = "0.1"
+
blocking = "1.0.2"
+
bstr = "0.2"
+
futures-lite = "1.12.0"
+
futures-util = "0.3.15"
+
lazy_static = "1.4.0"
+
im = "15.0.0"
+
once_cell = "1.10"
+
parking_lot = "0.12"
+
pin-project = "1.0.7"
+
regex = "1.5.4"
+
rustc-hash = "1.1.0"
+
tempfile = "3.3"
+
thiserror = "1.0.30"
+
tracing = "0.1"
+
versions = "3.0.2"
+

+
# gitoxide
+
git-actor = "^0.6.0"
+
git-hash = "^0.8.0"
+
git-lock = "^1.0.1"
+
git-object = "^0.15.1"
+
git-odb = "^0.24.0"
+
git-ref = "^0.9.0"
+
git-traverse = "^0.10.0"
+

+
[dependencies.git-features]
+
version = "^0.17.0"
+
features = ["progress", "parallel", "zlib-ng-compat"]
+

+
[dependencies.git-pack]
+
version = "^0.14.0"
+
features = ["object-cache-dynamic", "pack-cache-lru-static", "pack-cache-lru-dynamic"]
+

+
[dependencies.git-packetline]
+
version = "^0.12.0"
+
features = ["async-io"]
+

+
[dependencies.git-protocol]
+
version = "^0.12.0"
+
features = ["async-client"]
+

+
# compat
+
[dependencies.git2]
+
version = "0.13.24"
+
default-features = false
+
features = ["vendored-libgit2"]
+
optional = true
added link-git/src/lib.rs
@@ -0,0 +1,20 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
#[macro_use]
+
extern crate async_trait;
+

+
pub mod odb;
+
pub mod protocol;
+
pub mod refs;
+
pub use refs::db as refdb;
+
#[cfg(feature = "git2")]
+
pub mod service;
+

+
pub use git_actor as actor;
+
pub use git_hash as hash;
+
pub use git_lock as lock;
+
pub use git_object as object;
+
pub use git_traverse as traverse;
added link-git/src/odb.rs
@@ -0,0 +1,51 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use git_hash::oid;
+
use thiserror::Error;
+

+
pub mod backend;
+
pub mod index;
+
pub mod pack;
+
pub mod window;
+

+
pub use git_pack::{cache, data::Object};
+

+
#[derive(Debug, Error)]
+
pub enum Error {
+
    #[error(transparent)]
+
    Packed(#[from] index::error::Lookup<pack::error::Data>),
+

+
    #[error(transparent)]
+
    Loose(#[from] git_odb::loose::find::Error),
+
}
+

+
pub struct Odb<I, D> {
+
    pub loose: backend::Loose,
+
    pub packed: backend::Packed<I, D>,
+
}
+

+
impl<I, D> Odb<I, D>
+
where
+
    I: index::Index,
+
    D: window::Cache,
+
{
+
    pub fn contains(&self, id: impl AsRef<oid>) -> bool {
+
        self.packed.contains(id.as_ref()) || self.loose.contains(id)
+
    }
+

+
    pub fn find<'a>(
+
        &self,
+
        id: impl AsRef<oid>,
+
        buf: &'a mut Vec<u8>,
+
        cache: &mut impl cache::DecodeEntry,
+
    ) -> Result<Option<Object<'a>>, Error> {
+
        let id = id.as_ref();
+
        if self.packed.contains(id) {
+
            return self.packed.find(id, buf, cache).map_err(Into::into);
+
        }
+
        self.loose.try_find(id, buf).map_err(Into::into)
+
    }
+
}
added link-git/src/odb/backend.rs
@@ -0,0 +1,36 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use git_hash::oid;
+
use git_pack::{cache::DecodeEntry, data::Object};
+

+
use super::{index, pack, window};
+

+
pub type Loose = git_odb::loose::Store;
+

+
pub struct Packed<I, D> {
+
    pub index: I,
+
    pub data: D,
+
}
+

+
impl<I, D> Packed<I, D>
+
where
+
    I: index::Index,
+
    D: window::Cache,
+
{
+
    pub fn contains(&self, id: impl AsRef<oid>) -> bool {
+
        self.index.contains(id)
+
    }
+

+
    pub fn find<'a>(
+
        &self,
+
        id: impl AsRef<oid>,
+
        buf: &'a mut Vec<u8>,
+
        cache: &mut impl DecodeEntry,
+
    ) -> Result<Option<Object<'a>>, index::error::Lookup<pack::error::Data>> {
+
        self.index
+
            .lookup(|info| self.data.get(info), id, buf, cache)
+
    }
+
}
added link-git/src/odb/index.rs
@@ -0,0 +1,307 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    fs,
+
    io,
+
    path::{Path, PathBuf},
+
    sync::Arc,
+
};
+

+
use arc_swap::ArcSwap;
+
use git_hash::oid;
+
use git_pack::{
+
    cache::DecodeEntry,
+
    data::{Object, ResolvedBase},
+
};
+
use parking_lot::Mutex;
+
use tracing::trace;
+

+
use super::pack;
+

+
pub use git_pack::index::File as IndexFile;
+

+
mod metrics;
+
pub use metrics::{Metrics, Stats, StatsView};
+

+
pub mod error {
+
    use super::*;
+
    use thiserror::Error;
+

+
    #[derive(Debug, Error)]
+
    pub enum Discover {
+
        #[error(transparent)]
+
        Index(#[from] pack::error::Index),
+

+
        #[error(transparent)]
+
        Io(#[from] io::Error),
+
    }
+

+
    #[derive(Debug, Error)]
+
    pub enum Lookup<E> {
+
        #[error(transparent)]
+
        Lookup(E),
+

+
        #[error(transparent)]
+
        Reload(#[from] Discover),
+

+
        #[error(transparent)]
+
        Decode(#[from] git_pack::data::decode_entry::Error),
+
    }
+
}
+

+
pub trait Index {
+
    fn contains(&self, id: impl AsRef<oid>) -> bool;
+

+
    fn lookup<'a, F, E>(
+
        &self,
+
        pack_cache: F,
+
        id: impl AsRef<oid>,
+
        buf: &'a mut Vec<u8>,
+
        cache: &mut impl DecodeEntry,
+
    ) -> Result<Option<Object<'a>>, error::Lookup<E>>
+
    where
+
        F: FnOnce(&pack::Info) -> Result<Arc<pack::Data>, E>;
+
}
+

+
/// An [`Index`] which can be shared between threads.
+
///
+
/// [`Shared`] assumes that:
+
///
+
/// * newer packs are likely to contain recent objects
+
/// * lookups tend to favour recent objects
+
/// * lookups tend to expect the object to be found (the object id is either
+
///   pointed to by a ref, or linked to by an existing object)
+
///
+
/// Thus, it:
+
///
+
/// * orders indices found in `GIT_DIR/objects/pack` by modification time, and
+
///   queries the more recent ones first
+
/// * attempts to rescan `GIT_DIR/objects/pack` when an object id was _not_
+
///   found (assuming that this is due to a compaction)
+
///
+
/// Unless a reload occurs, lookups are lock-free and mostly wait-free. Writes
+
/// ([`Shared::push`], [`Shared::reload`]) are guarded by a [`Mutex`].
+
// TODO: consecutive lookups also tend to resolve to the same pack, so we could
+
// remember the index into the `im::Vector` where we found a match and look
+
// there first. This is what libgit2 does, but the heuristic is not necessarily
+
// true when `Shared` is shared across multiple concurrent link replication
+
// tasks; per-namespace packs are independent pre-compaction.
+
pub struct Shared<M> {
+
    pack_dir: PathBuf,
+
    indices: ArcSwap<im::Vector<Arc<pack::Index>>>,
+
    write: Mutex<()>,
+
    stats: M,
+
}
+

+
impl Shared<()> {
+
    pub fn open(git_dir: impl AsRef<Path>) -> Result<Self, error::Discover> {
+
        let pack_dir = git_dir.as_ref().join("objects").join("pack");
+
        let indices = discover(&pack_dir)?;
+

+
        Ok(Self {
+
            pack_dir,
+
            indices: ArcSwap::new(Arc::new(indices)),
+
            write: Mutex::new(()),
+
            stats: (),
+
        })
+
    }
+
}
+

+
impl<M> Shared<M>
+
where
+
    M: Metrics,
+
{
+
    pub fn with_stats(self) -> Shared<Stats> {
+
        self.with_metrics(Stats::default())
+
    }
+

+
    pub fn with_metrics<N: Metrics>(self, m: N) -> Shared<N> {
+
        Shared {
+
            pack_dir: self.pack_dir,
+
            indices: self.indices,
+
            write: self.write,
+
            stats: m,
+
        }
+
    }
+

+
    pub fn stats(&self) -> M::Snapshot {
+
        self.stats.snapshot(self.len())
+
    }
+

+
    /// Add a newly discovered [`pack::Index`].
+
    ///
+
    /// This index will be considered first by subsequent lookups. Note that it
+
    /// is only guaranteed that the index will be visible to readers if it
+
    /// resides in the `git_dir` this [`Shared`] was initialised with.
+
    pub fn push(&self, idx: pack::Index) {
+
        let lock = self.write.lock();
+
        let mut new = self.indices.load_full();
+
        Arc::make_mut(&mut new).push_front(Arc::new(idx));
+
        self.indices.store(new);
+
        drop(lock);
+

+
        self.stats.record_push()
+
    }
+

+
    /// Re-scan the packs directory and replace the in-memory indices with the
+
    /// result.
+
    ///
+
    /// If the application can intercept compaction events, this method can be
+
    /// used to release memory early. Otherwise it is not required to call this
+
    /// method, as [`Shared`] manages reloads automatically.
+
    pub fn reload(&self) -> Result<(), error::Discover> {
+
        let lock = self.write.lock();
+
        let indices = discover(&self.pack_dir)?;
+
        self.indices.store(Arc::new(indices));
+
        drop(lock);
+

+
        self.stats.record_reload();
+

+
        Ok(())
+
    }
+

+
    pub fn is_empty(&self) -> bool {
+
        self.indices.load().is_empty()
+
    }
+

+
    pub fn len(&self) -> usize {
+
        self.indices.load().len()
+
    }
+

+
    fn contains(&self, id: impl AsRef<oid>) -> bool {
+
        for i in 0..2 {
+
            for idx in self.indices.load().iter() {
+
                if idx.contains(&id) {
+
                    self.stats.record_hit();
+
                    return true;
+
                }
+
            }
+

+
            if i == 0 && self.reload().is_err() {
+
                self.stats.record_miss();
+
                return false;
+
            }
+
        }
+

+
        self.stats.record_miss();
+
        false
+
    }
+

+
    fn lookup<'a, F, E>(
+
        &self,
+
        pack_cache: F,
+
        id: impl AsRef<oid>,
+
        buf: &'a mut Vec<u8>,
+
        cache: &mut impl DecodeEntry,
+
    ) -> Result<Option<Object<'a>>, error::Lookup<E>>
+
    where
+
        F: FnOnce(&pack::Info) -> Result<Arc<pack::Data>, E>,
+
    {
+
        for i in 0..2 {
+
            for idx in self.indices.load().iter() {
+
                if let Some(ofs) = idx.ofs(&id) {
+
                    self.stats.record_hit();
+
                    return load_obj(ofs, idx, pack_cache, buf, cache).map(Some);
+
                }
+
            }
+

+
            if i == 0 {
+
                self.reload()?;
+
            }
+
        }
+

+
        self.stats.record_miss();
+
        Ok(None)
+
    }
+
}
+

+
fn load_obj<'a, F, E>(
+
    ofs: u64,
+
    idx: &pack::Index,
+
    pack_cache: F,
+
    buf: &'a mut Vec<u8>,
+
    cache: &mut impl DecodeEntry,
+
) -> Result<Object<'a>, error::Lookup<E>>
+
where
+
    F: FnOnce(&pack::Info) -> Result<Arc<pack::Data>, E>,
+
{
+
    let data = pack_cache(&idx.info).map_err(error::Lookup::Lookup)?;
+
    let pack = data.file();
+
    let entry = pack.entry(ofs);
+
    let obj = pack
+
        .decode_entry(
+
            entry,
+
            buf,
+
            |id, _| idx.ofs(id).map(|ofs| ResolvedBase::InPack(pack.entry(ofs))),
+
            cache,
+
        )
+
        .map(move |out| Object {
+
            kind: out.kind,
+
            data: buf.as_slice(),
+
            pack_location: None,
+
        })?;
+

+
    Ok(obj)
+
}
+

+
fn discover(pack_dir: impl AsRef<Path>) -> Result<im::Vector<Arc<pack::Index>>, error::Discover> {
+
    let pack_dir = pack_dir.as_ref();
+
    let pack_dir_disp = pack_dir.display();
+
    trace!("discovering packs at {}", pack_dir_disp);
+
    match fs::read_dir(&pack_dir) {
+
        Ok(iter) => {
+
            let mut paths = Vec::new();
+
            for entry in iter {
+
                let entry = entry?;
+
                let path = entry.path();
+
                trace!("{}", path.display());
+
                let meta = entry.metadata()?;
+
                if meta.file_type().is_file() && path.extension().unwrap_or_default() == "idx" {
+
                    let mtime = meta.modified()?;
+
                    paths.push((path, mtime));
+
                }
+
            }
+
            paths.sort_by(|(_, mtime_a), (_, mtime_b)| mtime_a.cmp(mtime_b));
+

+
            let indices = paths
+
                .into_iter()
+
                .rev()
+
                .map(|(path, _)| Ok(pack::Index::open(path).map(Arc::new)?))
+
                .collect::<Result<_, error::Discover>>()?;
+

+
            Ok(indices)
+
        },
+
        // It's not an error if the directory doesn't exist; the repository
+
        // could contain only loose objects
+
        Err(e) if e.kind() == io::ErrorKind::NotFound => {
+
            trace!("not a directory: {}", pack_dir_disp);
+
            Ok(im::Vector::new())
+
        },
+
        Err(e) => Err(e.into()),
+
    }
+
}
+

+
impl<M> Index for Shared<M>
+
where
+
    M: Metrics,
+
{
+
    fn contains(&self, id: impl AsRef<oid>) -> bool {
+
        self.contains(id)
+
    }
+

+
    fn lookup<'a, F, E>(
+
        &self,
+
        pack_cache: F,
+
        id: impl AsRef<oid>,
+
        buf: &'a mut Vec<u8>,
+
        cache: &mut impl DecodeEntry,
+
    ) -> Result<Option<Object<'a>>, error::Lookup<E>>
+
    where
+
        F: FnOnce(&pack::Info) -> Result<Arc<pack::Data>, E>,
+
    {
+
        self.lookup(pack_cache, id, buf, cache)
+
    }
+
}
added link-git/src/odb/index/metrics.rs
@@ -0,0 +1,80 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::sync::atomic::{AtomicUsize, Ordering};
+

+
pub struct StatsView {
+
    /// Total number of times a lookup was successful.
+
    pub hits: usize,
+
    /// Total number of times a lookup was unsuccessful.
+
    pub misses: usize,
+
    /// Total number of times an index was added explicitly via
+
    /// [`super::Shared::push`].
+
    pub pushes: usize,
+
    /// Total number of reloads via [`super::Shared::reload`].
+
    pub reloads: usize,
+
    /// Number of [`crate::odb::pack::Index`]es currently held.
+
    pub indices: usize,
+
}
+

+
#[derive(Default)]
+
pub struct Stats {
+
    hits: AtomicUsize,
+
    misses: AtomicUsize,
+
    pushes: AtomicUsize,
+
    reloads: AtomicUsize,
+
}
+

+
pub trait Metrics {
+
    type Snapshot;
+

+
    fn record_hit(&self);
+
    fn record_miss(&self);
+
    fn record_push(&self);
+
    fn record_reload(&self);
+

+
    fn snapshot(&self, indices: usize) -> Self::Snapshot;
+
}
+

+
impl Metrics for Stats {
+
    type Snapshot = StatsView;
+

+
    fn record_hit(&self) {
+
        self.hits.fetch_add(1, Ordering::Relaxed);
+
    }
+

+
    fn record_miss(&self) {
+
        self.misses.fetch_add(1, Ordering::Relaxed);
+
    }
+

+
    fn record_push(&self) {
+
        self.pushes.fetch_add(1, Ordering::Relaxed);
+
    }
+

+
    fn record_reload(&self) {
+
        self.reloads.fetch_add(1, Ordering::Relaxed);
+
    }
+

+
    fn snapshot(&self, indices: usize) -> Self::Snapshot {
+
        StatsView {
+
            hits: self.hits.load(Ordering::Relaxed),
+
            misses: self.misses.load(Ordering::Relaxed),
+
            pushes: self.pushes.load(Ordering::Relaxed),
+
            reloads: self.reloads.load(Ordering::Relaxed),
+
            indices,
+
        }
+
    }
+
}
+

+
impl Metrics for () {
+
    type Snapshot = ();
+

+
    fn record_hit(&self) {}
+
    fn record_miss(&self) {}
+
    fn record_push(&self) {}
+
    fn record_reload(&self) {}
+

+
    fn snapshot(&self, _: usize) -> Self::Snapshot {}
+
}
added link-git/src/odb/pack.rs
@@ -0,0 +1,138 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    path::{Path, PathBuf},
+
    sync::atomic::{AtomicUsize, Ordering},
+
};
+

+
use git_hash::{oid, ObjectId};
+
use git_pack::{data, index};
+
use rustc_hash::FxHasher;
+
use tracing::warn;
+

+
pub mod error {
+
    use super::*;
+
    use thiserror::Error;
+

+
    #[derive(Debug, Error)]
+
    #[error("failed to load pack data from {path:?}")]
+
    pub struct Data {
+
        pub path: PathBuf,
+
        pub source: data::header::decode::Error,
+
    }
+

+
    #[derive(Debug, Error)]
+
    #[error("failed to load pack index from {path:?}")]
+
    pub struct Index {
+
        pub path: PathBuf,
+
        pub source: index::init::Error,
+
    }
+
}
+

+
pub struct Data {
+
    pub hash: u64,
+
    hits: AtomicUsize,
+
    file: data::File,
+
}
+

+
impl Data {
+
    pub fn hit(&self) {
+
        self.hits.fetch_add(1, Ordering::Relaxed);
+
    }
+

+
    pub fn hits(&self) -> usize {
+
        self.hits.load(Ordering::Relaxed)
+
    }
+

+
    pub fn file(&self) -> &data::File {
+
        &self.file
+
    }
+
}
+

+
impl AsRef<data::File> for Data {
+
    fn as_ref(&self) -> &data::File {
+
        self.file()
+
    }
+
}
+

+
#[derive(Clone, PartialEq, Eq)]
+
pub struct Info {
+
    pub(super) hash: u64,
+
    pub data_path: PathBuf,
+
}
+

+
impl Info {
+
    pub fn data(&self) -> Result<Data, error::Data> {
+
        let file = data::File::at(&self.data_path).map_err(|source| error::Data {
+
            path: self.data_path.clone(),
+
            source,
+
        })?;
+
        Ok(Data {
+
            hash: self.hash,
+
            hits: AtomicUsize::new(0),
+
            file,
+
        })
+
    }
+
}
+

+
pub struct Index {
+
    pub info: Info,
+
    file: index::File,
+
}
+

+
impl Index {
+
    pub fn open(path: impl AsRef<Path>) -> Result<Self, error::Index> {
+
        let path = path.as_ref();
+
        let file = index::File::at(path).map_err(|source| error::Index {
+
            path: path.to_path_buf(),
+
            source,
+
        })?;
+
        let data_path = path.with_extension("pack");
+
        let hash = {
+
            let file_name = path
+
                .file_name()
+
                .expect("must have a file name, we opened it")
+
                .to_string_lossy();
+
            // XXX: inexplicably, gitoxide omits the "pack-" prefix
+
            let sha_hex = file_name.strip_prefix("pack-").unwrap_or(&file_name);
+
            match ObjectId::from_hex(&sha_hex.as_bytes()[..40]) {
+
                Err(e) => {
+
                    warn!(
+
                        "unconventional pack name {:?}, falling back to fxhash: {}",
+
                        path, e
+
                    );
+
                    hash(path)
+
                },
+
                Ok(oid) => {
+
                    let mut buf = [0u8; 8];
+
                    buf.copy_from_slice(&oid.sha1()[..8]);
+
                    u64::from_be_bytes(buf)
+
                },
+
            }
+
        };
+
        let info = Info { hash, data_path };
+

+
        Ok(Self { file, info })
+
    }
+

+
    pub fn contains(&self, id: impl AsRef<oid>) -> bool {
+
        self.file.lookup(id).is_some()
+
    }
+

+
    pub fn ofs(&self, id: impl AsRef<oid>) -> Option<u64> {
+
        self.file
+
            .lookup(id)
+
            .map(|idx| self.file.pack_offset_at_index(idx))
+
    }
+
}
+

+
fn hash(p: &Path) -> u64 {
+
    use std::hash::{Hash as _, Hasher as _};
+

+
    let mut hasher = FxHasher::default();
+
    p.hash(&mut hasher);
+
    hasher.finish()
+
}
added link-git/src/odb/window.rs
@@ -0,0 +1,183 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::sync::Arc;
+

+
use arc_swap::{ArcSwap, Guard};
+
use parking_lot::Mutex;
+

+
use super::pack;
+

+
mod metrics;
+
pub use metrics::{Metrics, Stats, StatsView};
+

+
/// A thread-safe, shareable cache of packfiles.
+
pub trait Cache {
+
    type Stats;
+

+
    fn stats(&self) -> Self::Stats;
+

+
    fn get(&self, info: &pack::Info) -> Result<Arc<pack::Data>, pack::error::Data>;
+
}
+

+
impl<M, const B: usize, const S: usize> Cache for Fixed<M, B, S>
+
where
+
    M: Metrics,
+
{
+
    type Stats = M::Snapshot;
+

+
    fn stats(&self) -> Self::Stats {
+
        self.stats()
+
    }
+

+
    fn get(&self, info: &pack::Info) -> Result<Arc<pack::Data>, pack::error::Data> {
+
        self.get(info)
+
    }
+
}
+

+
/// 128 open files
+
pub type Small<S> = Fixed<S, 16, 8>;
+
/// 512 open files
+
pub type Medium<S> = Fixed<S, 32, 16>;
+
/// 1024 open files
+
pub type Large<S> = Fixed<S, 64, 16>;
+
/// 2048 open files
+
pub type XLarge<S> = Fixed<S, 128, 16>;
+

+
/// A fixed-size [`Cache`].
+
///
+
/// [`Fixed`] is essentially a very simple, fixed-capacity hashtable. When a
+
/// pack (data-) file is requested via [`Cache::get`], the file is loaded
+
/// (typically `mmap`ed) from disk if it is not already in the cache. Otherwise,
+
/// a pointer to the already loaded file is returned. Old entries are replaced
+
/// on an approximate LRU basis when the cache becomes full (this means that old
+
/// entries are **not** evicted when there is still space).
+
///
+
/// The implementation is a somewhat dumbed-down version of JGit's
+
/// `WindowCache`. The main difference is that the table buckets are of fixed
+
/// size (`SLOTS`) instead of a linked list. This means that the cache does not
+
/// allow (temporarily) committing more entries than its nominal capacity.
+
///
+
/// Reading cached values is lock-free and mostly wait-free. Modifications are
+
/// guarded by locks on individual buckets; if a cache miss occurs, multiple
+
/// threads requesting the same entry will be blocked until one of them
+
/// succeeds in loading the data and updating the cache. Writers will _not_,
+
/// however, contend with readers (unlike `RwLock`).
+
///
+
/// This favours usage patterns where different threads tend to request disjoint
+
/// sets of packfiles, and — of course — where their hashes collide relatively
+
/// infrequently.
+
pub struct Fixed<M, const BUCKETS: usize, const SLOTS: usize> {
+
    entries: [ArcSwap<[Option<Arc<pack::Data>>; SLOTS]>; BUCKETS],
+
    locks: [Mutex<()>; BUCKETS],
+
    stats: M,
+
}
+

+
trait AssertSendSync: Send + Sync {}
+
impl<M, const B: usize, const S: usize> AssertSendSync for Fixed<M, B, S> where M: Send + Sync {}
+

+
impl<M, const B: usize, const S: usize> AsRef<Fixed<M, B, S>> for Fixed<M, B, S> {
+
    fn as_ref(&self) -> &Fixed<M, B, S> {
+
        self
+
    }
+
}
+

+
impl<const B: usize, const S: usize> Default for Fixed<(), B, S> {
+
    fn default() -> Self {
+
        Self {
+
            entries: [(); B].map(|_| ArcSwap::new(Arc::new([(); S].map(|_| None)))),
+
            locks: [(); B].map(|_| Mutex::new(())),
+
            stats: (),
+
        }
+
    }
+
}
+

+
impl<M, const B: usize, const S: usize> Fixed<M, B, S>
+
where
+
    M: Metrics,
+
{
+
    pub fn with_stats(self) -> Fixed<Stats, B, S> {
+
        self.with_metrics(Stats::default())
+
    }
+

+
    pub fn with_metrics<N: Metrics>(self, m: N) -> Fixed<N, B, S> {
+
        Fixed {
+
            entries: self.entries,
+
            locks: self.locks,
+
            stats: m,
+
        }
+
    }
+

+
    pub fn stats(&self) -> M::Snapshot {
+
        let open_files = self
+
            .entries
+
            .iter()
+
            .map(|bucket| bucket.load().iter().flatten().count())
+
            .sum();
+
        self.stats.snapshot(open_files)
+
    }
+

+
    pub fn get(&self, info: &pack::Info) -> Result<Arc<pack::Data>, pack::error::Data> {
+
        let idx = info.hash as usize % self.entries.len();
+

+
        let bucket = self.entries[idx].load();
+
        for entry in bucket.iter().flatten() {
+
            if entry.hash == info.hash {
+
                self.stats.record_hit();
+
                entry.hit();
+
                return Ok(Arc::clone(entry));
+
            }
+
        }
+
        drop(bucket);
+

+
        self.stats.record_miss();
+

+
        // Cache miss, try to load the data file
+
        let lock = self.locks[idx].lock();
+
        // Did someone else win the race for the lock?
+
        let bucket = self.entries[idx].load();
+
        for entry in bucket.iter().flatten() {
+
            if entry.hash == info.hash {
+
                self.stats.record_hit();
+
                entry.hit();
+
                return Ok(Arc::clone(entry));
+
            }
+
        }
+
        // No, proceed
+
        self.stats.record_load();
+
        let data = Arc::new(info.data()?);
+

+
        // Find an empty slot, or swap with the least popular
+
        let mut access = usize::MAX;
+
        let mut evict = 0;
+
        for (i, e) in bucket.iter().enumerate() {
+
            match e {
+
                Some(entry) => {
+
                    let hits = entry.hits();
+
                    if hits < access {
+
                        access = hits;
+
                        evict = i;
+
                    }
+
                },
+
                None => {
+
                    evict = i;
+
                    break;
+
                },
+
            }
+
        }
+
        let mut entries = Guard::into_inner(bucket);
+
        {
+
            // This costs `SLOTS` refcount increments if the slot is currently
+
            // borrowed.
+
            let mutti = Arc::make_mut(&mut entries);
+
            mutti[evict] = Some(Arc::clone(&data));
+
        }
+
        self.entries[idx].store(entries);
+
        drop(lock);
+

+
        data.hit();
+
        Ok(data)
+
    }
+
}
added link-git/src/odb/window/metrics.rs
@@ -0,0 +1,79 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::sync::atomic::{AtomicUsize, Ordering};
+

+
use tracing::trace;
+

+
pub struct StatsView {
+
    /// Total number of times the requested data was found in the cache.
+
    pub cache_hits: usize,
+
    /// Total number of times the requested data was not found in the cache.
+
    ///
+
    /// Note that a cache hit can occur after a miss if another thread was
+
    /// faster to fill in the missing entry. Thus, `cache_hits + cache_misses`
+
    /// does not necessarily sum up to the number of cache accesses.
+
    pub cache_misses: usize,
+
    /// Total number of times a pack file was attempted to be loaded from disk
+
    /// (incl. failed attempts).
+
    pub file_loads: usize,
+
    /// Total number of pack files the cache holds on to.
+
    pub open_files: usize,
+
}
+

+
#[derive(Default)]
+
pub struct Stats {
+
    hits: AtomicUsize,
+
    miss: AtomicUsize,
+
    load: AtomicUsize,
+
}
+

+
pub trait Metrics {
+
    type Snapshot;
+

+
    fn record_hit(&self);
+
    fn record_miss(&self);
+
    fn record_load(&self);
+

+
    fn snapshot(&self, open_files: usize) -> Self::Snapshot;
+
}
+

+
impl Metrics for Stats {
+
    type Snapshot = StatsView;
+

+
    fn record_hit(&self) {
+
        trace!("cache hit");
+
        self.hits.fetch_add(1, Ordering::Relaxed);
+
    }
+

+
    fn record_miss(&self) {
+
        trace!("cache miss");
+
        self.miss.fetch_add(1, Ordering::Relaxed);
+
    }
+

+
    fn record_load(&self) {
+
        trace!("pack load");
+
        self.load.fetch_add(1, Ordering::Relaxed);
+
    }
+

+
    fn snapshot(&self, open_files: usize) -> Self::Snapshot {
+
        StatsView {
+
            cache_hits: self.hits.load(Ordering::Relaxed),
+
            cache_misses: self.miss.load(Ordering::Relaxed),
+
            file_loads: self.load.load(Ordering::Relaxed),
+
            open_files,
+
        }
+
    }
+
}
+

+
impl Metrics for () {
+
    type Snapshot = ();
+

+
    fn record_hit(&self) {}
+
    fn record_miss(&self) {}
+
    fn record_load(&self) {}
+

+
    fn snapshot(&self, _: usize) -> Self::Snapshot {}
+
}
added link-git/src/protocol.rs
@@ -0,0 +1,30 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use bstr::ByteSlice as _;
+
use git_protocol::transport::client;
+
use versions::Version;
+

+
pub mod fetch;
+
pub mod ls;
+
pub mod packwriter;
+
pub mod take;
+
pub mod transport;
+
pub mod upload_pack;
+

+
pub use fetch::{fetch, Ref};
+
pub use ls::ls_refs;
+
pub use packwriter::PackWriter;
+
pub use upload_pack::upload_pack;
+

+
pub use git_hash::{oid, ObjectId};
+

+
fn remote_git_version(caps: &client::Capabilities) -> Option<Version> {
+
    let agent = caps.capability("agent").and_then(|cap| {
+
        cap.value()
+
            .and_then(|bs| bs.to_str().map(|s| s.to_owned()).ok())
+
    })?;
+
    Version::new(agent.strip_prefix("git/")?)
+
}
added link-git/src/protocol/fetch.rs
@@ -0,0 +1,290 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    future::Future,
+
    io,
+
    mem,
+
    pin::Pin,
+
    sync::{
+
        atomic::{AtomicBool, Ordering},
+
        Arc,
+
    },
+
    task::{Context, Poll},
+
};
+

+
use bstr::{BString, ByteSlice as _};
+
use futures_lite::{
+
    future,
+
    io::{AsyncBufRead, AsyncRead, AsyncWrite},
+
};
+
use git_features::progress::{self, Progress};
+
use git_protocol::{
+
    fetch::{response, Action, Arguments, Delegate, DelegateBlocking, LsRefsAction, Response},
+
    transport::client,
+
};
+
use once_cell::sync::Lazy;
+
use pin_project::{pin_project, pinned_drop};
+
use versions::Version;
+

+
pub use git_hash::ObjectId;
+
pub use git_protocol::fetch::Ref;
+

+
use super::{packwriter::PackWriter, remote_git_version, transport};
+

+
// Work around `git-upload-pack` not handling namespaces properly,
//
// cf. https://lore.kernel.org/git/CD2XNXHACAXS.13J6JTWZPO1JA@schmidt/
// Fixed in `git.git` 1ab13eb, which should land in 2.34
//
// Based on testing with git 2.25.1 in Ubuntu 20.04, this workaround is
// not needed. Hence the checked version is lowered to 2.25.0.
//
// NOTE(review): the `ls` module's `must_namespace` performs the analogous
// check with a strict `<` against the same 2.25.0 pivot, whereas this uses
// `<=` — confirm whether 2.25.0 itself needs the workaround and align the
// two comparisons.
fn must_namespace_want_ref(caps: &client::Capabilities) -> bool {
    static FIXED_AFTER: Lazy<Version> = Lazy::new(|| Version::new("2.25.0").unwrap());

    // Unknown or unparseable agent version: assume the fix is present.
    remote_git_version(caps)
        .map(|version| version <= *FIXED_AFTER)
        .unwrap_or(false)
}
+

+
/// Options for a [`fetch`] invocation.
#[derive(Debug)]
pub struct Options {
    /// The remote (logical) repository to fetch from.
    ///
    /// Normally, this is the path to a repo on the remote side (eg.
    /// `/git.git`). `radicle-link` serves only a single namespaced repo, so
    /// this value should be the name of a namespace.
    pub repo: BString,

    /// [Extra Parameters][extra] to send with the initial transport header.
    ///
    /// [extra]: https://git.kernel.org/pub/scm/git/git.git/tree/Documentation/technical/pack-protocol.txt#n52
    pub extra_params: Vec<(String, Option<String>)>,

    /// [`ObjectId`]s to send as `want` lines.
    pub wants: Vec<ObjectId>,

    /// [`ObjectId`]s to send as `have` lines.
    pub haves: Vec<ObjectId>,

    /// Known refs to ask the server to include in the packfile.
    pub want_refs: Vec<BString>,
}
+

+
/// Result of a successful [`fetch`].
#[derive(Debug)]
pub struct Outputs<T> {
    /// The `wanted-refs` as acknowledged by the server.
    pub wanted_refs: Vec<Ref>,
    /// If a packfile was received successfully, some info about it.
    pub pack: Option<T>,
}

// Implemented manually: `#[derive(Default)]` would impose a `T: Default`
// bound, which is unnecessary since `pack` simply defaults to `None`.
impl<T> Default for Outputs<T> {
    fn default() -> Self {
        Self {
            wanted_refs: Vec::new(),
            pack: None,
        }
    }
}
+

+
/// [`Delegate`] driving the fetch end of the [pack protocol].
///
/// [pack protocol]: https://git.kernel.org/pub/scm/git/git.git/tree/Documentation/technical/pack-protocol.txt
pub struct Fetch<P, O> {
    // Fetch parameters (wants, haves, want-refs, repo).
    opt: Options,
    // Sink the received packfile is handed to.
    pack_writer: P,
    // Accumulated results, populated during the protocol run.
    out: Outputs<O>,
    // Set from server capabilities in `prepare_fetch`; see
    // `must_namespace_want_ref`.
    need_namespaced_want_ref: bool,
}

impl<P, O> Fetch<P, O> {
    /// Create a delegate from the given options and packfile sink.
    pub fn new(opt: Options, pack_writer: P) -> Self {
        Self {
            opt,
            pack_writer,
            out: Outputs::default(),
            need_namespaced_want_ref: false,
        }
    }

    /// Borrow the results accumulated so far.
    pub fn outputs(&self) -> &Outputs<O> {
        &self.out
    }

    /// Move the accumulated results out, leaving defaults behind.
    pub fn take_outputs(&mut self) -> Outputs<O> {
        mem::take(&mut self.out)
    }
}
+

+
impl<P: PackWriter> DelegateBlocking for Fetch<P, P::Output> {
    fn handshake_extra_parameters(&self) -> Vec<(String, Option<String>)> {
        self.opt.extra_params.clone()
    }

    // `fetch` assumes the refs are already known (cf. `ls_refs`), so the
    // ls-refs phase is skipped entirely.
    fn prepare_ls_refs(
        &mut self,
        _: &client::Capabilities,
        _: &mut Vec<BString>,
        _: &mut Vec<(&str, Option<&str>)>,
    ) -> io::Result<LsRefsAction> {
        Ok(LsRefsAction::Skip)
    }

    // Validate the request against the server's capabilities before the
    // `fetch` command is issued.
    fn prepare_fetch(
        &mut self,
        _: git_protocol::transport::Protocol,
        caps: &client::Capabilities,
        _: &mut Vec<(&str, Option<&str>)>,
        _: &[Ref],
    ) -> io::Result<Action> {
        if !self.opt.want_refs.is_empty() && !remote_supports_ref_in_want(caps) {
            return Err(io::Error::new(
                io::ErrorKind::Unsupported,
                "`want-ref`s given, but server does not support `ref-in-want`",
            ));
        }

        // Nothing to request at all: bail out instead of sending an empty
        // `fetch`.
        if self.opt.wants.is_empty() && self.opt.want_refs.is_empty() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "`fetch` is empty",
            ));
        }

        self.need_namespaced_want_ref = must_namespace_want_ref(caps);

        Ok(Action::Continue)
    }

    // Emit all `want` / `have` / `want-ref` lines in a single round.
    fn negotiate(
        &mut self,
        _: &[Ref],
        args: &mut Arguments,
        _: Option<&Response>,
    ) -> io::Result<Action> {
        for oid in &self.opt.wants {
            args.want(oid);
        }

        for oid in &self.opt.haves {
            args.have(oid)
        }

        for name in &self.opt.want_refs {
            if self.need_namespaced_want_ref {
                // Prefix with the namespace to work around old servers; the
                // prefix is stripped again in `receive_pack`.
                let want_ref = format!("refs/namespaces/{}/{}", self.opt.repo, name);
                args.want_ref(BString::from(want_ref).as_bstr());
            } else {
                args.want_ref(name.as_bstr());
            }
        }

        // send done, as we don't bother with further negotiation
        Ok(Action::Cancel)
    }
}
+

+
#[async_trait(?Send)]
impl<P: PackWriter> Delegate for Fetch<P, P::Output> {
    // Record the server-acknowledged `wanted-refs` and stream the packfile
    // into the configured `PackWriter`.
    async fn receive_pack(
        &mut self,
        pack: impl AsyncBufRead + Unpin + 'async_trait,
        prog: impl Progress,
        _: &[Ref],
        resp: &Response,
    ) -> io::Result<()> {
        // Strip any namespaces leaked by the other end due to workarounds
        let namespace = format!("refs/namespaces/{}/", self.opt.repo);
        self.out.wanted_refs.extend(resp.wanted_refs().iter().map(
            |response::WantedRef { id, path }| {
                Ref::Direct {
                    path: path
                        .strip_prefix(namespace.as_bytes())
                        .map(BString::from)
                        .unwrap_or_else(|| path.clone()),
                    object: *id,
                }
            },
        ));
        // Note: `write_pack` is a blocking call; this delegate is driven
        // from a blocking thread via `fetch` below.
        let out = self.pack_writer.write_pack(pack, prog)?;
        self.out.pack = Some(out);

        Ok(())
    }
}
+

+
/// Future created by the [`fetch`] function.
///
/// Ensures that a running inner [`PackWriter`] is cancelled when the
/// [`Fetching`] future is dropped without also dropping the [`AsyncRead`] data
/// source.
#[pin_project(PinnedDrop)]
struct Fetching<T> {
    // Cancellation flag shared with the `PackWriter` (handed to
    // `build_pack_writer` in `fetch`).
    stop: Arc<AtomicBool>,
    #[pin]
    task: T,
}

#[pinned_drop]
impl<T> PinnedDrop for Fetching<T> {
    // Signal cancellation to the pack writer when the future is dropped.
    fn drop(self: Pin<&mut Self>) {
        self.stop.store(true, Ordering::Release)
    }
}

// Plain pass-through poll: all the work happens in the inner task.
impl<T> Future for Fetching<T>
where
    T: Future,
{
    type Output = T::Output;

    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        self.project().task.poll(cx)
    }
}
+

+
/// Run the fetch end of the pack protocol over the given transport halves.
///
/// The protocol is driven on a blocking thread (via [`blocking::unblock`]),
/// since [`PackWriter::write_pack`] is a blocking interface. The returned
/// [`Fetching`] future signals cancellation to the pack writer (through the
/// `Arc<AtomicBool>` passed to `build_pack_writer`) if it is dropped early.
pub fn fetch<B, P, R, W>(
    opt: Options,
    build_pack_writer: B,
    recv: R,
    send: W,
) -> impl Future<Output = io::Result<Outputs<P::Output>>>
where
    B: FnOnce(Arc<AtomicBool>) -> P,
    P: PackWriter + Send + 'static,
    P::Output: Send + 'static,
    R: AsyncRead + Unpin + Send + 'static,
    W: AsyncWrite + Unpin + Send + 'static,
{
    let stop = Arc::new(AtomicBool::new(false));
    let task = blocking::unblock({
        // Created outside the closure so construction errors (none today)
        // and captures happen before the thread hop.
        let mut conn = transport::Stateless::new(opt.repo.clone(), recv, send);
        let pack_writer = build_pack_writer(Arc::clone(&stop));

        move || {
            let mut delegate = Fetch::new(opt, pack_writer);
            // Block this (already blocking) thread on the async protocol
            // driver.
            future::block_on(git_protocol::fetch(
                &mut conn,
                &mut delegate,
                // No credentials are ever requested over this transport.
                |_| unreachable!("credentials helper requested"),
                progress::Discard,
                git_protocol::FetchConnection::AllowReuse,
            ))
            .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

            Ok(delegate.out)
        }
    });

    Fetching { stop, task }
}
+

+
fn remote_supports_ref_in_want(caps: &client::Capabilities) -> bool {
+
    caps.capability("fetch")
+
        .and_then(|cap| cap.supports("ref-in-want"))
+
        .unwrap_or(false)
+
}
added link-git/src/protocol/ls.rs
@@ -0,0 +1,151 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::io;
+

+
use bstr::{BString, ByteVec as _};
+
use futures_lite::io::{AsyncBufRead, AsyncRead, AsyncWrite};
+
use git_features::progress::{self, Progress};
+
use git_protocol::{
+
    fetch::{Action, Arguments, Delegate, DelegateBlocking, LsRefsAction, Response},
+
    transport::client,
+
};
+
use once_cell::sync::Lazy;
+
use versions::Version;
+

+
pub use git_protocol::fetch::Ref;
+

+
use super::{remote_git_version, transport};
+

+
// Work around `git-upload-pack` not handling namespaces properly
//
// cf. https://lore.kernel.org/git/pMV5dJabxOBTD8kJBaPuWK0aS6OJhRQ7YFGwfhPCeSJEbPDrIFBza36nXBCgUCeUJWGmpjPI1rlOGvZJEh71Ruz4SqljndUwOCoBUDRHRDU=@eagain.st/
//
// Based on testing with git 2.25.1 in Ubuntu 20.04, this workaround is
// not needed. Hence the checked version is lowered to 2.25.0.
//
// NOTE(review): the `fetch` module's `must_namespace_want_ref` does the
// analogous check with `<=` against the same 2.25.0 pivot, whereas this
// uses strict `<` — confirm which boundary is intended and align the two.
fn must_namespace(caps: &client::Capabilities) -> bool {
    static MIN_GIT_VERSION_NAMESPACES: Lazy<Version> =
        Lazy::new(|| Version::new("2.25.0").unwrap());

    // Unknown or unparseable agent version: assume no workaround needed.
    remote_git_version(caps)
        .map(|version| version < *MIN_GIT_VERSION_NAMESPACES)
        .unwrap_or(false)
}
+

+
/// Options for an [`ls_refs`] invocation.
#[derive(Debug)]
pub struct Options {
    /// The remote (logical) repository to fetch from.
    ///
    /// Normally, this is the path to a repo on the remote side (eg.
    /// `/git.git`). `radicle-link` serves only a single namespaced repo, so
    /// this value should be the name of a namespace.
    pub repo: BString,

    /// [Extra Parameters][extra] to send with the initial transport header.
    ///
    /// [extra]: https://git.kernel.org/pub/scm/git/git.git/tree/Documentation/technical/pack-protocol.txt#n52
    pub extra_params: Vec<(String, Option<String>)>,

    /// Prefixes of refs to ask the server to advertise via `ls-refs`.
    ///
    /// If the [`Vec`] is empty, the server is asked to return all refs it knows
    /// about. Otherwise, the server is asked to only return refs matching
    /// the given prefixes.
    pub ref_prefixes: Vec<BString>,
}
+

+
/// [`Delegate`] for running a stateless `ls-refs` command.
pub struct LsRefs {
    // Request parameters (repo, extra params, prefixes).
    opt: Options,
    // Refs advertised by the server, collected in `prepare_fetch`.
    out: Vec<Ref>,
}

impl LsRefs {
    /// Create a delegate for the given options, with an empty result set.
    pub fn new(opt: Options) -> Self {
        Self {
            opt,
            out: Vec::new(),
        }
    }
}
+

+
impl DelegateBlocking for LsRefs {
    fn handshake_extra_parameters(&self) -> Vec<(String, Option<String>)> {
        self.opt.extra_params.clone()
    }

    // Emit one `ref-prefix` argument per configured prefix, namespaced if
    // the server requires the workaround (see `must_namespace`).
    fn prepare_ls_refs(
        &mut self,
        caps: &client::Capabilities,
        args: &mut Vec<BString>,
        _: &mut Vec<(&str, Option<&str>)>,
    ) -> io::Result<LsRefsAction> {
        let must_namespace = must_namespace(caps);
        for prefix in &self.opt.ref_prefixes {
            let mut arg = BString::from("ref-prefix ");
            if must_namespace {
                arg.push_str("refs/namespaces/");
                arg.push_str(&self.opt.repo);
                arg.push_char('/');
            }
            arg.push_str(prefix);
            args.push(arg)
        }
        Ok(LsRefsAction::Continue)
    }

    // `ls-refs` is all we want: capture the advertised refs, then cancel
    // before any `fetch` command is sent.
    fn prepare_fetch(
        &mut self,
        _: git_protocol::transport::Protocol,
        _: &client::Capabilities,
        _: &mut Vec<(&str, Option<&str>)>,
        refs: &[Ref],
    ) -> io::Result<Action> {
        self.out.extend_from_slice(refs);
        Ok(Action::Cancel)
    }

    // Never reached: `prepare_fetch` always cancels.
    fn negotiate(
        &mut self,
        _: &[Ref],
        _: &mut Arguments,
        _: Option<&Response>,
    ) -> io::Result<Action> {
        unreachable!("`negotiate` called even though no `fetch` command was sent")
    }
}
+

+
#[async_trait(?Send)]
impl Delegate for LsRefs {
    // Never reached: `prepare_fetch` cancels before a pack is requested.
    async fn receive_pack(
        &mut self,
        _: impl AsyncBufRead + Unpin + 'async_trait,
        _: impl Progress,
        _: &[Ref],
        _: &Response,
    ) -> io::Result<()> {
        unreachable!("`receive_pack` called even though no `fetch` command was sent")
    }
}
+

+
/// Run a stateless `ls-refs` command over the given transport halves and
/// return the refs the server advertised.
pub async fn ls_refs<R, W>(opt: Options, recv: R, send: W) -> io::Result<Vec<Ref>>
where
    R: AsyncRead + Unpin,
    W: AsyncWrite + Unpin,
{
    let mut conn = transport::Stateless::new(opt.repo.clone(), recv, send);
    let mut delegate = LsRefs::new(opt);
    git_protocol::fetch(
        &mut conn,
        &mut delegate,
        // No credentials are ever requested over this transport.
        |_| unreachable!("credentials helper requested"),
        progress::Discard,
        git_protocol::FetchConnection::AllowReuse,
    )
    .await
    .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

    Ok(delegate.out)
}
added link-git/src/protocol/packwriter.rs
@@ -0,0 +1,271 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    io,
+
    path::{Path, PathBuf},
+
    sync::{
+
        atomic::{AtomicBool, Ordering},
+
        Arc,
+
    },
+
};
+

+
use futures_lite::io::{AsyncBufRead, BlockOn};
+
use git_features::progress::Progress;
+
use git_hash::ObjectId;
+
use git_odb::{self as odb, pack};
+

+
use super::take::TryTake;
+

+
#[cfg(feature = "git2")]
+
pub use libgit::Libgit;
+

+
/// What to do with the `packfile` response.
///
/// _This is mostly the same as [`git_protocol::fetch::Delegate`], but without
/// incurring the [`git_protocol::fetch::DelegateBlocking`] super-trait
/// constraint. We can simply make [`crate::protocol::fetch::Fetch`] parametric
/// over the packfile sink._
pub trait PackWriter {
    /// Summary information about the written pack, implementation-specific.
    type Output;

    /// Consume the (async) pack stream to completion, blocking the calling
    /// thread until done or failed.
    fn write_pack(
        &self,
        pack: impl AsyncBufRead + Unpin,
        progress: impl Progress,
    ) -> io::Result<Self::Output>;
}
+

+
/// Tunables shared by the [`PackWriter`] implementations in this module.
#[derive(Clone, Copy, Debug)]
pub struct Options {
    /// How many threads the packfile indexer is allowed to spawn. `None` means
    /// unlimited.
    pub max_indexer_threads: Option<usize>,
    /// The maximum size in bytes of the packfile.
    ///
    /// If the remote sends a larger file, the transfer will be aborted.
    pub max_pack_bytes: u64,
}

impl Default for Options {
    // Conservative default: a single indexer thread, no size cap.
    fn default() -> Self {
        Self {
            max_indexer_threads: Some(1),
            max_pack_bytes: u64::MAX,
        }
    }
}
+

+
#[cfg(feature = "git2")]
pub mod libgit {
    //! [`PackWriter`] backed by `libgit2`'s object database pack writer.

    use super::*;

    /// Summary of a received pack, as reported by `libgit2`'s indexer.
    #[derive(Clone, Copy, Debug)]
    pub struct PackReceived {
        pub objects: usize,
        pub local_objects: usize,
        pub deltas: usize,
    }

    impl From<git2::Progress<'_>> for PackReceived {
        fn from(p: git2::Progress<'_>) -> Self {
            Self {
                objects: p.indexed_objects(),
                local_objects: p.local_objects(),
                deltas: p.indexed_deltas(),
            }
        }
    }

    /// [`PackWriter`] which writes into a [`git2::Repository`]'s odb.
    pub struct Libgit {
        opt: Options,
        repo: git2::Repository,
        // Cooperative cancellation flag, checked before each blocking phase.
        stop: Arc<AtomicBool>,
    }

    impl Libgit {
        pub fn new(opt: Options, repo: git2::Repository, stop: Arc<AtomicBool>) -> Self {
            Self { opt, repo, stop }
        }

        // Fail fast with `Interrupted` if cancellation was requested.
        fn guard_cancelled(&self) -> io::Result<()> {
            if self.stop.load(Ordering::Acquire) {
                Err(io::Error::new(io::ErrorKind::Interrupted, "cancelled"))
            } else {
                Ok(())
            }
        }
    }

    impl PackWriter for Libgit {
        // `None` if the progress callback was never invoked.
        type Output = Option<PackReceived>;

        fn write_pack(
            &self,
            pack: impl AsyncBufRead + Unpin,
            _: impl Progress,
        ) -> io::Result<Self::Output> {
            let mut out = None;

            let odb = self.repo.odb().map_err(io_error)?;
            let mut writer = odb.packwriter().map_err(io_error)?;

            // Cancellation is only observed between phases, not mid-copy.
            self.guard_cancelled()?;
            // Drive the async pack stream synchronously, capped at
            // `max_pack_bytes` via `TryTake`.
            io::copy(
                &mut BlockOn::new(TryTake::new(pack, self.opt.max_pack_bytes)),
                &mut writer,
            )?;

            self.guard_cancelled()?;
            // The progress callback captures the final indexing stats as a
            // side effect of `commit`.
            writer
                .progress(|p| {
                    out = Some(p.to_owned());
                    true
                })
                .commit()
                .map(|_| ())
                .map_err(io_error)?;
            // Convince borrowchk that `out` can not possibly be borrowed anymore
            drop(writer);

            Ok(out.map(Into::into))
        }
    }

    // Adapt `git2::Error` to `io::Error` for the `PackWriter` interface.
    fn io_error(e: git2::Error) -> io::Error {
        io::Error::new(io::ErrorKind::Other, e)
    }
}
+

+
/// Outcome of the default [`Standard`] pack writer, as reported by `gitoxide`.
pub type PackReceived = pack::bundle::write::Outcome;
+

+
/// A lookup function to help "thicken" thin packs by finding missing base
/// objects.
///
/// The impl provided for [`odb::linked::Store`] does not use any pack caching.
pub trait Thickener {
    /// Look up `id` in the local object database, decoding into `buf`.
    /// Returns `None` if the object is not found (or decoding fails).
    fn find_object<'a>(&self, id: ObjectId, buf: &'a mut Vec<u8>)
        -> Option<pack::data::Object<'a>>;
}

impl Thickener for odb::linked::Store {
    fn find_object<'a>(
        &self,
        id: ObjectId,
        buf: &'a mut Vec<u8>,
    ) -> Option<pack::data::Object<'a>> {
        use git_odb::FindExt as _;

        // `cache::Never`: no pack caching, as documented on the trait.
        self.find(id, buf, &mut pack::cache::Never).ok()
    }
}
+

+
/// A factory spewing out new [`Thickener`]s with static lifetimes.
///
/// `gitoxide` doesn't currently allow us to initialise thickening lazily (the
/// pack file may not be thin after all), but requires a static lookup function.
/// Instead of initialising a new [`odb::linked::Store`] for every pack stream,
/// users may share a pre-initialised object database provided appropriate
/// thread safety measures.
pub trait BuildThickener {
    /// Error raised while constructing the thickener.
    type Error: std::error::Error + Send + Sync + 'static;
    /// The thickener type produced.
    type Thick: Thickener + 'static;

    fn build_thickener(&self) -> Result<Self::Thick, Self::Error>;
}

/// [`BuildThickener`] which opens a fresh [`odb::linked::Store`] from a git
/// directory for every pack stream.
pub struct StandardThickener {
    git_dir: PathBuf,
}

impl StandardThickener {
    pub fn new(git_dir: impl Into<PathBuf>) -> Self {
        let git_dir = git_dir.into();
        Self { git_dir }
    }
}

impl BuildThickener for StandardThickener {
    type Error = odb::linked::init::Error;
    type Thick = odb::linked::Store;

    fn build_thickener(&self) -> Result<Self::Thick, Self::Error> {
        // The object database lives under `<git_dir>/objects`.
        odb::linked::Store::at(self.git_dir.join("objects"))
    }
}
+

+
/// The default [`PackWriter`].
///
/// Writes the packfile into the given output directory, along with a v2
/// index. The packfile is verified.
pub struct Standard<F> {
    git_dir: PathBuf,
    opt: Options,
    // Factory for the thin-pack base-object lookup; see `BuildThickener`.
    thick: F,
    // Interrupt flag handed to `Bundle::write_to_directory` in `write_pack`.
    stop: Arc<AtomicBool>,
}

impl<F> Standard<F> {
    pub fn new(git_dir: impl AsRef<Path>, opt: Options, thick: F, stop: Arc<AtomicBool>) -> Self {
        Self {
            git_dir: git_dir.as_ref().to_owned(),
            opt,
            thick,
            stop,
        }
    }
}

impl<F> Drop for Standard<F> {
    // Dropping the writer requests interruption of any in-flight pack write
    // via the shared flag.
    fn drop(&mut self) {
        self.stop.store(true, Ordering::Release);
    }
}
+

+
impl<F: BuildThickener> PackWriter for Standard<F> {
    type Output = PackReceived;

    fn write_pack(
        &self,
        pack: impl AsyncBufRead + Unpin,
        prog: impl Progress,
    ) -> io::Result<Self::Output> {
        use pack::{bundle::write::Options, data::input::Mode, index::Version, Bundle};

        let opts = Options {
            thread_limit: self.opt.max_indexer_threads,
            index_kind: Version::V2,
            // Verify the pack as it is written.
            iteration_mode: Mode::Verify,
        };
        // Built eagerly: gitoxide requires the base-object lookup up front,
        // even if the pack turns out not to be thin (see `BuildThickener`).
        let thickener = self
            .thick
            .build_thickener()
            .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
        Bundle::write_to_directory(
            // Drive the async stream synchronously, capped at
            // `max_pack_bytes` via `TryTake`.
            BlockOn::new(TryTake::new(pack, self.opt.max_pack_bytes)),
            Some(self.git_dir.join("objects").join("pack")),
            prog,
            // Interrupt flag, set by `Drop` above.
            &self.stop,
            Some(Box::new(move |oid, buf| thickener.find_object(oid, buf))),
            opts,
        )
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e))
    }
}
+

+
/// No-op [`PackWriter`] which just drains the input.
pub struct Discard;

impl PackWriter for Discard {
    /// Number of bytes drained.
    type Output = u64;

    fn write_pack(
        &self,
        pack: impl AsyncBufRead + Unpin,
        _: impl Progress,
    ) -> io::Result<Self::Output> {
        // Note: no `TryTake` here — the input is drained without a size cap.
        io::copy(&mut BlockOn::new(pack), &mut io::sink())
    }
}
added link-git/src/protocol/take.rs
@@ -0,0 +1,76 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    io,
+
    pin::Pin,
+
    task::{Context, Poll},
+
};
+

+
use futures_lite::io::{AsyncBufRead, AsyncRead};
+

+
/// Like [`futures_lite::io::Take`], but returns an error if and when the
/// `limit` is exceeded.
///
/// Note that, unlike [`futures_lite::io::Take`], if a single poll reads past
/// the limit, the excess bytes are _not_ discarded. Instead, an error is
/// returned on the next poll.
pub struct TryTake<R> {
    // Remaining byte budget; decremented as data is read.
    limit: u64,
    inner: R,
}

impl<R> TryTake<R> {
    /// Wrap `inner`, permitting at most `limit` bytes to be read.
    pub fn new(inner: R, limit: u64) -> Self {
        Self { limit, inner }
    }
}
+

+
impl<R> AsyncRead for TryTake<R>
where
    R: AsyncRead + Unpin,
{
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context,
        buf: &mut [u8],
    ) -> Poll<Result<usize, io::Error>> {
        // Budget exhausted by a previous read: error out now (the excess
        // bytes of that previous read were still delivered, per the type
        // docs).
        if self.limit == 0 {
            return Poll::Ready(Err(io::Error::new(
                io::ErrorKind::Other,
                "max input size exceeded",
            )));
        }

        let this = self.get_mut();
        Pin::new(&mut this.inner).poll_read(cx, buf).map(|ready| {
            // Deduct what was actually read; saturating so an over-sized
            // read clamps the budget to zero rather than underflowing.
            if let Ok(siz) = ready {
                this.limit = this.limit.saturating_sub(siz as u64);
            }

            ready
        })
    }
}
+

+
impl<R> AsyncBufRead for TryTake<R>
+
where
+
    R: AsyncBufRead + Unpin,
+
{
+
    fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<&[u8], io::Error>> {
+
        if self.limit == 0 {
+
            return Poll::Ready(Err(io::Error::new(
+
                io::ErrorKind::Other,
+
                "max input size exceeded",
+
            )));
+
        }
+

+
        Pin::new(&mut self.get_mut().inner).poll_fill_buf(cx)
+
    }
+

+
    fn consume(self: Pin<&mut Self>, amt: usize) {
+
        Pin::new(&mut self.get_mut().inner).consume(amt)
+
    }
+
}
added link-git/src/protocol/transport.rs
@@ -0,0 +1,84 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use bstr::BString;
+
use futures_lite::io::{AsyncRead, AsyncWrite};
+
use git_protocol::transport::{
+
    client::{
+
        self,
+
        git::{ConnectMode, Connection},
+
        SetServiceResponse,
+
        Transport,
+
        TransportWithoutIO,
+
    },
+
    Protocol,
+
    Service,
+
};
+

+
/// Protocol-v2, stateless client [`Transport`] over arbitrary async I/O
/// halves, wrapping a daemon-style [`Connection`].
pub struct Stateless<R, W> {
    inner: Connection<R, W>,
}

impl<R, W> Stateless<R, W>
where
    R: AsyncRead + Unpin,
    W: AsyncWrite + Unpin,
{
    /// Create a transport for the (logical) repository `repo` over the given
    /// receive/send halves.
    pub fn new(repo: BString, recv: R, send: W) -> Self {
        // Cosmetic URL under the custom `rad://` scheme, reported via
        // `to_url`.
        let url = format!("rad://{}", repo);
        let inner = Connection::new(
            recv,
            send,
            Protocol::V2,
            repo,
            // No `host` header is sent.
            None::<(String, Option<u16>)>,
            ConnectMode::Daemon,
        )
        .custom_url(Some(url));

        Self { inner }
    }
}
+

+
impl<R, W> TransportWithoutIO for Stateless<R, W>
where
    R: AsyncRead + Unpin,
    W: AsyncWrite + Unpin,
{
    // Delegate request framing to the inner daemon connection.
    fn request(
        &mut self,
        write_mode: client::WriteMode,
        on_into_read: client::MessageKind,
    ) -> Result<client::RequestWriter<'_>, client::Error> {
        self.inner.request(write_mode, on_into_read)
    }

    fn to_url(&self) -> String {
        self.inner.to_url()
    }

    // This transport only speaks protocol v2.
    fn supported_protocol_versions(&self) -> &[Protocol] {
        &[Protocol::V2]
    }

    // Stateless: each request stands on its own.
    fn connection_persists_across_multiple_requests(&self) -> bool {
        false
    }
}
+

+
#[async_trait(?Send)]
impl<R, W> Transport for Stateless<R, W>
where
    R: AsyncRead + Unpin,
    W: AsyncWrite + Unpin,
{
    // Delegate the service handshake to the inner daemon connection.
    async fn handshake<'a>(
        &mut self,
        service: Service,
        extra_parameters: &'a [(&'a str, Option<&'a str>)],
    ) -> Result<SetServiceResponse<'_>, client::Error> {
        self.inner.handshake(service, extra_parameters).await
    }
}
added link-git/src/protocol/upload_pack.rs
@@ -0,0 +1,238 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{future::Future, io, path::Path, process::ExitStatus, str::FromStr};
+

+
use async_process::{Command, Stdio};
+
use futures_lite::io::{copy, AsyncBufReadExt as _, AsyncRead, AsyncWrite, BufReader};
+
use futures_util::try_join;
+
use git_packetline::{self as packetline, PacketLineRef};
+
use once_cell::sync::Lazy;
+
use versions::Version;
+

+
mod legacy;
+

+
/// Parsed `git-upload-pack` request header, as sent ahead of the protocol
/// exchange: `git-upload-pack <path>\0[host=<host>[:<port>]]\0[\0<k>[=<v>]\0...]`.
#[derive(Debug, PartialEq, Eq)]
pub struct Header {
    /// Repository path requested by the client.
    pub path: String,
    /// Optional `host=` field, split into host name and optional port.
    pub host: Option<(String, Option<u16>)>,
    /// Any extra `key[=value]` parameters following the double-NUL.
    pub extra: Vec<(String, Option<String>)>,
}

impl FromStr for Header {
    type Err = &'static str;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Only the `git-upload-pack` service is understood.
        let rest = s
            .strip_prefix("git-upload-pack ")
            .ok_or("unsupported service")?;
        let mut fields = rest.split_terminator('\0');

        // First NUL-terminated field: the non-empty repository path.
        let path = match fields.next() {
            None => return Err("missing path"),
            Some("") => return Err("empty path"),
            Some(p) => p.to_owned(),
        };

        // Second field, if any: `host=<name>[:<port>]`.
        let host = match fields.next() {
            None | Some("") => None,
            Some(field) => {
                let h = field.strip_prefix("host=").ok_or("invalid host")?;
                match h.split_once(':') {
                    None => Some((h.to_owned(), None)),
                    Some((name, port)) => {
                        let port = port.parse::<u16>().or(Err("invalid port"))?;
                        Some((name.to_owned(), Some(port)))
                    },
                }
            },
        };

        // Remaining fields (after the empty separator field) are extra
        // `key[=value]` parameters.
        let extra = fields
            .skip_while(|field| field.is_empty())
            .map(|field| match field.split_once('=') {
                None => (field.to_owned(), None),
                Some((k, v)) => (k.to_owned(), Some(v.to_owned())),
            })
            .collect();

        Ok(Self { path, host, extra })
    }
}
+

+
/// Serve one `upload-pack` exchange over the given I/O halves.
///
/// Reads and parses the request [`Header`] (tolerating legacy clients which
/// send it as a bare line rather than a pkt-line), then returns the header
/// together with a future which, when awaited, drives a spawned
/// `git upload-pack --stateless-rpc` process to completion and yields its
/// exit status.
pub async fn upload_pack<R, W>(
    git_dir: impl AsRef<Path>,
    recv: R,
    mut send: W,
) -> io::Result<(Header, impl Future<Output = io::Result<ExitStatus>>)>
where
    R: AsyncRead + Unpin,
    W: AsyncWrite + Unpin,
{
    let mut recv = BufReader::new(recv);
    // Sniff the first byte: a pkt-line header starts with a hex length, so a
    // literal 'g' (from "git-upload-pack ...") identifies a legacy client.
    let header: Header = match recv.fill_buf().await?.first() {
        // legacy clients don't send a proper pktline header :(
        Some(b'g') => {
            let mut buf = String::with_capacity(256);
            recv.read_line(&mut buf).await?;
            buf.parse().map_err(invalid_data)?
        },
        Some(_) => {
            let mut pktline = packetline::StreamingPeekableIter::new(recv, &[]);
            // Two `map_err`s: `read_line` yields nested Results (I/O error,
            // then pkt-line decode error).
            let pkt = pktline
                .read_line()
                .await
                .ok_or_else(|| invalid_data("missing header"))?
                .map_err(invalid_data)?
                .map_err(invalid_data)?;
            let hdr = match pkt {
                PacketLineRef::Data(data) => std::str::from_utf8(data)
                    .map_err(invalid_data)?
                    .parse()
                    .map_err(invalid_data),
                _ => Err(invalid_data("not a header packet")),
            }?;
            // Reclaim the buffered reader so the remaining stream can be
            // piped to the child process verbatim.
            recv = pktline.into_inner();

            hdr
        },
        None => {
            return Err(io::Error::new(
                io::ErrorKind::UnexpectedEof,
                "expected header",
            ))
        },
    };

    let namespace = header
        .path
        // legacy clients redundantly send a full URN
        .strip_prefix("rad:git:")
        .map(ToOwned::to_owned)
        .unwrap_or_else(|| header.path.clone());
    // Protocol version requested via the `version` extra parameter;
    // anything unrecognised (or absent) degrades to v0.
    let protocol_version = header
        .extra
        .iter()
        .find_map(|kv| match kv {
            (ref k, Some(v)) if k == "version" => {
                let version = match v.as_str() {
                    "2" => 2,
                    "1" => 1,
                    _ => 0,
                };
                Some(version)
            },
            _ => None,
        })
        .unwrap_or(0);
    // legacy
    let stateless_ls = header.extra.iter().any(|(k, _)| k == "ls");

    let fut = async move {
        // Pre-v2 with the legacy `ls` parameter: just advertise refs and
        // return. For v2, advertise our capabilities before handing the
        // stream to `git upload-pack`.
        if protocol_version < 2 {
            if stateless_ls {
                return legacy::advertise_refs(git_dir, &namespace, recv, send).await;
            }
        } else {
            advertise_capabilities(&mut send).await?;
        }

        let mut child = {
            let mut cmd = Command::new("git");
            cmd.current_dir(git_dir)
                // Scrub the environment, keeping only PATH and git tracing
                // knobs, then pin protocol version and namespace.
                .env_clear()
                .envs(
                    std::env::vars()
                        .filter(|(key, _)| key == "PATH" || key.starts_with("GIT_TRACE")),
                )
                .env("GIT_PROTOCOL", format!("version={}", protocol_version))
                .env("GIT_NAMESPACE", namespace)
                .args(&[
                    "-c",
                    "uploadpack.allowanysha1inwant=true",
                    "-c",
                    "uploadpack.allowrefinwant=true",
                    "-c",
                    "lsrefs.unborn=ignore",
                    "upload-pack",
                    "--strict",
                    "--stateless-rpc",
                    ".",
                ])
                .stdout(Stdio::piped())
                .stdin(Stdio::piped())
                .stderr(Stdio::inherit())
                // Ensure the child doesn't outlive this future.
                .kill_on_drop(true)
                .reap_on_drop(true)
                .spawn()?
        };

        let mut stdin = child.stdin.take().unwrap();
        let mut stdout = child.stdout.take().unwrap();

        // Pump client -> child stdin and child stdout -> client concurrently
        // while waiting for the child to exit; fail on the first error.
        try_join!(
            copy(&mut recv, &mut stdin),
            copy(&mut stdout, &mut send),
            child.status(),
        )
        .map(|(_, _, status)| status)
    };

    Ok((header, fut))
}
+

+
async fn advertise_capabilities<W>(mut send: W) -> io::Result<()>
+
where
+
    W: AsyncWrite + Unpin,
+
{
+
    // Thou shallt not upgrade your `git` installation while a link instance is
+
    // running!
+
    static GIT_VERSION: Lazy<Version> = Lazy::new(|| git_version().unwrap());
+
    static AGENT: Lazy<Vec<u8>> = Lazy::new(|| format!("agent=git/{}", *GIT_VERSION).into_bytes());
+
    static CAPABILITIES: Lazy<[&[u8]; 4]> = Lazy::new(|| {
+
        [
+
            b"version 2",
+
            AGENT.as_slice(),
+
            b"object-format=sha1",
+
            b"fetch=ref-in-want",
+
        ]
+
    });
+

+
    for cap in *CAPABILITIES {
+
        packetline::encode::text_to_write(cap, &mut send).await?;
+
    }
+
    packetline::encode::flush_to_write(&mut send).await?;
+

+
    Ok(())
+
}
+

+
fn git_version() -> io::Result<Version> {
+
    let out = std::process::Command::new("git")
+
        .arg("--version")
+
        .output()?;
+
    if !out.status.success() {
+
        return Err(io::Error::new(
+
            io::ErrorKind::Other,
+
            "failed to read `git` version",
+
        ));
+
    }
+

+
    // parse: git version 2.30.1 <other optional tokens>
+
    out.stdout
+
        .split(|x| x == &b' ')
+
        .nth(2)
+
        .and_then(|s| {
+
            let s = std::str::from_utf8(s).ok()?;
+
            Version::new(s.trim())
+
        })
+
        .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "failed to parse `git` version"))
+
}
+

+
/// Shorthand for building an [`io::Error`] of kind
/// [`io::ErrorKind::InvalidData`] from any boxable error payload.
fn invalid_data<E>(inner: E) -> io::Error
where
    E: Into<Box<dyn std::error::Error + Sync + Send>>,
{
    use io::ErrorKind::InvalidData;

    io::Error::new(InvalidData, inner)
}
added link-git/src/protocol/upload_pack/legacy.rs
@@ -0,0 +1,105 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
// Copyright © 2021      The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{io, path::Path, process::ExitStatus};
+

+
use async_process::{Command, Stdio};
+
use futures_lite::io::{copy, AsyncRead, AsyncReadExt as _, AsyncWrite, AsyncWriteExt as _};
+
use futures_util::try_join;
+
use git_ref::{
+
    file::{Store as Refdb, WriteReflog},
+
    FullName,
+
    Reference,
+
};
+

+
/// Serve a protocol v0/v1 ref advertisement (`--advertise-refs`) for the given
/// namespace, proxying `git upload-pack` over the `recv`/`send` streams.
///
/// All refs are hidden except those under `refs/namespaces/<namespace>`, plus
/// any refs (in any namespace) whose trailing path components match
/// `rad/ids/<anything>`, which are explicitly un-hidden.
pub(super) async fn advertise_refs<R, W>(
    git_dir: impl AsRef<Path>,
    namespace: &str,
    mut recv: R,
    mut send: W,
) -> io::Result<ExitStatus>
where
    R: AsyncRead + Unpin,
    W: AsyncWrite + Unpin,
{
    // Scan the refdb on a blocking thread: collect refs below
    // `refs/namespaces/<namespace>/refs` which should be un-hidden.
    let unhide = blocking::unblock({
        let git_dir = git_dir.as_ref().to_path_buf();
        let prefix = Path::new("refs")
            .join("namespaces")
            .join(namespace)
            .join("refs");
        move || -> io::Result<Vec<FullName>> {
            let refdb = Refdb::at(git_dir, WriteReflog::Disable);
            let packed = refdb
                .packed_buffer()
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

            let refs = refdb
                .iter_prefixed(packed.as_ref(), prefix)?
                .filter_map(|r| r.ok().map(|Reference { name, .. }| name))
                .filter(|name| {
                    const PATTERN: &[u8] = b"rad/ids/any";
                    const SEPARAT: u8 = b'/';
                    // Compare path components right-to-left, skipping the last
                    // one: this keeps refs of the form `…/rad/ids/<anything>`
                    // (the final component is a wildcard).
                    name.as_bstr()
                        .rsplit(|b| b == &SEPARAT)
                        .zip(PATTERN.rsplit(|b| b == &SEPARAT))
                        .skip(1)
                        .all(|(a, b)| a == b)
                })
                .collect::<Vec<_>>();

            Ok(refs)
        }
    })
    .await?;

    // Spawn `git upload-pack` with a scrubbed environment; only `PATH` and
    // `GIT_TRACE*` are passed through.
    let mut child = {
        let mut cmd = Command::new("git");
        cmd.current_dir(git_dir)
            .env_clear()
            .envs(std::env::vars().filter(|(key, _)| key == "PATH" || key.starts_with("GIT_TRACE")))
            .arg("-c")
            .arg("uploadpack.hiderefs=refs/")
            .arg("-c")
            .arg(format!(
                "uploadpack.hiderefs=!refs/namespaces/{}",
                namespace
            ));

        // Un-hide the `rad/ids` refs collected above (`!` negates hiding).
        for r in unhide {
            cmd.arg("-c")
                .arg(format!("uploadpack.hiderefs=!{}", r.as_bstr()));
        }

        cmd.args(&[
            "upload-pack",
            "--strict",
            "--timeout=5",
            "--stateless-rpc",
            "--advertise-refs",
            ".",
        ])
        .stdout(Stdio::piped())
        .stderr(Stdio::inherit())
        .kill_on_drop(true)
        .reap_on_drop(true)
        .spawn()?
    };
    let mut stdout = child.stdout.take().unwrap();

    // Smart-HTTP style preamble: pkt-line `# service=git-upload-pack` followed
    // by a flush-pkt, before the child's advertisement is relayed.
    const HEADER: &[u8] = b"001e# service=git-upload-pack\n0000";
    send.write_all(HEADER).await?;
    let status = try_join!(copy(&mut stdout, &mut send), child.status()).map(|x| x.1);

    // Read one byte off the read stream to ensure it is driven to completion
    // (we expect EOF immediately). Failure to do so may cause resource leaks.
    //
    // Cf. 900b6cf6 (replication: Ensure git stream is closed, 2021-04-26)
    let mut buf = [0; 1];
    recv.read(&mut buf).await?;

    status
}
added link-git/src/refs.rs
@@ -0,0 +1,7 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
/// Refdb built on top of `git_ref`, with a shareable `packed-refs` snapshot.
pub mod db;
// Re-export the underlying `git_ref` API wholesale.
pub use git_ref::*;
added link-git/src/refs/db.rs
@@ -0,0 +1,269 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    collections::BTreeSet,
+
    convert::TryInto,
+
    io,
+
    path::{Path, PathBuf},
+
    sync::Arc,
+
    time::{Duration, SystemTime},
+
};
+

+
use git_ref::{
+
    file::{self, iter::LooseThenPacked, Transaction, WriteReflog},
+
    packed,
+
    FullName,
+
    PartialNameRef,
+
    Reference,
+
    Target,
+
};
+
use parking_lot::RwLock;
+

+
/// Error types for the refdb.
pub mod error {
    use super::*;
    use thiserror::Error;

    /// Errors which may occur when opening a `Refdb`.
    #[derive(Debug, Error)]
    pub enum Open {
        #[error("failed to take a snapshot of packed-refs")]
        Snapshot(#[from] Snapshot),

        #[error(transparent)]
        Io(#[from] io::Error),
    }

    /// Errors which may occur when taking a snapshot of the refdb.
    #[derive(Debug, Error)]
    pub enum Snapshot {
        #[error("failed to lock packed-refs")]
        Lock(#[from] git_lock::acquire::Error),

        #[error("failed to open packed-refs")]
        Open(#[from] packed::buffer::open::Error),

        #[error(transparent)]
        Io(#[from] io::Error),
    }

    /// Errors which may occur when following a symbolic reference.
    #[derive(Debug, Error)]
    pub enum Follow {
        #[error("cyclic symref: {0:?}")]
        Cycle(FullName),

        #[error("reference {0:?} not found")]
        NotFound(FullName),

        #[error("max symref depth {0} exceeded")]
        DepthLimitExceeded(usize),

        #[error(transparent)]
        Find(#[from] file::find::Error),
    }
}
+

+
/// Threadsafe refdb with shareable `packed-refs` memory buffer.
///
/// Packed refs are a delicate business: they are written by an external
/// process, [`git-pack-refs`], _or_ when a packed ref is deleted. It may also
/// be that no `packed-refs` currently exist.
///
/// The only way we can be certain to operate on a consistent view of what is
/// committed to disk is to check if the `packed-refs` file has changed since we
/// last read it. This would be quite expensive to do for small operations.
/// Thus, the caller is responsible for determining just how much they can
/// afford to see possibly out-of-date data: the [`Refdb::snapshot`] method
/// checks if the previously loaded `packed-refs` appear to be out-of-date, and
/// reloads them if necessary. The resulting [`Snapshot`] contains a pointer to
/// an immutable memory buffer of the packed refs which can be shared between
/// threads, or cloned.
///
/// [`git-pack-refs`]: https://git-scm.com/docs/git-pack-refs
#[derive(Clone)]
pub struct Refdb {
    // Loose ref store rooted at the git dir.
    store: file::Store,
    // Lazily (re)loaded `packed-refs` buffer; `None` if the file is absent.
    packed: Arc<RwLock<Option<Packed>>>,
}
+

+
impl Refdb {
    /// Open the refdb at `git_dir`, eagerly loading `packed-refs` (if any).
    pub fn open(git_dir: impl Into<PathBuf>) -> Result<Self, error::Open> {
        let store = file::Store::at(git_dir, WriteReflog::Normal);
        let packed = Arc::new(RwLock::new(Packed::open(store.packed_refs_path())?));
        Ok(Self { store, packed })
    }

    /// Obtain a [`Snapshot`], reloading `packed-refs` from disk only if the
    /// in-memory copy appears to be out-of-date.
    pub fn snapshot(&self) -> Result<Snapshot, error::Snapshot> {
        let read = self.packed.read();
        match &*read {
            None => {
                drop(read);
                // always modified, because it was None and now is Some
                self.reload(|_| true)
            },

            Some(packed) => {
                if packed.is_modified()? {
                    let mtime = packed.mtime;
                    // Release the read lock before taking the write lock in
                    // `reload` to avoid deadlocking ourselves.
                    drop(read);
                    // we don't care what the mtime is, only that we have a
                    // different value than before
                    self.reload(|packed1| packed1.mtime != mtime)
                } else {
                    Ok(Snapshot {
                        store: self.store.clone(),
                        packed: Some(packed.buf.clone()),
                    })
                }
            },
        }
    }

    /// Re-read `packed-refs` under the write lock.
    ///
    /// `modified_while_blocked` receives the current in-memory value (if any)
    /// and returns `true` if another thread already reloaded it while we were
    /// waiting for the write lock; in that case the current value is served
    /// without touching the disk again.
    fn reload<F>(&self, modified_while_blocked: F) -> Result<Snapshot, error::Snapshot>
    where
        F: FnOnce(&Packed) -> bool,
    {
        let mut write = self.packed.write();
        if let Some(packed) = &*write {
            if modified_while_blocked(packed) {
                return Ok(Snapshot {
                    store: self.store.clone(),
                    packed: Some(packed.buf.clone()),
                });
            }
        }

        match Packed::open(self.store.packed_refs_path())? {
            Some(packed) => {
                let buf = packed.buf.clone();
                *write = Some(packed);
                Ok(Snapshot {
                    store: self.store.clone(),
                    packed: Some(buf),
                })
            },

            None => {
                // The file disappeared (e.g. all packed refs were deleted).
                *write = None;
                Ok(Snapshot {
                    store: self.store.clone(),
                    packed: None,
                })
            },
        }
    }
}
+

+
/// A consistent view of the refdb at one point in time; cheap to clone.
#[derive(Clone)]
pub struct Snapshot {
    store: file::Store,
    // `None` if no `packed-refs` file existed when the snapshot was taken.
    packed: Option<Arc<packed::Buffer>>,
}
+

+
impl Snapshot {
    /// Look up a reference by (partial) name across loose and packed refs.
    ///
    /// Returns `Ok(None)` if no such reference exists.
    pub fn find<'a, N, E>(&self, name: N) -> Result<Option<Reference>, file::find::Error>
    where
        N: TryInto<PartialNameRef<'a>, Error = E>,
        file::find::Error: From<E>,
    {
        self.store.try_find(name, self.packed.as_deref())
    }

    /// Begin a ref transaction against the underlying store.
    pub fn transaction(&self) -> Transaction {
        self.store.transaction()
    }

    /// Iterate loose-then-packed refs, optionally restricted to `prefix`.
    pub fn iter(&self, prefix: Option<impl AsRef<Path>>) -> io::Result<LooseThenPacked> {
        let packed = self.packed.as_deref();
        match prefix {
            None => self.store.iter(packed),
            Some(p) => self.store.iter_prefixed(packed, p),
        }
    }

    /// Follow a symbolic reference until a direct reference is found.
    ///
    /// If `symref` is a direct reference, a copy of it is returned. No more
    /// than five symbolic references will be followed, and cyclic
    /// references are detected. Both result in an error to be returned.
    ///
    /// Note that following is not the same as "peeling": no access to the
    /// object database is made, and thus no assumptions about the kind of
    /// object the reference ultimately points to can be made.
    pub fn follow(&self, symref: &Reference) -> Result<Reference, error::Follow> {
        match &symref.target {
            Target::Peeled(_) => Ok(symref.clone()),
            Target::Symbolic(name) => {
                // Every name visited so far, for cycle detection; its size is
                // also the number of hops taken, for the depth limit below.
                let mut seen = BTreeSet::new();
                seen.insert(symref.name.clone());

                let mut next = self
                    .find(name.to_partial())?
                    .ok_or_else(|| error::Follow::NotFound(name.clone()))?;
                seen.insert(name.clone());

                const MAX_DEPTH: usize = 5;
                loop {
                    match next.target {
                        Target::Peeled(_) => return Ok(next),
                        Target::Symbolic(sym) => {
                            if seen.len() + 1 > MAX_DEPTH {
                                return Err(error::Follow::DepthLimitExceeded(MAX_DEPTH));
                            }

                            if seen.contains(&sym) {
                                return Err(error::Follow::Cycle(sym));
                            }
                            next = self
                                .find(sym.to_partial())?
                                .ok_or_else(|| error::Follow::NotFound(sym.clone()))?;
                            seen.insert(sym);
                        },
                    }
                }
            },
        }
    }
}
+

+
/// In-memory copy of the `packed-refs` file plus the metadata needed to
/// detect staleness.
struct Packed {
    buf: Arc<packed::Buffer>,
    // Path the buffer was read from.
    path: PathBuf,
    // mtime of the file at the point `buf` was loaded.
    mtime: SystemTime,
}
+

+
impl Packed {
+
    fn open(path: PathBuf) -> Result<Option<Self>, error::Snapshot> {
+
        use git_lock::{acquire, Marker};
+

+
        let _lock = Marker::acquire_to_hold_resource(
+
            &path,
+
            acquire::Fail::AfterDurationWithBackoff(Duration::from_millis(500)),
+
            None,
+
        )?;
+
        match path.metadata() {
+
            // `git-lock` will happily lock a non-existent file
+
            Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(None),
+
            Err(e) => Err(e.into()),
+

+
            Ok(meta) => {
+
                let mtime = meta.modified()?;
+
                let buf = Arc::new(packed::Buffer::open(&path, 32 * 1024)?);
+
                Ok(Some(Self { buf, path, mtime }))
+
            },
+
        }
+
    }
+

+
    fn is_modified(&self) -> io::Result<bool> {
+
        match self.path.metadata() {
+
            // it existed before, so gone is modified
+
            Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(true),
+
            Err(e) => Err(e),
+

+
            Ok(meta) => {
+
                let mtime = meta.modified()?;
+
                Ok(self.mtime == mtime)
+
            },
+
        }
+
    }
+
}
added link-git/src/service.rs
@@ -0,0 +1,126 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{fmt::Debug, ops::Deref, str::FromStr};
+

+
use git2::transport::Service as GitService;
+
use lazy_static::lazy_static;
+

+
lazy_static! {
    // Matches `<service> '<path>'` as git passes it in an SSH exec request,
    // e.g. `git-upload-pack '/foo'`; the leading slash on the path is optional.
    static ref SERVICE_REGEX: regex::Regex = regex::Regex::new(r"(\S+) '/?(.+)'").unwrap();
}
+

+
/// Newtype around [`GitService`] so `Debug`, `Display`, `Deref` and
/// conversions can be implemented locally.
// NOTE(review): derives `PartialEq` without `Eq` — add `Eq` if `GitService`
// implements it (confirm against the `git2` version in use).
#[derive(Clone, Copy, PartialEq)]
pub struct Service(pub GitService);
+

+
/// A service and URN as passed to the exec_request of an SSH server by git when
/// talking to an SSH remote. The `FromStr` implementation for this type expects
/// a string of the form:
///
/// <request type> /<path>
///
/// Where the request type is either `upload-pack` or `receive-pack`, the
/// leading slash before the urn is optional, and the `path` is whatever the
/// `FromStr` of `Path` provides.
#[derive(Debug, Clone)]
pub struct SshService<Path> {
    /// The requested git service (upload-pack or receive-pack).
    pub service: Service,
    /// The parsed repository path / URN.
    pub path: Path,
}
+

+
impl<Path> SshService<Path> {
+
    pub fn is_upload(&self) -> bool {
+
        match self.service.0 {
+
            GitService::UploadPackLs | GitService::UploadPack => true,
+
            GitService::ReceivePackLs | GitService::ReceivePack => false,
+
        }
+
    }
+

+
    pub fn is_receive(&self) -> bool {
+
        !self.is_upload()
+
    }
+
}
+

+
impl From<GitService> for Service {
+
    fn from(g: GitService) -> Self {
+
        Service(g)
+
    }
+
}
+

+
impl From<Service> for GitService {
+
    fn from(s: Service) -> Self {
+
        s.0
+
    }
+
}
+

+
/// Errors parsing an [`SshService`] from an exec request string.
#[derive(thiserror::Error, Debug)]
pub enum ParseService {
    #[error("the exec str must be in the form <service> <urn>")]
    Format,
    // Boxed because the path type's `FromStr::Err` is generic.
    #[error(transparent)]
    Namespace(Box<dyn std::error::Error + Send + Sync + 'static>),
    #[error("unknown service {0}")]
    UnknownService(String),
}
+

+
impl Debug for Service {
+
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+
        f.debug_tuple("Service")
+
            .field(match self.0 {
+
                GitService::UploadPackLs => &"UploadPackLs",
+
                GitService::UploadPack => &"UploadPack",
+
                GitService::ReceivePackLs => &"ReceivePackLs",
+
                GitService::ReceivePack => &"ReceivePack",
+
            })
+
            .finish()
+
    }
+
}
+

+
impl std::fmt::Display for Service {
+
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+
        match self.0 {
+
            GitService::UploadPack => write!(f, "upload-pack"),
+
            GitService::UploadPackLs => write!(f, "upload-pack-ls"),
+
            GitService::ReceivePack => write!(f, "receive-pack"),
+
            GitService::ReceivePackLs => write!(f, "receive-pack-ls"),
+
        }
+
    }
+
}
+

+
// Allow `Service` to be used wherever a `&GitService` is expected.
impl Deref for Service {
    type Target = GitService;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
+

+
impl<Path> FromStr for SshService<Path>
+
where
+
    Path: FromStr,
+
    Path::Err: std::error::Error + Send + Sync + 'static,
+
{
+
    type Err = ParseService;
+

+
    fn from_str(exec_str: &str) -> Result<Self, Self::Err> {
+
        let cap = SERVICE_REGEX
+
            .captures_iter(exec_str)
+
            .next()
+
            .ok_or(ParseService::Format)?;
+
        debug_assert!(cap.len() == 3);
+
        let service_str: &str = &cap[1];
+
        let urn_str = &cap[2];
+

+
        let path = urn_str
+
            .parse()
+
            .map_err(|err| ParseService::Namespace(Box::new(err)))?;
+
        let service = match service_str {
+
            "git-upload-pack" => Ok(Service(GitService::UploadPack)),
+
            "git-receive-pack" => Ok(Service(GitService::ReceivePack)),
+
            other => Err(ParseService::UnknownService(other.to_string())),
+
        }?;
+
        Ok(Self { service, path })
+
    }
+
}
added link-git/t/Cargo.toml
@@ -0,0 +1,31 @@
+
[package]
+
name = "link-git-test"
+
version = "0.1.0"
+
edition = "2021"
+
license = "GPL-3.0-or-later"
+

+
publish = false
+

+
[lib]
+
doctest = false
+
test = true
+
doc = false
+

+
[features]
+
test = []
+

+
[dev-dependencies]
+
anyhow = "1"
+
bstr = "0.2"
+
futures = "0.3"
+
futures_ringbuf = "0.3"
+
tempfile = "3.3"
+

+
[dev-dependencies.git2]
+
version = "0.13.24"
+
default-features = false
+
features = ["vendored-libgit2"]
+

+
[dev-dependencies.link-git]
+
path = ".."
+
features = ["git2"]

\ No newline at end of file
added link-git/t/src/integration.rs
@@ -0,0 +1,6 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
mod protocol;
added link-git/t/src/integration/protocol.rs
@@ -0,0 +1,382 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    collections::BTreeSet,
+
    io,
+
    path::Path,
+
    sync::{atomic::AtomicBool, Arc},
+
};
+

+
use bstr::ByteSlice as _;
+
use futures::{AsyncReadExt as _, TryFutureExt as _};
+
use link_git::protocol::{fetch, ls, packwriter, upload_pack, ObjectId, PackWriter, Ref};
+
use tempfile::{tempdir, TempDir};
+

+
// Build a bare fixture repo under namespace `foo` with an empty tree,
// `main` (one commit), `next` (one commit ahead of `main`), and a
// `pulls/1/head` ref pointing at `next`.
fn upstream() -> TempDir {
    let tmp = tempdir().unwrap();

    let repo = git2::Repository::init_bare(&tmp).unwrap();
    let auth = git2::Signature::now("apollo", "apollo@cree.de").unwrap();

    // The empty tree; both commits share it (history differs, content not).
    let tree = {
        let empty = repo.treebuilder(None).unwrap();
        let oid = empty.write().unwrap();
        repo.find_tree(oid).unwrap()
    };
    let base = {
        let oid = repo
            .commit(
                Some("refs/namespaces/foo/refs/heads/main"),
                &auth,
                &auth,
                "initial",
                &tree,
                &[],
            )
            .unwrap();
        repo.find_commit(oid).unwrap()
    };
    let next = repo
        .commit(
            Some("refs/namespaces/foo/refs/heads/next"),
            &auth,
            &auth,
            "ng",
            &tree,
            &[&base],
        )
        .unwrap();
    repo.reference(
        "refs/namespaces/foo/refs/pulls/1/head",
        next,
        true,
        "pee arrr",
    )
    .unwrap();

    tmp
}
+

+
// All references of `repo` as `(name, target)` pairs; the fixture only
// contains direct, validly-named refs, hence the `unwrap`s.
fn collect_refs(repo: &git2::Repository) -> Result<Vec<(String, git2::Oid)>, git2::Error> {
    let mut out = Vec::new();
    for r in repo.references()? {
        let r = r?;
        out.push((r.name().unwrap().to_owned(), r.target().unwrap()));
    }
    Ok(out)
}
+

+
// Create/force-update a local ref for every fetched `Ref::Direct` tip;
// any other ref variant is unexpected in these tests and aborts.
fn update_tips<'a, T>(repo: &git2::Repository, tips: T) -> Result<(), anyhow::Error>
where
    T: IntoIterator<Item = &'a Ref>,
{
    for r in tips {
        match r {
            Ref::Direct { path, object } => {
                repo.reference(
                    path.to_str()?,
                    git2::Oid::from_bytes(object.as_slice())?,
                    true,
                    "",
                )?;
            },
            x => anyhow::bail!("unexpected ref variant: {:?}", x),
        }
    }

    Ok(())
}
+

+
// Commit oids reachable from `tip`, in revwalk order.
fn collect_history(repo: &git2::Repository, tip: &str) -> Result<Vec<git2::Oid>, git2::Error> {
    let mut revwalk = repo.revwalk()?;
    revwalk.push_ref(tip)?;
    revwalk.collect()
}
+

+
// Drive an `ls-refs` exchange over an in-memory duplex pipe: client side runs
// `ls::ls_refs`, server side runs the real `upload_pack` against `remote`.
fn run_ls_refs<R: AsRef<Path>>(remote: R, opt: ls::Options) -> io::Result<Vec<Ref>> {
    let (client, server) = futures_ringbuf::Endpoint::pair(256, 256);
    let client = async move {
        let (recv, send) = client.split();
        ls::ls_refs(opt, recv, send).await
    };
    let server = {
        let (recv, send) = server.split();
        // `upload_pack` returns (header, future); run the future to completion.
        upload_pack::upload_pack(&remote, recv, send).and_then(|(_hdr, run)| run)
    };

    let (client_out, server_out) =
        futures::executor::block_on(futures::future::try_join(client, server))?;
    assert!(server_out.success());
    Ok(client_out)
}
+

+
// Drive a full fetch over an in-memory duplex pipe against the real
// `upload_pack` serving `remote`; `build_pack_writer` receives a stop flag
// and constructs the pack sink.
fn run_fetch<R, B, P>(
    remote: R,
    opt: fetch::Options,
    build_pack_writer: B,
) -> io::Result<fetch::Outputs<P::Output>>
where
    R: AsRef<Path>,
    B: FnOnce(Arc<AtomicBool>) -> P,
    P: PackWriter + Send + 'static,
    P::Output: Send + 'static,
{
    let (client, server) = futures_ringbuf::Endpoint::pair(256, 256);
    let client = async move {
        let (recv, send) = client.split();
        fetch::fetch(opt, build_pack_writer, recv, send).await
    };
    let server = {
        let (recv, send) = server.split();
        upload_pack::upload_pack(&remote, recv, send).and_then(|(_hdr, run)| run)
    };

    let (client_out, server_out) =
        futures::executor::block_on(futures::future::try_join(client, server))?;
    assert!(server_out.success());
    Ok(client_out)
}
+

+
// End-to-end sanity check: ls-refs sees exactly the fixture refs, and
// fetching all of them by want-ref yields a pack.
#[test]
fn smoke() {
    let remote = upstream();
    let refs = run_ls_refs(
        &remote,
        ls::Options {
            repo: "foo".into(),
            extra_params: vec![],
            ref_prefixes: vec!["refs/heads/".into(), "refs/pulls/".into()],
        },
    )
    .unwrap();

    assert_eq!(
        refs.iter().map(|r| r.unpack().0).collect::<BTreeSet<_>>(),
        [
            "refs/heads/main".into(),
            "refs/heads/next".into(),
            "refs/pulls/1/head".into()
        ]
        .iter()
        .collect::<BTreeSet<_>>()
    );

    let out = run_fetch(
        &remote,
        fetch::Options {
            repo: "foo".into(),
            extra_params: vec![],
            haves: vec![],
            wants: vec![],
            want_refs: refs.iter().map(|r| r.unpack().0.clone()).collect(),
        },
        |_| packwriter::Discard,
    )
    .unwrap();

    assert!(out.pack.is_some());
}
+

+
// Fetching by `want-ref` alone (no wants/haves) returns a pack and reports
// exactly the requested refs as wanted.
#[test]
fn want_ref() {
    let remote = upstream();
    let out = run_fetch(
        &remote,
        fetch::Options {
            repo: "foo".into(),
            extra_params: vec![],
            haves: vec![],
            wants: vec![],
            want_refs: vec!["refs/heads/main".into(), "refs/pulls/1/head".into()],
        },
        |_| packwriter::Discard,
    )
    .unwrap();

    assert!(out.pack.is_some());
    assert_eq!(
        out.wanted_refs
            .iter()
            .map(|r| r.unpack().0)
            .collect::<BTreeSet<_>>(),
        ["refs/heads/main".into(), "refs/pulls/1/head".into(),]
            .iter()
            .collect::<BTreeSet<_>>()
    )
}
+

+
// A fetch with no wants and no want-refs is rejected by the client before
// hitting the wire.
#[test]
#[should_panic(expected = "`fetch` is empty")]
fn empty_fetch() {
    let remote = upstream();
    run_fetch(
        &remote,
        fetch::Options {
            repo: "foo".into(),
            extra_params: vec![],
            haves: vec![],
            wants: vec![],
            want_refs: vec![],
        },
        |_| packwriter::Discard,
    )
    .unwrap();
}
+

+
// Full-clone scenario shared by the pack-writer implementations: ls-refs,
// fetch everything, apply the tips locally, then assert the local refs
// mirror the remote's (within namespace `foo`).
fn clone_with<R, L, B, P>(remote: R, local: L, build_pack_writer: B)
where
    R: AsRef<Path>,
    L: AsRef<Path>,
    B: FnOnce(Arc<AtomicBool>) -> P,
    P: PackWriter + Send + 'static,
    P::Output: Send + 'static,
{
    let refs = run_ls_refs(
        &remote,
        ls::Options {
            repo: "foo".into(),
            extra_params: vec![],
            ref_prefixes: vec!["refs/heads/".into(), "refs/pulls/".into()],
        },
    )
    .unwrap();
    let out = run_fetch(
        &remote,
        fetch::Options {
            repo: "foo".into(),
            extra_params: vec![],
            haves: vec![],
            wants: vec![],
            want_refs: refs.iter().map(|r| r.unpack().0.clone()).collect(),
        },
        build_pack_writer,
    )
    .unwrap();

    assert!(out.pack.is_some());

    let remote_repo = git2::Repository::open(remote).unwrap();
    // Namespaced view, so remote refs enumerate as `refs/...` like local ones.
    remote_repo.set_namespace("foo").unwrap();
    let local_repo = git2::Repository::open(&local).unwrap();

    update_tips(&local_repo, &out.wanted_refs).unwrap();

    let mut remote_refs = collect_refs(&remote_repo).unwrap();
    let mut local_refs = collect_refs(&local_repo).unwrap();

    remote_refs.sort();
    local_refs.sort();

    assert_eq!(remote_refs, local_refs);
}
+

+
// Clone using the libgit2-backed pack writer.
#[test]
fn clone_libgit() {
    let remote = upstream();
    let local = tempdir().unwrap();
    let local_repo = git2::Repository::init(&local).unwrap();

    clone_with(&remote, &local, move |stop| {
        packwriter::Libgit::new(packwriter::Options::default(), local_repo, stop)
    })
}
+

+
// Clone using the gitoxide-backed ("standard") pack writer.
#[test]
fn clone_gitoxide() {
    let remote = upstream();
    let local = tempdir().unwrap();
    let local_repo = git2::Repository::init(&local).unwrap();

    clone_with(&remote, &local, move |stop| {
        packwriter::Standard::new(
            local_repo.path(),
            packwriter::Options::default(),
            packwriter::StandardThickener::new(local_repo.path()),
            stop,
        )
    })
}
+

+
// Incremental-fetch scenario: clone `main` first, then fetch `next`
// advertising `main` as a have — the resulting (thin) pack must still yield
// the full `next` history locally.
fn thin_pack_with<R, L, B, P>(remote: R, local: L, build_pack_writer: B)
where
    R: AsRef<Path>,
    L: AsRef<Path>,
    B: Fn(Arc<AtomicBool>) -> P,
    P: PackWriter + Send + 'static,
    P::Output: Send + 'static,
{
    // Clone main only
    {
        let out = run_fetch(
            &remote,
            fetch::Options {
                repo: "foo".into(),
                extra_params: vec![],
                haves: vec![],
                wants: vec![],
                want_refs: vec!["refs/heads/main".into()],
            },
            &build_pack_writer,
        )
        .unwrap();
        assert!(out.pack.is_some());
    }

    let remote_repo = git2::Repository::open(&remote).unwrap();
    remote_repo.set_namespace("foo").unwrap();
    let local_repo = git2::Repository::open(&local).unwrap();

    // Fetch next, which is ahead of main
    {
        let head = remote_repo.refname_to_id("refs/heads/main").unwrap();
        let out = run_fetch(
            &remote,
            fetch::Options {
                repo: "foo".into(),
                extra_params: vec![],
                haves: vec![ObjectId::from_20_bytes(head.as_bytes())],
                wants: vec![],
                want_refs: vec!["refs/heads/next".into()],
            },
            build_pack_writer,
        )
        .unwrap();
        assert!(out.pack.is_some());

        update_tips(&local_repo, &out.wanted_refs).unwrap();
    }

    let remote_history = collect_history(&remote_repo, "refs/heads/next").unwrap();
    let local_history = collect_history(&local_repo, "refs/heads/next").unwrap();

    assert!(!remote_history.is_empty());
    assert_eq!(remote_history, local_history)
}
+

+
// Thin-pack fetch using the libgit2-backed pack writer.
#[test]
fn thin_pack_libgit() {
    let remote = upstream();
    let local = tempdir().unwrap();

    thin_pack_with(&remote, &local, |stop| {
        // Re-open per fetch: the builder is called once per run_fetch.
        let local_repo = git2::Repository::init(&local).unwrap();
        packwriter::Libgit::new(packwriter::Options::default(), local_repo, stop)
    })
}
+

+
// Thin-pack fetch using the gitoxide-backed ("standard") pack writer.
#[test]
fn thin_pack_gitoxide() {
    let remote = upstream();
    let local = tempdir().unwrap();
    let local_repo = git2::Repository::init(&local).unwrap();
    let git_dir = local_repo.path().to_owned();

    thin_pack_with(&remote, &local, move |stop| {
        packwriter::Standard::new(
            &git_dir,
            packwriter::Options::default(),
            packwriter::StandardThickener::new(&git_dir),
            stop,
        )
    })
}
added link-git/t/src/lib.rs
@@ -0,0 +1,7 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
#[cfg(test)]
+
mod integration;
+
#[cfg(test)]
+
mod tests;
added link-git/t/src/tests.rs
@@ -0,0 +1,6 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
mod protocol;
added link-git/t/src/tests/protocol.rs
@@ -0,0 +1,7 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
mod take;
+
mod upload_pack;
added link-git/t/src/tests/protocol/take.rs
@@ -0,0 +1,47 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use futures::{executor::block_on, io::Cursor, AsyncReadExt as _};
+
use link_git::protocol::take::TryTake;
+
use std::io;
+

+
#[test]
+
fn when_within_limit() {
+
    let input = b"the world is everything that is the case";
+
    let output = block_on(async move {
+
        let mut buf = Vec::with_capacity(input.len());
+
        TryTake::new(Cursor::new(input), input.len() as u64 + 1)
+
            .read_to_end(&mut buf)
+
            .await?;
+
        Ok::<_, io::Error>(buf)
+
    })
+
    .unwrap();
+

+
    assert_eq!(input, output.as_slice())
+
}
+

+
#[test]
+
fn when_limit_exceeded() {
+
    let input = b"what is the case, the fact, is the existence of atomic facts";
+
    let output =
+
        block_on(TryTake::new(Cursor::new(input), 10).read_to_end(&mut Vec::new())).unwrap_err();
+

+
    assert_eq!(output.to_string(), "max input size exceeded")
+
}
+

+
#[test]
+
fn excess_bytes_remain() {
+
    let input = b"whereof one cannot speak, thereof one must be silent";
+
    let output = block_on(async move {
+
        let mut buf = Vec::with_capacity(input.len());
+
        let res = TryTake::new(Cursor::new(input), input.len() as u64)
+
            .read_to_end(&mut buf)
+
            .await;
+
        assert!(res.is_err());
+
        buf
+
    });
+

+
    assert_eq!(input, output.as_slice())
+
}
added link-git/t/src/tests/protocol/upload_pack.rs
@@ -0,0 +1,129 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use link_git::protocol::upload_pack;
+

+
mod header {
+
    use super::*;
+
    use std::str::FromStr as _;
+

+
    #[test]
+
    fn service_must_be_upload_pack() {
+
        assert_eq!(
+
            upload_pack::Header::from_str("git-receive-pack "),
+
            Err("unsupported service")
+
        )
+
    }
+

+
    #[test]
+
    fn no_path() {
+
        assert_eq!(
+
            upload_pack::Header::from_str("git-upload-pack "),
+
            Err("missing path")
+
        )
+
    }
+

+
    #[test]
+
    fn empty_path() {
+
        assert_eq!(
+
            upload_pack::Header::from_str("git-upload-pack \0host=lolhost:123\0"),
+
            Err("empty path")
+
        )
+
    }
+

+
    #[test]
+
    fn host_and_port() {
+
        assert_eq!(
+
            upload_pack::Header::from_str("git-upload-pack /git.git\0host=lolhost:123\0").unwrap(),
+
            upload_pack::Header {
+
                path: "/git.git".to_owned(),
+
                host: Some(("lolhost".to_owned(), Some(123))),
+
                extra: vec![]
+
            }
+
        )
+
    }
+

+
    #[test]
+
    fn host_without_port() {
+
        assert_eq!(
+
            upload_pack::Header::from_str("git-upload-pack /git.git\0host=lolhost\0").unwrap(),
+
            upload_pack::Header {
+
                path: "/git.git".to_owned(),
+
                host: Some(("lolhost".to_owned(), None)),
+
                extra: vec![]
+
            }
+
        )
+
    }
+

+
    #[test]
+
    fn no_host() {
+
        assert_eq!(
+
            upload_pack::Header::from_str("git-upload-pack /git.git\0").unwrap(),
+
            upload_pack::Header {
+
                path: "/git.git".to_owned(),
+
                host: None,
+
                extra: vec![]
+
            }
+
        )
+
    }
+

+
    #[test]
+
    fn empty_host() {
+
        assert_eq!(
+
            upload_pack::Header::from_str("git-upload-pack /git.git\0\0").unwrap(),
+
            upload_pack::Header {
+
                path: "/git.git".to_owned(),
+
                host: None,
+
                extra: vec![]
+
            }
+
        )
+
    }
+

+
    #[test]
+
    fn no_host_extra() {
+
        assert_eq!(
+
            upload_pack::Header::from_str("git-upload-pack /git.git\0\0version=42\0").unwrap(),
+
            upload_pack::Header {
+
                path: "/git.git".to_owned(),
+
                host: None,
+
                extra: vec![("version".to_owned(), Some("42".to_owned()))]
+
            }
+
        )
+
    }
+

+
    #[test]
+
    fn host_port_extra() {
+
        assert_eq!(
+
            upload_pack::Header::from_str(
+
                "git-upload-pack /git.git\0host=lolhost:123\0\0version=42\0"
+
            )
+
            .unwrap(),
+
            upload_pack::Header {
+
                path: "/git.git".to_owned(),
+
                host: Some(("lolhost".to_owned(), Some(123))),
+
                extra: vec![("version".to_owned(), Some("42".to_owned()))]
+
            }
+
        )
+
    }
+

+
    #[test]
+
    fn host_extra_extra() {
+
        assert_eq!(
+
            upload_pack::Header::from_str(
+
                "git-upload-pack /git.git\0host=lolhost\0\0version=42\0foo\0n=69\0"
+
            )
+
            .unwrap(),
+
            upload_pack::Header {
+
                path: "/git.git".to_owned(),
+
                host: Some(("lolhost".to_owned(), None)),
+
                extra: vec![
+
                    ("version".to_owned(), Some("42".to_owned())),
+
                    ("foo".to_owned(), None),
+
                    ("n".to_owned(), Some("69".to_owned()))
+
                ]
+
            }
+
        )
+
    }
+
}
added macros/Cargo.toml
@@ -0,0 +1,20 @@
+
[package]
+
name = "radicle-macros"
+
version = "0.1.0"
+
authors = ["The Radicle Team <dev@radicle.xyz>"]
+
edition = "2018"
+
license = "GPL-3.0-or-later"
+
description = "Radicle procedural macros"
+

+
[lib]
+
doctest = false
+
proc-macro = true
+
test = false
+

+
[dependencies]
+
proc-macro-error = "1.0.4"
+
quote = "1"
+
syn = "1"
+

+
[dependencies.radicle-git-ext]
+
path = "../git-ext"
added macros/src/lib.rs
@@ -0,0 +1,72 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
#[macro_use]
+
extern crate proc_macro_error;
+

+
use std::convert::TryFrom;
+

+
use proc_macro::TokenStream;
+
use proc_macro_error::abort;
+
use quote::quote;
+
use syn::{parse_macro_input, LitStr};
+

+
use radicle_git_ext::reference::name::{RefLike, RefspecPattern};
+

+
/// Create `RefLike` from a string literal.
+
///
+
/// The string is validated at compile time, and an unsafe conversion is
+
/// emitted.
+
///
+
/// ```rust
+
/// use radicle_macros::reflike;
+
///
+
/// assert_eq!("lolek/bolek", reflike!("lolek/bolek").as_str())
+
/// ```
+
#[proc_macro_error]
+
#[proc_macro]
+
pub fn reflike(input: TokenStream) -> TokenStream {
+
    let lit = parse_macro_input!(input as LitStr);
+

+
    match RefLike::try_from(lit.value()) {
+
        Ok(safe) => {
+
            let safe: &str = &*safe;
+
            let expand = quote! { unsafe { ::std::mem::transmute::<_, ::radicle_git_ext::RefLike>(#safe.to_owned()) }};
+
            TokenStream::from(expand)
+
        },
+

+
        Err(e) => {
+
            abort!(lit.span(), "invalid RefLike literal: {}", e);
+
        },
+
    }
+
}
+

+
/// Create a `RefspecPattern` from a string literal.
+
///
+
/// The string is validated at compile time, and an unsafe conversion is
+
/// emitted.
+
///
+
/// ```rust
+
/// use radicle_macros::refspec_pattern;
+
///
+
/// assert_eq!("refs/heads/*", refspec_pattern!("refs/heads/*").as_str())
+
/// ```
+
#[proc_macro_error]
+
#[proc_macro]
+
pub fn refspec_pattern(input: TokenStream) -> TokenStream {
+
    let lit = parse_macro_input!(input as LitStr);
+

+
    match RefspecPattern::try_from(lit.value()) {
+
        Ok(safe) => {
+
            let safe: &str = &*safe;
+
            let expand = quote! { unsafe { ::std::mem::transmute::<_, ::radicle_git_ext::RefspecPattern>(#safe.to_owned()) }};
+
            TokenStream::from(expand)
+
        },
+

+
        Err(e) => {
+
            abort!(lit.span(), "invalid RefspecPattern literal: {}", e);
+
        },
+
    }
+
}
added nix/cargo-nextest/default.nix
@@ -0,0 +1,30 @@
+
{ sources ? import ../sources.nix
+
, pkgs ? import sources.nixpkgs
+
}:
+
with pkgs;
+
rustPlatform.buildRustPackage rec {
+
  pname = "cargo-nextest";
+
  version = "0.9.9";
+

+
  src = fetchFromGitHub {
+
    owner = "nextest-rs";
+
    repo = "nextest";
+
    rev = "cargo-nextest-${version}";
+
    sha256 = "sha256-1s1N126S51kg7aOgAb8oMts1zJcO6QRn1fwbQf6ZaJ8=";
+
  };
+

+
  cargoSha256 = "sha256-JxZyl5Hti3Hh33e7H/pXhM6WkU0kDDml0naBPYzvNy4=";
+

+
  nativeBuildInputs = [
+
    pkg-config
+
  ];
+

+
  buildInputs = [
+
    openssl
+
    libiconv
+
  ] ++ lib.optionals stdenv.isDarwin [
+
    Security
+
  ];
+

+
  doCheck = false;
+
}
added nix/ci/fmt
@@ -0,0 +1,4 @@
+
#!/usr/bin/env bash
+
set -eoux pipefail
+

+
cargo fmt -- --check
added nix/ci/run
@@ -0,0 +1,9 @@
+
#!/usr/bin/env bash
+
set -eou pipefail
+

+
./nix/ci/fmt
+
./scripts/ci/lint
+
./scripts/ci/build
+
./scripts/ci/test
+
./scripts/ci/docs
+
./scripts/ci/advisory
added nix/sources.json
@@ -0,0 +1,38 @@
+
{
+
    "niv": {
+
        "branch": "master",
+
        "description": "Easy dependency management for Nix projects",
+
        "homepage": "https://github.com/nmattia/niv",
+
        "owner": "nmattia",
+
        "repo": "niv",
+
        "rev": "82e5cd1ad3c387863f0545d7591512e76ab0fc41",
+
        "sha256": "090l219mzc0gi33i3psgph6s2pwsc8qy4lyrqjdj4qzkvmaj65a7",
+
        "type": "tarball",
+
        "url": "https://github.com/nmattia/niv/archive/82e5cd1ad3c387863f0545d7591512e76ab0fc41.tar.gz",
+
        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+
    },
+
    "nixpkgs": {
+
        "branch": "release-21.11",
+
        "description": "Nix Packages collection",
+
        "homepage": "",
+
        "owner": "NixOS",
+
        "repo": "nixpkgs",
+
        "rev": "eabc38219184cc3e04a974fe31857d8e0eac098d",
+
        "sha256": "04ffwp2gzq0hhz7siskw6qh9ys8ragp7285vi1zh8xjksxn1msc5",
+
        "type": "tarball",
+
        "url": "https://github.com/NixOS/nixpkgs/archive/eabc38219184cc3e04a974fe31857d8e0eac098d.tar.gz",
+
        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+
    },
+
    "rust-overlay": {
+
        "branch": "master",
+
        "description": "Pure and reproducible nix overlay of binary distributed rust toolchains",
+
        "homepage": "",
+
        "owner": "oxalica",
+
        "repo": "rust-overlay",
+
        "rev": "c97cf9d581e09b767f5e3503b43dc3e4cd91bd99",
+
        "sha256": "02r3y1x4sdzdy0qzds6286v51406jk7ywks6fjivmb3c5mlhvq8x",
+
        "type": "tarball",
+
        "url": "https://github.com/oxalica/rust-overlay/archive/c97cf9d581e09b767f5e3503b43dc3e4cd91bd99.tar.gz",
+
        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+
    }
+
}
added nix/sources.nix
@@ -0,0 +1,174 @@
+
# This file has been generated by Niv.
+

+
let
+

+
  #
+
  # The fetchers. fetch_<type> fetches specs of type <type>.
+
  #
+

+
  fetch_file = pkgs: name: spec:
+
    let
+
      name' = sanitizeName name + "-src";
+
    in
+
      if spec.builtin or true then
+
        builtins_fetchurl { inherit (spec) url sha256; name = name'; }
+
      else
+
        pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
+

+
  fetch_tarball = pkgs: name: spec:
+
    let
+
      name' = sanitizeName name + "-src";
+
    in
+
      if spec.builtin or true then
+
        builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
+
      else
+
        pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
+

+
  fetch_git = name: spec:
+
    let
+
      ref =
+
        if spec ? ref then spec.ref else
+
          if spec ? branch then "refs/heads/${spec.branch}" else
+
            if spec ? tag then "refs/tags/${spec.tag}" else
+
              abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
+
    in
+
      builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };
+

+
  fetch_local = spec: spec.path;
+

+
  fetch_builtin-tarball = name: throw
+
    ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
+
        $ niv modify ${name} -a type=tarball -a builtin=true'';
+

+
  fetch_builtin-url = name: throw
+
    ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
+
        $ niv modify ${name} -a type=file -a builtin=true'';
+

+
  #
+
  # Various helpers
+
  #
+

+
  # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
+
  sanitizeName = name:
+
    (
+
      concatMapStrings (s: if builtins.isList s then "-" else s)
+
        (
+
          builtins.split "[^[:alnum:]+._?=-]+"
+
            ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
+
        )
+
    );
+

+
  # The set of packages used when specs are fetched using non-builtins.
+
  mkPkgs = sources: system:
+
    let
+
      sourcesNixpkgs =
+
        import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
+
      hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
+
      hasThisAsNixpkgsPath = <nixpkgs> == ./.;
+
    in
+
      if builtins.hasAttr "nixpkgs" sources
+
      then sourcesNixpkgs
+
      else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
+
        import <nixpkgs> {}
+
      else
+
        abort
+
          ''
+
            Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
+
            add a package called "nixpkgs" to your sources.json.
+
          '';
+

+
  # The actual fetching function.
+
  fetch = pkgs: name: spec:
+

+
    if ! builtins.hasAttr "type" spec then
+
      abort "ERROR: niv spec ${name} does not have a 'type' attribute"
+
    else if spec.type == "file" then fetch_file pkgs name spec
+
    else if spec.type == "tarball" then fetch_tarball pkgs name spec
+
    else if spec.type == "git" then fetch_git name spec
+
    else if spec.type == "local" then fetch_local spec
+
    else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
+
    else if spec.type == "builtin-url" then fetch_builtin-url name
+
    else
+
      abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
+

+
  # If the environment variable NIV_OVERRIDE_${name} is set, then use
+
  # the path directly as opposed to the fetched source.
+
  replace = name: drv:
+
    let
+
      saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
+
      ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
+
    in
+
      if ersatz == "" then drv else
+
        # this turns the string into an actual Nix path (for both absolute and
+
        # relative paths)
+
        if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
+

+
  # Ports of functions for older nix versions
+

+
  # a Nix version of mapAttrs if the built-in doesn't exist
+
  mapAttrs = builtins.mapAttrs or (
+
    f: set: with builtins;
+
    listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
+
  );
+

+
  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
+
  range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
+

+
  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
+
  stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
+

+
  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
+
  stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
+
  concatMapStrings = f: list: concatStrings (map f list);
+
  concatStrings = builtins.concatStringsSep "";
+

+
  # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
+
  optionalAttrs = cond: as: if cond then as else {};
+

+
  # fetchTarball version that is compatible between all the versions of Nix
+
  builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
+
    let
+
      inherit (builtins) lessThan nixVersion fetchTarball;
+
    in
+
      if lessThan nixVersion "1.12" then
+
        fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
+
      else
+
        fetchTarball attrs;
+

+
  # fetchurl version that is compatible between all the versions of Nix
+
  builtins_fetchurl = { url, name ? null, sha256 }@attrs:
+
    let
+
      inherit (builtins) lessThan nixVersion fetchurl;
+
    in
+
      if lessThan nixVersion "1.12" then
+
        fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
+
      else
+
        fetchurl attrs;
+

+
  # Create the final "sources" from the config
+
  mkSources = config:
+
    mapAttrs (
+
      name: spec:
+
        if builtins.hasAttr "outPath" spec
+
        then abort
+
          "The values in sources.json should not have an 'outPath' attribute"
+
        else
+
          spec // { outPath = replace name (fetch config.pkgs name spec); }
+
    ) config.sources;
+

+
  # The "config" used by the fetchers
+
  mkConfig =
+
    { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
+
    , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
+
    , system ? builtins.currentSystem
+
    , pkgs ? mkPkgs sources system
+
    }: rec {
+
      # The sources, i.e. the attribute set of spec name to spec
+
      inherit sources;
+

+
      # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
+
      inherit pkgs;
+
    };
+

+
in
+
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
added scripts/ci/advisory
@@ -0,0 +1,8 @@
+
#!/usr/bin/env bash
+
set -eoux pipefail
+

+
cargo deny --version
+
cargo deny check advisories
+
cargo deny check licenses
+
cargo deny check bans
+
cargo deny check sources
added scripts/ci/build
@@ -0,0 +1,4 @@
+
#!/usr/bin/env bash
+
set -eoux pipefail
+

+
cargo build --tests --workspace
added scripts/ci/docs
@@ -0,0 +1,5 @@
+
#!/usr/bin/env bash
+
set -eoux pipefail
+

+
RUSTDOCFLAGS="-D rustdoc::broken-intra-doc-links -D warnings" \
+
cargo doc --no-deps --workspace --document-private-items
added scripts/ci/fmt
@@ -0,0 +1,4 @@
+
#!/usr/bin/env bash
+
set -eoux pipefail
+

+
cargo +nightly fmt -- --check
added scripts/ci/lint
@@ -0,0 +1,7 @@
+
#!/usr/bin/env bash
+
set -eoux pipefail
+

+
# Force clippy to consider all local sources
+
# https://github.com/rust-lang/rust-clippy/issues/4612
+
find . -name "*.rs" -not -path "./target/*" -exec touch "{}" +
+
cargo clippy --all-targets -- -D warnings
added scripts/ci/macos-gnu
@@ -0,0 +1,5 @@
+
if brew ls --versions gnu-tar > /dev/null; then
+
  echo "gnu-tar is already installed"
+
else
+
  brew install gnu-tar
+
fi
added scripts/ci/run
@@ -0,0 +1,9 @@
+
#!/usr/bin/env bash
+
set -eou pipefail
+

+
./scripts/ci/fmt
+
./scripts/ci/lint
+
./scripts/ci/advisory
+
./scripts/ci/build
+
./scripts/ci/test
+
./scripts/ci/docs
added scripts/ci/test
@@ -0,0 +1,8 @@
+
#!/usr/bin/env bash
+
set -eoux pipefail
+

+
RUST_LOG=error ssh-agent cargo nextest run \
+
    --status-level all \
+
    --failure-output immediate-final \
+
    --no-fail-fast \
+
    --retries 2
added scripts/ci/test-fast
@@ -0,0 +1,4 @@
+
#!/usr/bin/env bash
+
set -eoux pipefail
+

+
cargo nextest run --no-fail-fast tests properties
added shell.nix
@@ -0,0 +1,22 @@
+
{ sources ? import ./nix/sources.nix
+
, pkgs ? import sources.nixpkgs {
+
    overlays = [ (import sources.rust-overlay) ];
+
  }
+
}:
+
let
+
  stable = pkgs.rust-bin.stable.latest.default;
+
  rust-overlay = stable.override {
+
    extensions = [ "rust-src" "rust-analysis" ];
+
  };
+
  devault = (pkgs.callPackage ./default.nix {});
+
in
+
  with pkgs;
+
  mkShell {
+
    name = "development";
+
    buildInputs = devault.buildInputs ++ [
+
        clang
+
        lld
+

+
        ripgrep
+
    ];
+
  }
added std-ext/Cargo.toml
@@ -0,0 +1,15 @@
+
[package]
+
name = "radicle-std-ext"
+
version = "0.1.0"
+
authors = ["The Radicle Team <dev@radicle.xyz>"]
+
edition = "2018"
+
license = "GPL-3.0-or-later"
+
description = "Monkey patches of std types"
+

+
[lib]
+
doctest = false
+
test = false
+

+
[features]
+
default = []
+
nightly = []
added std-ext/src/lib.rs
@@ -0,0 +1,19 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
#![cfg_attr(feature = "nightly", feature(try_trait_v2))]
+

+
pub mod ops;
+
pub mod result;
+

+
pub type Void = std::convert::Infallible;
+

+
pub mod prelude {
+
    use super::*;
+

+
    pub use super::Void;
+
    pub use ops::{FromResidual, Try};
+
    pub use result::ResultExt;
+
}
added std-ext/src/ops.rs
@@ -0,0 +1,173 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
//! Provides a `Try` trait for stable Rust
+
//!
+
//! Probably doesn't work with `?`-desugaring. If the `nightly` feature is
+
//! enabled for this crate, the `std` version is used instead.
+

+
#[cfg(not(feature = "nightly"))]
+
pub use stable::{FromResidual, Try};
+
#[cfg(feature = "nightly")]
+
pub use std::ops::{FromResidual, Try};
+

+
mod stable {
+
    use std::{convert, ops::ControlFlow, task::Poll};
+

+
    pub trait Try: FromResidual {
+
        type Output;
+
        type Residual;
+

+
        fn from_output(output: Self::Output) -> Self;
+
        fn branch(self) -> ControlFlow<Self::Residual, Self::Output>;
+
    }
+

+
    pub trait FromResidual<R = <Self as Try>::Residual> {
+
        fn from_residual(residual: R) -> Self;
+
    }
+

+
    impl<B, C> Try for ControlFlow<B, C> {
+
        type Output = C;
+
        type Residual = ControlFlow<B, convert::Infallible>;
+

+
        #[inline]
+
        fn from_output(output: Self::Output) -> Self {
+
            ControlFlow::Continue(output)
+
        }
+

+
        #[inline]
+
        fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
+
            match self {
+
                ControlFlow::Continue(c) => ControlFlow::Continue(c),
+
                ControlFlow::Break(b) => ControlFlow::Break(ControlFlow::Break(b)),
+
            }
+
        }
+
    }
+

+
    impl<B, C> FromResidual for ControlFlow<B, C> {
+
        #[inline]
+
        fn from_residual(residual: ControlFlow<B, convert::Infallible>) -> Self {
+
            match residual {
+
                ControlFlow::Break(b) => ControlFlow::Break(b),
+
                _ => unreachable!(),
+
            }
+
        }
+
    }
+

+
    impl<T> Try for Option<T> {
+
        type Output = T;
+
        type Residual = Option<convert::Infallible>;
+

+
        #[inline]
+
        fn from_output(output: Self::Output) -> Self {
+
            Some(output)
+
        }
+

+
        #[inline]
+
        fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
+
            match self {
+
                Some(v) => ControlFlow::Continue(v),
+
                None => ControlFlow::Break(None),
+
            }
+
        }
+
    }
+

+
    impl<T> FromResidual for Option<T> {
+
        #[inline]
+
        fn from_residual(residual: Option<convert::Infallible>) -> Self {
+
            match residual {
+
                None => None,
+
                _ => unreachable!(),
+
            }
+
        }
+
    }
+

+
    impl<T, E> Try for Result<T, E> {
+
        type Output = T;
+
        type Residual = Result<convert::Infallible, E>;
+

+
        #[inline]
+
        fn from_output(output: Self::Output) -> Self {
+
            Ok(output)
+
        }
+

+
        #[inline]
+
        fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
+
            match self {
+
                Ok(v) => ControlFlow::Continue(v),
+
                Err(e) => ControlFlow::Break(Err(e)),
+
            }
+
        }
+
    }
+

+
    impl<T, E, F: From<E>> FromResidual<Result<convert::Infallible, E>> for Result<T, F> {
+
        #[inline]
+
        fn from_residual(residual: Result<convert::Infallible, E>) -> Self {
+
            match residual {
+
                Err(e) => Err(From::from(e)),
+
                _ => unreachable!(),
+
            }
+
        }
+
    }
+

+
    impl<T, E> Try for Poll<Option<Result<T, E>>> {
+
        type Output = Poll<Option<T>>;
+
        type Residual = Result<convert::Infallible, E>;
+

+
        #[inline]
+
        fn from_output(c: Self::Output) -> Self {
+
            c.map(|x| x.map(Ok))
+
        }
+

+
        #[inline]
+
        fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
+
            match self {
+
                Poll::Ready(Some(Ok(x))) => ControlFlow::Continue(Poll::Ready(Some(x))),
+
                Poll::Ready(Some(Err(e))) => ControlFlow::Break(Err(e)),
+
                Poll::Ready(None) => ControlFlow::Continue(Poll::Ready(None)),
+
                Poll::Pending => ControlFlow::Continue(Poll::Pending),
+
            }
+
        }
+
    }
+

+
    impl<T, E, F: From<E>> FromResidual<Result<convert::Infallible, E>> for Poll<Option<Result<T, F>>> {
+
        #[inline]
+
        fn from_residual(x: Result<convert::Infallible, E>) -> Self {
+
            match x {
+
                Err(e) => Poll::Ready(Some(Err(From::from(e)))),
+
                _ => unreachable!(),
+
            }
+
        }
+
    }
+

+
    impl<T, E> Try for Poll<Result<T, E>> {
+
        type Output = Poll<T>;
+
        type Residual = Result<convert::Infallible, E>;
+

+
        #[inline]
+
        fn from_output(c: Self::Output) -> Self {
+
            c.map(Ok)
+
        }
+

+
        #[inline]
+
        fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
+
            match self {
+
                Poll::Ready(Ok(x)) => ControlFlow::Continue(Poll::Ready(x)),
+
                Poll::Ready(Err(e)) => ControlFlow::Break(Err(e)),
+
                Poll::Pending => ControlFlow::Continue(Poll::Pending),
+
            }
+
        }
+
    }
+

+
    impl<T, E, F: From<E>> FromResidual<Result<convert::Infallible, E>> for Poll<Result<T, F>> {
+
        #[inline]
+
        fn from_residual(x: Result<convert::Infallible, E>) -> Self {
+
            match x {
+
                Err(e) => Poll::Ready(Err(From::from(e))),
+
                _ => unreachable!(),
+
            }
+
        }
+
    }
+
}
added std-ext/src/result.rs
@@ -0,0 +1,40 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
pub trait ResultExt<T, E> {
+
    /// Calls `f` if the result is [`Err`], **and** the predicate `pred` on the
+
    /// error value returns true. Otherwise returns the [`Ok`] value of
+
    /// `self`. Note that `f` may change the error type, so long as the
+
    /// target type can be converted from the original one.
+
    ///
+
    /// # Examples
+
    ///
+
    /// ```
+
    /// use std::io;
+
    /// use radicle_std_ext::result::ResultExt as _;
+
    ///
+
    /// let res = Err(io::Error::new(io::ErrorKind::Other, "crashbug"))
+
    ///     .or_matches::<io::Error, _, _>(|e| matches!(e.kind(), io::ErrorKind::Other), || Ok(()))
+
    ///     .unwrap();
+
    ///
+
    /// assert_eq!((), res)
+
    /// ```
+
    fn or_matches<E2, P, F>(self, pred: P, f: F) -> Result<T, E2>
+
    where
+
        E2: From<E>,
+
        P: FnOnce(&E) -> bool,
+
        F: FnOnce() -> Result<T, E2>;
+
}
+

+
impl<T, E> ResultExt<T, E> for Result<T, E> {
+
    fn or_matches<E2, P, F>(self, pred: P, f: F) -> Result<T, E2>
+
    where
+
        E2: From<E>,
+
        P: FnOnce(&E) -> bool,
+
        F: FnOnce() -> Result<T, E2>,
+
    {
+
        self.or_else(|e| if pred(&e) { f() } else { Err(e.into()) })
+
    }
+
}
added test/.gitignore
@@ -0,0 +1 @@
+
proptest-regressions
added test/Cargo.toml
@@ -0,0 +1,31 @@
+
[package]
+
name = "tests"
+
version = "0.1.0"
+
edition = "2021"
+
license = "GPL-3.0-or-later"
+

+
publish = false
+

+
[lib]
+
doctest = false
+
test = true
+
doc = false
+

+
[dev-dependencies.git-ext-test]
+
path = "../git-ext/t"
+
features = ["test"]
+

+
[dev-dependencies.git-ref-format-test]
+
path = "../git-ref-format/t"
+
features = ["test"]
+

+
[dev-dependencies.git-trailers-test]
+
path = "../git-trailers/t"
+
features = ["test"]
+

+
# [dev-dependencies.gitd-lib-test]
+
# path = "../cli/gitd-lib/t"
+

+
[dev-dependencies.link-git-test]
+
path = "../link-git/t"
+
features = ["test"]
added test/README.md
@@ -0,0 +1,52 @@
+
# Test root crate
+

+
Organisation of the test code for the `radicle-git` project crates deviates
+
from the `cargo` conventions in order to work around some of the limitations of
+
the current `cargo` / Rust testing infrastructure.
+

+
Here is how:
+

+
- Project crates are set to `test = false` by default, i.e. no `#[cfg(test)]` /
+
  `#[test]` annotated tests are run.
+

+
- Instead, project crates are tested via an accompanying `<crate>-test` crate
+
  located in a `t/` directory relative to the crate root.
+

+
- This is similar to what `cargo` calls "integration tests", in that only the
+
  public API of the crate under test is available. Test crates are, however,
+
  meant to contain all kinds of tests.
+

+
- Conventionally, tests are split into module hierarchies, mainly to support
+
  convenient filtering.
+

+
    `tests`
+
    : Unit tests. Example-based, preferably-pure.
+

+
    `properties`
+
    : Property tests. Randomized, preferably-pure.
+

+
    `integration`
+
    : Stateful tests, scenario-based. May have all kinds of effects.
+

+
- Additionally, test crates may export helpers (such as mocks or fixtures) and
+
  `proptest` generators through `gen` and `helpers` modules. Test crates may
+
  depend on each other to make those types / functions available, possibly
+
  mirroring the dependency relationships of their respective "parent" crates.
+

+
- `gen` and `helpers` modules are guarded behind a feature flag "test", i.e.
+

+
        #[cfg(any(test, feature = "test"))]
+

+
- Additional helpers can be found in the `test-helpers` (preferably-pure) and
+
  `it-helpers` (stateful) crates.
+

+
- This crate (`test`) does not contain any code, but depends on all other test
+
  crates in the workspace (which are themselves not proper workspace members).
+
  This prevents unnecessary compilation of test crates if no test target is
+
  being built, but still makes each test crate available to be executed
+
  individually via the `-p` flag, e.g.
+

+
        cargo test -p link-replication-test
+

+
- It is recommended to use [`cargo-nextest`](https://nexte.st) instead of `cargo
+
  test` for maximising parallelism.
added test/it-helpers/Cargo.toml
@@ -0,0 +1,48 @@
+
[package]
+
name = "it-helpers"
+
version = "0.1.0"
+
edition = "2021"
+
license = "GPL-3.0-or-later"
+
publish = false
+

+
description = "Integration test helpers"
+

+
[lib]
+
doctest = false
+
test = false
+

+
[dependencies]
+
anyhow = "1"
+
futures = "0.3"
+
once_cell = "1.10"
+
tempfile = "3.3"
+
tokio = "1.13"
+
tracing = "0.1"
+
either = "1.6"
+

+
[dependencies.git2]
+
version = "0.13.24"
+
default-features = false
+
features = ["vendored-libgit2"]
+

+
#
+
# workspace dependencies
+
#
+

+
[dependencies.git-ref-format]
+
path = "../../git-ref-format"
+

+
[dependencies.librad]
+
path = "../../librad"
+

+
[dependencies.link-async]
+
path = "../../link-async"
+

+
[dependencies.lnk-clib]
+
path = "../../cli/lnk-clib"
+

+
[dependencies.radicle-git-ext]
+
path = "../../git-ext"
+

+
[dependencies.test-helpers]
+
path = "../test-helpers"
added test/it-helpers/src/git.rs
@@ -0,0 +1,27 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use git_ref_format::Qualified;
+

+
#[tracing::instrument(skip(repo))]
+
pub fn create_commit(
+
    repo: &git2::Repository,
+
    on_branch: Qualified,
+
) -> Result<git2::Oid, git2::Error> {
+
    let empty_tree = {
+
        let mut index = repo.index()?;
+
        let oid = index.write_tree()?;
+
        repo.find_tree(oid).unwrap()
+
    };
+
    let author = git2::Signature::now("The Animal", "animal@muppets.com").unwrap();
+
    repo.commit(
+
        Some(on_branch.as_str()),
+
        &author,
+
        &author,
+
        "Initial commit",
+
        &empty_tree,
+
        &[],
+
    )
+
}
added test/it-helpers/src/lib.rs
@@ -0,0 +1,10 @@
+
//! Stateful helpers for integration tests.

// Bring `tracing` macros (`debug!`, …) into scope crate-wide.
#[macro_use]
extern crate tracing;

pub mod fixed;
pub mod git;
pub mod layout;
pub mod ssh;
pub mod testnet;
pub mod tmp;
pub mod working_copy;
added test/it-helpers/src/ssh.rs
@@ -0,0 +1,68 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::process::{Command, Stdio};
+

+
use lnk_clib::keys::ssh::SshAuthSock;
+
use test_helpers::tempdir::WithTmpDir;
+

+
pub type TmpSshSock = WithTmpDir<SshAuthSock>;
+

+
pub fn ssh_auth_sock() -> TmpSshSock {
+
    WithTmpDir::new(|path| -> anyhow::Result<SshAuthSock> {
+
        let sock = path.join("tmp.sock");
+
        Ok(SshAuthSock::Uds(sock))
+
    })
+
    .unwrap()
+
}
+

+
/// Run a computation with a forked `ssh-agent` on a temporary file handle.
+
///
+
/// Once the computation is finished, the `ssh-agent` is killed by getting its
+
/// PID and running the equivalent of `SSH_AGENT_PID=<pid> ssh-agent -k`.
+
/// This is a best effort of resource cleanup, but has no guarantees if the
+
/// parsing of the PID or the killin of the agent fail.
+
pub fn with_ssh_agent<F, T>(callback: F) -> anyhow::Result<T>
+
where
+
    F: FnOnce(SshAuthSock) -> anyhow::Result<T>,
+
{
+
    let sock = ssh_auth_sock();
+
    let path = match &*sock {
+
        SshAuthSock::Uds(path) => path,
+
        _ => unreachable!(),
+
    };
+
    let agent = Command::new("ssh-agent").arg("-a").arg(path).output()?;
+
    anyhow::ensure!(agent.status.success(), agent.status);
+
    let pid = agent_pid(&agent.stdout)?;
+
    let res = callback((*sock).clone());
+
    kill_agent_pid(pid)?;
+
    res
+
}
+

+
/// Kill the ssh-agent running on the given PID.
+
fn kill_agent_pid(pid: &str) -> anyhow::Result<()> {
+
    debug!(pid = %pid, "killing ssh-agent");
+
    let status = Command::new("ssh-agent")
+
        .env("SSH_AGENT_PID", pid)
+
        .args(["-k"])
+
        .stdout(Stdio::null())
+
        .status()?;
+
    debug!(status = %status, "status of killing agent");
+
    Ok(())
+
}
+

+
/// Get the PID of the launched ssh-agent.
///
/// It gets the PID by splitting the agent's stdout on `;` and stripping the
/// `SSH_AGENT_PID=` prefix from the (trimmed) chunk that carries it.
fn agent_pid(out: &[u8]) -> anyhow::Result<&str> {
    const PREFIX: &str = "SSH_AGENT_PID=";
    const SEP: u8 = b';';
    let pid = out
        .split(|b| b == &SEP)
        // Non-UTF-8 chunks are skipped; the first chunk with the prefix wins.
        .find_map(|bs| std::str::from_utf8(bs).ok()?.trim().strip_prefix(PREFIX))
        .ok_or_else(|| anyhow::anyhow!("could not find SSH_AGENT_PID"))?;
    Ok(pid)
}
added test/it-helpers/src/tmp.rs
@@ -0,0 +1,47 @@
+
// Copyright © 2021 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPLv3-or-later
+

+
use std::io;
+

+
use librad::{git::storage::Storage, paths::Paths, SecretKey};
+
use test_helpers::tempdir::WithTmpDir;
+

+
pub type TmpPaths = WithTmpDir<Paths>;
+

+
pub fn paths() -> TmpPaths {
+
    WithTmpDir::new(|path| -> Result<_, io::Error> {
+
        let paths = Paths::from_root(path)?;
+
        Ok::<_, io::Error>(paths)
+
    })
+
    .unwrap()
+
}
+

+
type TmpRepo = WithTmpDir<git2::Repository>;
+

+
pub fn repo() -> anyhow::Result<TmpRepo> {
+
    Ok(WithTmpDir::new(|path| {
+
        let setup = || {
+
            let repo = git2::Repository::init(path)?;
+

+
            // We need to set user info to _something_, but that doesn't have to
+
            // be valid, as we're using a shared repo with many keys
+
            let mut config = repo.config()?;
+
            config.set_str("user.name", "shared")?;
+
            config.set_str("user.email", "not.relevant@for.testing")?;
+
            Ok(repo)
+
        };
+
        setup().map_err(|e: git2::Error| io::Error::new(io::ErrorKind::Other, e))
+
    })?)
+
}
+

+
pub type TmpStorage = WithTmpDir<Storage>;
+

+
pub fn storage(signer: SecretKey) -> TmpStorage {
+
    WithTmpDir::new(|path| -> Result<_, io::Error> {
+
        let paths = Paths::from_root(path)?;
+
        let storage =
+
            Storage::open(&paths, signer).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+
        Ok::<_, io::Error>(storage)
+
    })
+
    .unwrap()
+
}
added test/it-helpers/src/working_copy.rs
@@ -0,0 +1,301 @@
+
use git_ref_format::{lit, name, refspec, Qualified, RefStr, RefString};
+

+
use librad::{
+
    git::{
+
        local::url::LocalUrl,
+
        types::{
+
            remote::{LocalFetchspec, LocalPushspec},
+
            Fetchspec,
+
            Force,
+
            Refspec,
+
            Remote,
+
        },
+
    },
+
    git_ext as ext,
+
    net::{peer::Peer, protocol::RequestPullGuard},
+
    refspec_pattern,
+
    PeerId,
+
    Signer,
+
};
+

+
use crate::fixed::TestProject;
+

+
/// A remote in the working copy.
///
/// See [`WorkingCopy`] for how these map onto remote names in the working
/// copy.
pub enum WorkingRemote {
    /// A remote representing a remote peer, named `PeerId::encode_id`
    Peer(PeerId),
    /// A remote representing the local peer, named "rad"
    Rad,
}
+

+
impl From<PeerId> for WorkingRemote {
    /// A `PeerId` always denotes a remote peer's remote.
    fn from(p: PeerId) -> Self {
        Self::Peer(p)
    }
}
+

+
impl WorkingRemote {
    /// The (forced) fetchspec used when pulling this remote's view of the
    /// project from the monorepo into the working copy.
    fn fetchspec(&self) -> Fetchspec {
        match self {
            Self::Peer(peer_id) => {
                // `refs/remotes/<peer id>/heads/*` -> `refs/remotes/<peer id>/*`
                let name = RefString::try_from(format!("{}", peer_id)).expect("peer is refstring");
                let dst = RefString::from(Qualified::from(lit::refs_remotes(name.clone())))
                    .with_pattern(refspec::STAR);
                let src = RefString::from(Qualified::from(lit::refs_remotes(name)))
                    .and(name::HEADS)
                    .with_pattern(refspec::STAR);
                let refspec = Refspec {
                    src,
                    dst,
                    force: Force::True,
                };
                refspec.into_fetchspec()
            },
            Self::Rad => {
                // `refs/heads/*` -> `refs/remotes/rad/*`
                let name = RefString::try_from("rad").unwrap();
                let src =
                    RefString::from_iter([name::REFS, name::HEADS]).with_pattern(refspec::STAR);
                Refspec {
                    src,
                    dst: RefString::from(Qualified::from(lit::refs_remotes(name)))
                        .with_pattern(refspec::STAR),
                    force: Force::True,
                }
                .into_fetchspec()
            },
        }
    }

    /// `refs/remotes/<remote>/<branch>`, where `<remote>` is "rad" for the
    /// local peer, or the peer id otherwise.
    fn remote_ref(&self, branch: &RefStr) -> RefString {
        let name = match self {
            Self::Rad => name::RAD.to_owned(),
            Self::Peer(peer_id) => {
                RefString::try_from(peer_id.to_string()).expect("peer id is refstring")
            },
        };
        RefString::from(Qualified::from(lit::refs_remotes(name))).join(branch)
    }
}
+

+
/// A `WorkingCopy` for test driving interactions with the monorepo where one
/// needs to update the tree of a project.
///
/// Remotes are named after the peer ID, except in the case of the remote
/// representing the local Peer ID - which is called "rad".
pub struct WorkingCopy<'a, S, G> {
    repo: git2::Repository,
    // Held only to keep the temporary directory backing `repo` alive.
    _repo_path: tempfile::TempDir,
    peer: &'a Peer<S, G>,
    project: &'a TestProject,
}
+

+
impl<'a, S, G> WorkingCopy<'a, S, G>
where
    S: Signer + Clone,
    G: RequestPullGuard,
{
    /// Create a new working copy. This initializes a git repository and then
    /// fetches the state of the local peer into `refs/remotes/rad/*`.
    pub fn new(
        project: &'a TestProject,
        peer: &'a Peer<S, G>,
    ) -> Result<WorkingCopy<'a, S, G>, anyhow::Error> {
        let repo_path = tempfile::tempdir()?;
        let repo = git2::Repository::init(repo_path.as_ref())?;

        let mut copy = WorkingCopy {
            peer,
            project,
            repo,
            _repo_path: repo_path,
        };
        // Seed `refs/remotes/rad/*` with the local peer's current view.
        copy.fetch(WorkingRemote::Rad)?;
        Ok(copy)
    }

    /// Fetch changes from the monorepo into the working copy. The fetchspec
    /// used depends on the peer ID.
    ///
    /// * If `from` is `WorkingRemote::Peer` then `refs/remotes/<peer
    ///   ID>/refs/*:refs/remotes/<peer ID>/heads/*`
    /// * If `from` is `WorkingRemote::Rad` then
    ///   `refs/heads/*:refs/remotes/rad/*`
    ///
    /// I.e. changes from remote peers end up in a remote called
    /// `PeerId::encode_id` whilst changes from the local peer end up in a
    /// remote called "rad".
    pub fn fetch(&mut self, from: WorkingRemote) -> Result<(), anyhow::Error> {
        let fetchspec = from.fetchspec();
        let url = LocalUrl::from(self.project.project.urn());
        let mut remote = Remote::rad_remote(url, fetchspec);
        let _ = remote.fetch(self.peer.clone(), &self.repo, LocalFetchspec::Configured)?;
        Ok(())
    }

    /// Push changes from `refs/heads/*` to the local peer
    pub fn push(&mut self) -> Result<(), anyhow::Error> {
        let url = LocalUrl::from(self.project.project.urn());
        let name = RefString::try_from("rad").unwrap();
        // The remote is configured with `refs/heads/*:refs/remotes/rad/*`,
        // mirroring `WorkingRemote::Rad`'s fetchspec.
        let fetchspec = Refspec {
            src: RefString::from_iter([name::REFS, name::HEADS]).with_pattern(refspec::STAR),
            dst: RefString::from(Qualified::from(lit::refs_remotes(name)))
                .with_pattern(refspec::STAR),
            force: Force::True,
        }
        .into_fetchspec();
        let mut remote = Remote::rad_remote(url, fetchspec);
        let _ = remote.push(
            self.peer.clone(),
            &self.repo,
            LocalPushspec::Matching {
                pattern: refspec_pattern!("refs/heads/*"),
                force: Force::True,
            },
        )?;
        Ok(())
    }

    /// Create a new commit on top of whichever commit is the head of
    /// `on_branch`. If the branch does not exist this will create it.
    pub fn commit(
        &mut self,
        message: &str,
        on_branch: Qualified,
    ) -> Result<git2::Oid, anyhow::Error> {
        let branch_name = on_branch.non_empty_components().2;
        // A missing branch is fine: the commit below then has no parents and
        // `on_branch` is created pointing at it.
        let parent = match self.repo.find_branch(&branch_name, git2::BranchType::Local) {
            Ok(b) => b.get().target().and_then(|o| self.repo.find_commit(o).ok()),
            Err(e) if ext::error::is_not_found_err(&e) => None,
            Err(e) => return Err(anyhow::Error::from(e)),
        };
        // Tree built from the current index (presumably empty in these test
        // repos, hence the name — TODO confirm).
        let empty_tree = {
            let mut index = self.repo.index()?;
            let oid = index.write_tree()?;
            self.repo.find_tree(oid).unwrap()
        };
        let author = git2::Signature::now("The Animal", "animal@muppets.com").unwrap();
        let parents = match &parent {
            Some(p) => vec![p],
            None => Vec::new(),
        };
        self.repo
            .commit(
                Some(&on_branch),
                &author,
                &author,
                message,
                &empty_tree,
                &parents,
            )
            .map_err(anyhow::Error::from)
    }

    /// Create a commit on `on_branch` (see [`WorkingCopy::commit`]) and
    /// immediately push to the local peer.
    pub fn commit_and_push(
        &mut self,
        message: &str,
        on_branch: Qualified,
    ) -> Result<git2::Oid, anyhow::Error> {
        let id = self.commit(message, on_branch)?;
        self.push()?;
        Ok(id)
    }

    /// Create a branch at `refs/heads/<branch>` which tracks the given remote.
    /// The remote branch name depends on `from`.
    ///
    /// * If `from` is `WorkingCopy::Rad` then `refs/remotes/rad/<branch>`
    /// * If `from` is `WorkingCopy::Peer(peer_id)` then `refs/remotes/<peer
    ///   id>/<branch>`
    pub fn create_remote_tracking_branch(
        &self,
        from: WorkingRemote,
        branch: &RefStr,
    ) -> Result<(), anyhow::Error> {
        let target = self
            .repo
            .find_reference(from.remote_ref(branch).as_str())?
            .target()
            .ok_or_else(|| anyhow::anyhow!("remote ref is not a direct reference"))?;
        let commit = self.repo.find_commit(target)?;
        // force = false: fail rather than clobber an existing branch.
        self.repo.branch(branch.as_str(), &commit, false)?;
        Ok(())
    }

    /// Fast forward the local branch `refs/heads/<branch>` to whatever is
    /// pointed to by `refs/remotes/<remote>/<branch>`
    ///
    /// * If `from` is `WorkingRemote::Peer(peer_id)` then `remote` is
    ///   `peer_id.encode_id()`
    /// * If `from` is `WorkingRemote::Rad` then `remote` is `"rad"`
    ///
    /// # Errors
    ///
    /// * If the local branch does not exist
    /// * If the remote branch does not exist
    /// * If either of the branches does not point at a commit
    /// * If the remote branch is not a descendant of the local branch
    pub fn fast_forward_to(&self, from: WorkingRemote, branch: &RefStr) -> anyhow::Result<()> {
        let remote_ref = from.remote_ref(branch);
        let remote_target = self
            .repo
            .find_reference(&remote_ref)?
            .target()
            .ok_or_else(|| anyhow::anyhow!("remote ref had no target"))?;
        let local_ref = RefString::from(Qualified::from(lit::refs_heads(branch)));
        let local_target = self
            .repo
            .find_reference(&local_ref)?
            .target()
            .ok_or_else(|| anyhow::anyhow!("local ref had no target"))?;
        // Only a true fast-forward is permitted: the remote tip must descend
        // from the local tip.
        if !self.repo.graph_descendant_of(remote_target, local_target)? {
            anyhow::bail!("remote ref was not a descendant of local ref");
        } else {
            self.repo
                .reference(&local_ref, remote_target, true, "fast forward")?;
        }
        Ok(())
    }

    /// Create a new commit which merges `refs/heads/<branch>` and
    /// `refs/remotes/<remote>/<branch>`
    ///
    /// this will create a new commit with two parents, one for the remote
    /// branch and one for the local branch
    ///
    /// # Errors
    ///
    /// * If the remote branch does not exist
    /// * If the local branch does not exist
    /// * If either of the references does not point to a commit
    pub fn merge_remote(&self, remote: PeerId, branch: &RefStr) -> anyhow::Result<git2::Oid> {
        let peer_branch = WorkingRemote::Peer(remote).remote_ref(branch);
        let peer_commit = self
            .repo
            .find_reference(peer_branch.as_ref())?
            .peel_to_commit()?;
        let local_branch = Qualified::from(lit::refs_heads(branch));
        let local_commit = self
            .repo
            .find_reference(local_branch.as_ref())?
            .peel_to_commit()?;

        let message = format!("merge {} into {}", peer_branch, local_branch);
        // The merge is recorded purely via the two parents below; the tree is
        // whatever the current index holds.
        let empty_tree = {
            let mut index = self.repo.index()?;
            let oid = index.write_tree()?;
            self.repo.find_tree(oid).unwrap()
        };
        let author = git2::Signature::now("The Animal", "animal@muppets.com").unwrap();
        let parents = vec![&peer_commit, &local_commit];
        self.repo
            .commit(
                Some(&local_branch),
                &author,
                &author,
                &message,
                &empty_tree,
                &parents,
            )
            .map_err(anyhow::Error::from)
    }
}
added test/src/lib.rs
@@ -0,0 +1,4 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPL-3.0-or-later
+

+
//! This page intentionally left blank.
added test/test-helpers/Cargo.toml
@@ -0,0 +1,33 @@
+
[package]
+
name = "test-helpers"
+
version = "0.1.0"
+
edition = "2021"
+
license = "GPL-3.0-or-later"
+
publish = false
+

+
description = "Test helpers"
+

+
[lib]
+
doctest = false
+
test = false
+

+
[dependencies]
+
env_logger = ">= 0.9"
+
log = ">= 0.4"
+
pretty_assertions = "1.1"
+
serde = "1"
+
serde_json = "1"
+
tempfile = "3.3"
+
tracing = "0.1"
+
proptest = "1"
+

+
# [dependencies.link-canonical]
+
# path = "../../link-canonical"
+

+
[dependencies.minicbor]
+
version = "0.13"
+
features = ["std"]
+

+
[dependencies.tracing-subscriber]
+
version = "0.3.7"
+
features = ["std", "env-filter", "fmt", "json"]
added test/test-helpers/src/gen.rs
@@ -0,0 +1,4 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPLv3-or-later
+

+
/// Generators for `std::net` address types.
pub mod std_net;
added test/test-helpers/src/gen/std_net.rs
@@ -0,0 +1,47 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPLv3-or-later
+

+
use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6};
+

+
use proptest::prelude::*;
+

+
// Strategy producing an arbitrary `Ipv4Addr` from four arbitrary octets.
prop_compose! {
    pub fn gen_ipaddr_v4()
        (a in any::<u8>(),
         b in any::<u8>(),
         c in any::<u8>(),
         d in any::<u8>()) -> Ipv4Addr{
        Ipv4Addr::new(a, b, c, d)
    }
}
+

+
// Strategy producing an arbitrary `Ipv6Addr` from eight arbitrary 16-bit
// segments.
prop_compose! {
    pub fn gen_ipaddr_v6()
        (a in any::<u16>(),
         b in any::<u16>(),
         c in any::<u16>(),
         d in any::<u16>(),
         e in any::<u16>(),
         f in any::<u16>(),
         g in any::<u16>(),
         h in any::<u16>()) -> Ipv6Addr
    {
        Ipv6Addr::new(a, b, c, d, e, f, g, h)
    }
}
+

+
/// Strategy producing an arbitrary IPv4 socket address (any address, any
/// port).
pub fn gen_socket_v4() -> impl Strategy<Value = SocketAddr> {
    any::<u16>().prop_flat_map(move |port| {
        gen_ipaddr_v4().prop_map(move |v4| SocketAddr::V4(SocketAddrV4::new(v4, port)))
    })
}
+

+
/// Strategy producing an arbitrary IPv6 socket address (any address, any
/// port; flowinfo and scope id fixed to 0).
pub fn gen_socket_v6() -> impl Strategy<Value = SocketAddr> {
    any::<u16>().prop_flat_map(move |port| {
        gen_ipaddr_v6().prop_map(move |v6| SocketAddr::V6(SocketAddrV6::new(v6, port, 0, 0)))
    })
}
+

+
/// Strategy producing an arbitrary socket address, either IPv4 or IPv6.
pub fn gen_socket_addr() -> impl Strategy<Value = SocketAddr> {
    prop_oneof![gen_socket_v4(), gen_socket_v6()]
}
added test/test-helpers/src/lib.rs
@@ -0,0 +1,7 @@
+
// Copyright © 2022 The Radicle Link Contributors
+
// SPDX-License-Identifier: GPLv3-or-later
+

+
/// `proptest` generators.
pub mod gen;
/// Logging / tracing initialisation for tests.
pub mod logging;
/// Round-trip (encode → decode) assertion helpers.
pub mod roundtrip;
/// `WithTmpDir`: values tied to a temporary directory's lifetime.
pub mod tempdir;
added test/test-helpers/src/logging.rs
@@ -0,0 +1,56 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::env;
+

+
use log::{log_enabled, Level};
+
use tracing::subscriber::set_global_default as set_subscriber;
+
use tracing_subscriber::{EnvFilter, FmtSubscriber};
+

+
/// Initialise logging / tracing
///
/// Note that this will capture logs, so they can be output as part of the test
/// output. Use `RUST_LOG` with care, as this may create unwanted memory
/// pressure. Note, however, that if `RUST_LOG` is not set, we set the level to
/// `debug` by default in order to surface errors on CI.
///
/// The `TRACING_FMT` environment variable can be used to control the log
/// formatting. Supported values:
///
/// * "pretty": [`tracing_subscriber::fmt::format::Pretty`]
/// * "compact": [`tracing_subscriber::fmt::format::Compact`]
/// * "json": [`tracing_subscriber::fmt::format::Json`]
///
/// If the variable is not set, or set to any other value, the
/// [`tracing_subscriber::fmt::format::Full`] format is used.
pub fn init() {
    // `try_init` fails if a logger is already installed, making `init`
    // idempotent across tests.
    if env_logger::builder().is_test(true).try_init().is_ok() {
        if env::var("RUST_LOG").is_err() {
            env::set_var("RUST_LOG", "debug");
        }

        let mut builder = FmtSubscriber::builder()
            .with_env_filter(EnvFilter::from_default_env())
            .with_test_writer();
        // NOTE(review): thread ids are only enabled for trace-level librad
        // logging — presumably to correlate multi-threaded output.
        if log_enabled!(target: "librad", Level::Trace) {
            builder = builder.with_thread_ids(true);
        } else if env::var("TRACING_FMT").is_err() {
            // Default to "compact" on CI, "pretty" otherwise.
            let default_format = if env::var("CI").is_ok() {
                "compact"
            } else {
                "pretty"
            };
            env::set_var("TRACING_FMT", default_format);
        }

        match env::var("TRACING_FMT").ok().as_deref() {
            Some("pretty") => set_subscriber(builder.pretty().finish()),
            Some("compact") => set_subscriber(builder.compact().finish()),
            Some("json") => set_subscriber(builder.json().flatten_event(true).finish()),
            _ => set_subscriber(builder.finish()),
        }
        .expect("setting tracing subscriber failed")
    }
}
added test/test-helpers/src/roundtrip.rs
@@ -0,0 +1,36 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    fmt::{Debug, Display},
+
    str::FromStr,
+
};
+

+
use pretty_assertions::assert_eq;
+

+
pub fn json<A>(a: A)
+
where
+
    for<'de> A: Debug + PartialEq + serde::Serialize + serde::Deserialize<'de>,
+
{
+
    assert_eq!(
+
        a,
+
        serde_json::from_str(&serde_json::to_string(&a).unwrap()).unwrap()
+
    )
+
}
+

+
pub fn cbor<A>(a: A)
+
where
+
    for<'de> A: Debug + PartialEq + minicbor::Encode + minicbor::Decode<'de>,
+
{
+
    assert_eq!(a, minicbor::decode(&minicbor::to_vec(&a).unwrap()).unwrap())
+
}
+

+
/// Assert that `a` survives a `Display` → `FromStr` round-trip.
pub fn str<A>(a: A)
where
    A: Debug + PartialEq + Display + FromStr,
    <A as FromStr>::Err: Debug,
{
    let rendered = a.to_string();
    let parsed: A = rendered.parse().unwrap();
    assert_eq!(a, parsed)
}
added test/test-helpers/src/tempdir.rs
@@ -0,0 +1,43 @@
+
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+
//
+
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
+
// Linking Exception. For full terms see the included LICENSE file.
+

+
use std::{
+
    io,
+
    ops::{Deref, DerefMut},
+
    path::Path,
+
};
+

+
use tempfile::{tempdir, TempDir};
+

+
/// A value of type `A` living alongside a temporary directory, which is
/// cleaned up when this value is dropped.
pub struct WithTmpDir<A> {
    // Held only to tie the temp dir's lifetime to `inner`'s.
    _tmp: TempDir,
    inner: A,
}
+

+
impl<A> WithTmpDir<A> {
+
    pub fn new<F, E>(mk_inner: F) -> Result<Self, E>
+
    where
+
        F: FnOnce(&Path) -> Result<A, E>,
+
        E: From<io::Error>,
+
    {
+
        let tmp = tempdir()?;
+
        let inner = mk_inner(tmp.path())?;
+
        Ok(Self { _tmp: tmp, inner })
+
    }
+
}
+

+
// Transparent read access to the wrapped value.
impl<A> Deref for WithTmpDir<A> {
    type Target = A;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
+

+
// Transparent mutable access to the wrapped value.
impl<A> DerefMut for WithTmpDir<A> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}