Compare commits

...

2 Commits

Author SHA1 Message Date
Antonin Ruan c118829c11 feat: remove file from index 2026-03-12 19:18:33 +01:00
Antonin Ruan 3027a99b5f Index parsing and gitignore files 2026-03-11 17:49:08 +01:00
14 changed files with 1145 additions and 267 deletions
Generated
+24
View File
@@ -52,6 +52,12 @@ dependencies = [
"windows-sys", "windows-sys",
] ]
[[package]]
name = "anyhow"
version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]] [[package]]
name = "block-buffer" name = "block-buffer"
version = "0.10.4" version = "0.10.4"
@@ -152,6 +158,12 @@ dependencies = [
"version_check", "version_check",
] ]
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]] [[package]]
name = "heck" name = "heck"
version = "0.5.0" version = "0.5.0"
@@ -164,6 +176,15 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "injectorpp"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d377a64bbe42f7a086ed630fbc66d84b43944f278ef42de53af79aaec6c21687"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "is_terminal_polyfill" name = "is_terminal_polyfill"
version = "1.70.2" version = "1.70.2"
@@ -204,8 +225,11 @@ dependencies = [
name = "rgit" name = "rgit"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"clap", "clap",
"glob",
"hex", "hex",
"injectorpp",
"sha1", "sha1",
"zlib-rs", "zlib-rs",
] ]
+5 -3
View File
@@ -4,11 +4,13 @@ version = "0.1.0"
description = "Git implementation in Rust" description = "Git implementation in Rust"
edition = "2024" edition = "2024"
[toolchain]
channel = "nightly"
[dependencies] [dependencies]
anyhow = "1.0.102"
clap = { version = "4.5.59", features = ["derive"] } clap = { version = "4.5.59", features = ["derive"] }
glob = "0.3.3"
hex = "0.4.3" hex = "0.4.3"
sha1 = "0.10.6" sha1 = "0.10.6"
zlib-rs = "0.6.1" zlib-rs = "0.6.1"
[dev-dependencies]
injectorpp = "0.4.0"
+2
View File
@@ -0,0 +1,2 @@
[toolchain]
channel = "nightly"
+395
View File
@@ -0,0 +1,395 @@
use anyhow::{Context, Result};
use glob::{MatchOptions, Pattern};
use std::{
fmt::Debug,
path::{Component, Path, PathBuf},
};
use crate::{
GIT_DIR,
git_fs::{normalize_path_in_worktree, read_lines},
};
use super::get_worktree_root;
/// Glob options used for every gitignore pattern match:
/// case-sensitive, `*`/`?` never cross a path separator, and a leading
/// `.` does not need to be matched literally.
const MATCH_OPTIONS: MatchOptions = MatchOptions {
    require_literal_separator: true,
    require_literal_leading_dot: false,
    case_sensitive: true,
};
/// A single parsed `.gitignore` rule.
#[derive(Clone)]
struct GitIgnoreRule {
    // Glob pattern the rule matches against.
    pattern: Pattern,
    // Rule started with `!` (negation). Parsed but currently never
    // applied anywhere — hence the underscore prefix.
    _inverted: bool,
    // Rule ended with a trailing `/`: it only matches directories.
    only_dirs: bool,
    // Rule is anchored (started with `/` or contains a `/`): it is
    // matched against the whole worktree-relative path instead of
    // against individual path components.
    relative: bool,
}
impl PartialEq for GitIgnoreRule {
    /// Two rules are equal when every parsed field agrees.
    fn eq(&self, other: &Self) -> bool {
        (&self.pattern, self._inverted, self.only_dirs, self.relative)
            == (&other.pattern, other._inverted, other.only_dirs, other.relative)
    }
}
impl Debug for GitIgnoreRule {
    /// Same output shape as a derived impl, but the pattern is shown as
    /// its source string instead of the parsed `Pattern` internals.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut dbg = f.debug_struct("GitIgnoreRule");
        dbg.field("pattern", &self.pattern.as_str());
        dbg.field("_inverted", &self._inverted);
        dbg.field("only_dirs", &self.only_dirs);
        dbg.field("relative", &self.relative);
        dbg.finish()
    }
}
impl Default for GitIgnoreRule {
    /// A neutral rule: empty pattern, not negated, matches files as well
    /// as directories, and is not anchored.
    fn default() -> Self {
        // The empty string is always a valid glob, so unwrap cannot fail.
        let pattern = Pattern::new("").unwrap();
        Self {
            pattern,
            relative: false,
            only_dirs: false,
            _inverted: false,
        }
    }
}
impl GitIgnoreRule {
    /// Returns true when this rule applies to `path`.
    ///
    /// Directory-only rules never match anything that is not an existing
    /// directory on disk. Anchored (`relative`) rules are matched against
    /// the whole path; unanchored rules match if ANY normal path
    /// component matches the pattern.
    fn match_path(&self, path: &Path) -> bool {
        if self.only_dirs && !path.is_dir() {
            return false;
        }
        if self.relative {
            self.pattern.matches_path_with(path, MATCH_OPTIONS)
        } else {
            // Non-UTF-8 components can never match and are skipped.
            path.components().any(|comp| match comp {
                Component::Normal(os_str) => os_str
                    .to_str()
                    .map_or(false, |s| self.pattern.matches_with(s, MATCH_OPTIONS)),
                _ => false,
            })
        }
    }
}
/// Returns a vector of PathBuf containing files expanded from a given path.
/// Returned files are also filtered according to gitignore rules.
///
/// # Errors
/// Fails when the worktree root cannot be located or when `path` lies
/// outside the worktree.
pub fn expands_and_filter_path(path: PathBuf) -> Result<Vec<PathBuf>> {
    // Absolute rules: the git directory itself is never tracked.
    let mut rules = vec![
        GitIgnoreRule {
            pattern: Pattern::new(GIT_DIR).unwrap(),
            relative: true,
            only_dirs: true,
            _inverted: false,
        },
        GitIgnoreRule {
            pattern: Pattern::new(".git").unwrap(),
            relative: true,
            only_dirs: true,
            _inverted: false,
        },
    ];
    let wt_root = get_worktree_root()?;
    // Rules from the worktree root .gitignore (a missing file is fine).
    if let Ok(r) = load_rules(&wt_root) {
        rules.extend(r)
    };
    // Walk from the worktree root down towards `path`, collecting the
    // .gitignore of every intermediate directory. The lookup directory
    // must be ACCUMULATED: for `a/b` the files are `<root>/a/.gitignore`
    // and `<root>/a/b/.gitignore`. (Previously each component was joined
    // directly to the root, so nested .gitignore files were read from the
    // wrong location, e.g. `<root>/b/.gitignore`.)
    let mut dir = wt_root;
    for comp in normalize_path_in_worktree(&path)?
        .components()
        .filter_map(|c| match c {
            Component::Normal(os_str) => Some(os_str),
            _ => None,
        })
    {
        dir.push(comp);
        if let Ok(r) = load_rules(&dir) {
            rules.extend(r)
        };
    }
    // `rules` is not used afterwards, so it can be moved instead of cloned.
    let expanded = expand_and_filter(path, rules);
    Ok(expanded)
}
/// Recursively expands `path`, dropping anything matched by a rule.
///
/// A path matched by any inherited rule is pruned (directories are pruned
/// whole, without descending). Directories contribute their own local
/// `.gitignore` rules to the set passed down to their children; unreadable
/// directory entries are silently skipped.
fn expand_and_filter(path: PathBuf, upper_rules: Vec<GitIgnoreRule>) -> Vec<PathBuf> {
    if upper_rules
        .iter()
        .any(|rule| rule.match_path(&normalize_path_in_worktree(&path).unwrap()))
    {
        return Vec::new();
    }
    if !path.is_dir() {
        return vec![path];
    }
    // A directory's own .gitignore takes effect for everything below it.
    let mut rules = load_rules(&path).unwrap_or_default();
    rules.extend(upper_rules);
    let mut files = Vec::new();
    for child in path.read_dir().expect("read_dir call failed").flatten() {
        files.extend(expand_and_filter(child.path(), rules.clone()));
    }
    files
}
/// Parses one raw `.gitignore` line into a rule, relative to the
/// worktree-relative directory `rel_to` of the file it came from.
///
/// Returns `None` for comment lines, blank lines, and patterns that fail
/// to compile as globs.
fn parse_rule(raw: String, rel_to: &Path) -> Option<GitIgnoreRule> {
    // NOTE(review): the comment check runs on the UNtrimmed line, but the
    // emptiness check runs after trim() — so "  # x" is treated as a
    // pattern, not a comment. Confirm this asymmetry is intended.
    if raw.starts_with("#") {
        return None;
    }
    let mut trimmed = raw.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Leading `!` marks a negated rule (recorded but not yet applied).
    let inverted = trimmed.starts_with("!");
    if inverted {
        trimmed = &trimmed[1..];
    }
    // Trailing `/` restricts the rule to directories.
    let only_dirs = trimmed.ends_with("/");
    if only_dirs {
        trimmed = &trimmed[..trimmed.len() - 1]
    }
    // Leading `/` anchors the rule to the .gitignore's own directory.
    let start_relative = trimmed.starts_with("/");
    if start_relative {
        trimmed = &trimmed[1..];
    }
    // Any remaining `/` also makes the pattern anchored, per gitignore
    // semantics.
    let relative = start_relative || trimmed.contains("/");
    let pattern_str = if relative {
        // Prefix anchored patterns with the .gitignore's directory so they
        // match against full worktree-relative paths.
        // NOTE(review): `with_trailing_sep` is a nightly-only std API
        // (the crate pins the nightly toolchain) — confirm a feature gate
        // exists, and that non-UTF-8 `rel_to` (unwrap on to_str) is
        // impossible here.
        String::from(rel_to.with_trailing_sep().to_str().unwrap()) + trimmed
    } else {
        String::from(trimmed)
    };
    // Invalid globs are silently dropped rather than reported.
    let pattern = Pattern::new(&pattern_str).ok();
    pattern.map(|pattern| GitIgnoreRule {
        pattern,
        _inverted: inverted,
        only_dirs,
        relative,
    })
}
/// Loads and parses the `.gitignore` file located directly inside `rel_to`.
///
/// # Errors
/// Fails when the file cannot be opened or when `rel_to` cannot be
/// normalized inside the worktree. Unreadable lines and unparsable
/// patterns are skipped, not reported.
fn load_rules(rel_to: &Path) -> Result<Vec<GitIgnoreRule>> {
    let gitignore = rel_to.join(".gitignore");
    // Patterns are anchored relative to the directory holding the file.
    let base = normalize_path_in_worktree(rel_to)?;
    let lines = read_lines(&gitignore)
        .with_context(|| format!("Reading from {}", gitignore.display()))?;
    let mut rules = Vec::new();
    for line in lines.map_while(Result::ok) {
        if let Some(rule) = parse_rule(line, &base) {
            rules.push(rule);
        }
    }
    Ok(rules)
}
#[cfg(test)]
mod tests {
    use super::*;
    use injectorpp::interface::injector::*;
    use std::path::Path;

    // Parsing of raw gitignore lines with an empty base directory.
    #[test]
    fn test_parse_rule() {
        let rel_to = Path::new("").to_path_buf();
        // Comments and blank/whitespace-only lines yield no rule.
        assert!(parse_rule(String::from("# Some comment"), &rel_to).is_none());
        assert!(parse_rule(String::from(""), &rel_to).is_none());
        assert!(parse_rule(String::from(" "), &rel_to).is_none());
        // (raw line, expected parsed rule) pairs covering the dir-only,
        // anchored, and negated flag combinations.
        let test_data = [
            (
                "absolute_dir/",
                GitIgnoreRule {
                    pattern: Pattern::new("absolute_dir").unwrap(),
                    only_dirs: true,
                    _inverted: false,
                    relative: false,
                },
            ),
            (
                "relative/dir/",
                GitIgnoreRule {
                    pattern: Pattern::new("relative/dir").unwrap(),
                    only_dirs: true,
                    _inverted: false,
                    relative: true,
                },
            ),
            (
                "absolute_file",
                GitIgnoreRule {
                    pattern: Pattern::new("absolute_file").unwrap(),
                    only_dirs: false,
                    _inverted: false,
                    relative: false,
                },
            ),
            (
                "relative/file",
                GitIgnoreRule {
                    pattern: Pattern::new("relative/file").unwrap(),
                    only_dirs: false,
                    _inverted: false,
                    relative: true,
                },
            ),
            (
                "!_inverted",
                GitIgnoreRule {
                    pattern: Pattern::new("_inverted").unwrap(),
                    only_dirs: false,
                    _inverted: true,
                    relative: false,
                },
            ),
            (
                "/relative",
                GitIgnoreRule {
                    pattern: Pattern::new("relative").unwrap(),
                    only_dirs: false,
                    _inverted: false,
                    relative: true,
                },
            ),
            (
                "!/relative",
                GitIgnoreRule {
                    pattern: Pattern::new("relative").unwrap(),
                    only_dirs: false,
                    _inverted: true,
                    relative: true,
                },
            ),
        ];
        for (raw, expected) in test_data {
            let rule = parse_rule(String::from(raw), &rel_to).unwrap();
            // NOTE(review): debug_assert_eq! is compiled out when debug
            // assertions are off; plain assert_eq! would be safer in tests.
            debug_assert_eq!(rule, expected);
        }
    }

    // Anchored patterns get the .gitignore's directory prepended;
    // unanchored ones do not.
    #[test]
    fn test_parse_rule_with_rel_to() {
        let rel_to = Path::new("src").to_path_buf();
        let test_data = [
            (
                "/some_file.c",
                GitIgnoreRule {
                    pattern: Pattern::new("src/some_file.c").unwrap(),
                    only_dirs: false,
                    _inverted: false,
                    relative: true,
                },
            ),
            (
                "some_file.c",
                GitIgnoreRule {
                    pattern: Pattern::new("some_file.c").unwrap(),
                    only_dirs: false,
                    _inverted: false,
                    relative: false,
                },
            ),
        ];
        for (raw, expected) in test_data {
            let rule = parse_rule(String::from(raw), &rel_to).unwrap();
            // NOTE(review): see test_parse_rule about debug_assert_eq!.
            debug_assert_eq!(rule, expected);
        }
    }

    // Unanchored rules match when ANY single path component matches,
    // including glob wildcards, regardless of depth.
    #[test]
    fn test_rule_matching_non_relative() {
        let rule1 = GitIgnoreRule {
            pattern: Pattern::new("some_file.c").unwrap(),
            ..Default::default()
        };
        let rule2 = GitIgnoreRule {
            pattern: Pattern::new("*.o").unwrap(),
            ..Default::default()
        };
        let rule3 = GitIgnoreRule {
            pattern: Pattern::new("some_file.*").unwrap(),
            ..Default::default()
        };
        let path1 = Path::new("some_file.c").to_path_buf();
        let path2 = Path::new("another_file.c").to_path_buf();
        let path3 = Path::new("some_file.o").to_path_buf();
        let path4 = Path::new("another_file.o").to_path_buf();
        let path5 = Path::new("src/some_file.c").to_path_buf();
        assert!(rule1.match_path(&path1));
        assert!(!rule1.match_path(&path2));
        assert!(!rule1.match_path(&path3));
        assert!(!rule1.match_path(&path4));
        // Matches because the final component matches the pattern.
        assert!(rule1.match_path(&path5));
        assert!(!rule2.match_path(&path1));
        assert!(!rule2.match_path(&path2));
        assert!(rule2.match_path(&path3));
        assert!(rule2.match_path(&path4));
        assert!(rule3.match_path(&path1));
        assert!(!rule3.match_path(&path2));
        assert!(rule3.match_path(&path3));
        assert!(!rule3.match_path(&path4));
        assert!(rule3.match_path(&path5));
    }

    // Anchored rules match against the whole path only, so the same
    // pattern does not fire when nested under another directory.
    #[test]
    fn test_rule_matching_relative() {
        let rule1 = GitIgnoreRule {
            pattern: Pattern::new("some_file").unwrap(),
            relative: true,
            ..Default::default()
        };
        let rule2 = GitIgnoreRule {
            pattern: Pattern::new("src/ignored").unwrap(),
            relative: true,
            ..Default::default()
        };
        let path1 = Path::new("some_file").to_path_buf();
        let path2 = Path::new("target/some_file").to_path_buf();
        let path3 = Path::new("src/ignored").to_path_buf();
        let path4 = Path::new("target/src/ignored").to_path_buf();
        assert!(rule1.match_path(&path1));
        assert!(!rule1.match_path(&path2));
        assert!(rule2.match_path(&path3));
        assert!(!rule2.match_path(&path4));
    }

    // Directory-only rules consult the filesystem via Path::is_dir;
    // injectorpp is used to mock it so no real directory is needed.
    #[test]
    fn test_rule_matching_dir() {
        let rule1 = GitIgnoreRule {
            pattern: Pattern::new("some_folder").unwrap(),
            only_dirs: true,
            ..Default::default()
        };
        let path1 = Path::new("some_folder").to_path_buf();
        // The path does not exist on disk, so is_dir() is false.
        assert!(!rule1.match_path(&path1));
        let mut injector = InjectorPP::new();
        injector
            .when_called(injectorpp::func!(fn (Path::is_dir)(&Path) -> bool))
            .will_return_boolean(true);
        // With is_dir mocked to true, the same rule now matches.
        assert!(rule1.match_path(&path1));
    }
}
+19 -28
View File
@@ -1,11 +1,7 @@
use std::{ use anyhow::{Result, bail};
fs::File, use std::{fs::File, io::prelude::*};
io::prelude::*,
path::Path,
};
use crate::GIT_DIR; use crate::git_fs::{get_git_root, read_lines};
use crate::git_fs::read_lines;
#[derive(Debug)] #[derive(Debug)]
pub struct Head { pub struct Head {
@@ -13,39 +9,34 @@ pub struct Head {
} }
impl Head { impl Head {
pub fn load() -> Result<Self, ()> { pub fn load() -> Result<Self> {
let path = Path::new(GIT_DIR).join("HEAD"); let path = get_git_root()?.join("HEAD");
let mut lines = match read_lines(path) { let mut lines = read_lines(path)?;
Err(_) => return Err(()),
Ok(lines) => lines,
};
let content = match lines.next() { let content = match lines.next() {
Some(Ok(line)) => line, Some(line) => line?,
_ => return Err(()), _ => bail!("HEAD is empty"),
}; };
match content.split_once(": ") { match content.split_once(": ") {
None => Err(()), None => bail!("HEAD has invalid format"),
Some((_, r)) => Ok(Head { ref_to: String::from(r) }) Some((_, r)) => Ok(Head {
ref_to: String::from(r),
}),
} }
} }
pub fn save(&self) -> Result<(), ()> { pub fn save(&self) -> Result<()> {
let path = Path::new(GIT_DIR).join("HEAD"); let path = get_git_root()?.join("HEAD");
let mut content = String::from("ref: "); let mut content = String::from("ref: ");
content.push_str(&self.ref_to); content.push_str(&self.ref_to);
content.push_str("\n"); content.push('\n');
let mut file = match File::create(&path) { let mut file = File::create(&path)?;
Err(_) => return Err(()),
Ok(file) => file,
};
match file.write(content.as_bytes()) { file.write_all(content.as_bytes())?;
Err(_) => Err(()),
Ok(_) => Ok(()), Ok(())
}
} }
} }
+442 -37
View File
@@ -1,56 +1,461 @@
use std::{ use anyhow::Result;
io::{ use anyhow::bail;
self, use sha1::{Digest, Sha1};
Error, use std::fs;
}, use std::{ffi::CStr, fmt::Display, fs::File, io::prelude::*, os::unix::fs::PermissionsExt};
};
use crate::GIT_DIR; use crate::git_fs::get_git_root;
#[derive(Debug)] #[derive(Clone, Debug, PartialEq)]
pub enum ObjectType {
Regular,
SymLink,
GitLink,
}
impl Display for ObjectType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self {
Self::Regular => write!(f, "Regular"),
Self::SymLink => write!(f, "Symlink"),
Self::GitLink => write!(f, "Gitlink"),
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct IndexEntry { pub struct IndexEntry {
file_size: u32, object_type: ObjectType,
permissions: u16,
hash: [u8; 20], hash: [u8; 20],
name: String, name: String,
} }
impl IndexEntry { impl IndexEntry {
fn from_bytes(bytes: Vec<u8>) -> io::Result<Self> { fn from_bytes(bytes: &[u8]) -> Result<(Self, usize)> {
let mode = &bytes[24..28];
let ot_bin = mode[2] >> 4;
let object_type = match ot_bin {
0b1000 => ObjectType::Regular,
0b1010 => ObjectType::SymLink,
0b1110 => ObjectType::GitLink,
_ => bail!("Invalid object type: {}", ot_bin),
};
let permissions = u16::from_be_bytes(mode[2..4].try_into()?) & !0xFE00;
match (permissions, &object_type) {
(0, ObjectType::GitLink) => (),
(0, ObjectType::SymLink) => (),
(0o755, ObjectType::Regular) => (),
(0o644, ObjectType::Regular) => (),
_ => bail!(
"Invalid permissions (0o{:o}) for type {}",
permissions,
object_type
),
};
return Err(Error::other("Not implemented")) let hash: [u8; 20] = bytes[40..60].try_into()?;
let cname = CStr::from_bytes_until_nul(&bytes[62..])?;
let name = String::from(cname.to_str()?);
let entry_size = usize::div_ceil(62 + cname.count_bytes() + 1, 8) * 8;
Ok((
IndexEntry {
object_type,
permissions,
hash,
name,
},
entry_size,
))
}
fn to_bytes(&self) -> Result<Vec<u8>> {
let mut bytes = Vec::new();
// ctime
bytes.extend([0, 0, 0, 0]);
// ctime nano
bytes.extend([0, 0, 0, 0]);
// mtime
bytes.extend([0, 0, 0, 0]);
// mtime nano
bytes.extend([0, 0, 0, 0]);
// dev
bytes.extend([0, 0, 0, 0]);
// ino
bytes.extend([0, 0, 0, 0]);
// mode
let ot_bin: u16 = match self.object_type {
ObjectType::Regular => 0b1000,
ObjectType::SymLink => 0b1010,
ObjectType::GitLink => 0b1110,
};
let perms: u16 = self.permissions | (ot_bin << 12);
bytes.extend([0, 0]);
bytes.extend(perms.to_be_bytes());
// uid
bytes.extend([0, 0, 0, 0]);
// gid
bytes.extend([0, 0, 0, 0]);
// file size
bytes.extend([0, 0, 0, 0]);
// object name
bytes.extend(self.hash);
// flags
let flags = u16::min(0xFFF, self.name.len() as u16);
bytes.extend(flags.to_be_bytes());
// entry path name
bytes.extend(self.name.as_bytes());
let padding = ((bytes.len() + 1).div_ceil(8) * 8) - bytes.len();
bytes.extend(vec![0; padding]);
Ok(bytes)
} }
} }
#[derive(Debug)] #[derive(Debug, PartialEq)]
pub struct Index { pub struct Index {
version: u32, version: u32,
entries: Vec<IndexEntry>, entries: Vec<IndexEntry>,
} }
impl Index { impl Default for Index {
pub fn from_bytes(bytes: Vec<u8>) -> io::Result<Self> { fn default() -> Self {
let magic: [u8; 4] = match bytes.first_chunk() { Index {
None => return Err(Error::other("Error parsing index")), version: 2,
Some(magic) => *magic, entries: Vec::new(),
}; }
}
match str::from_utf8(&magic) { }
Ok("DIRC") => (),
_ => return Err(Error::other("Invalid index")) impl Index {
}; pub fn load() -> Result<Self> {
let path = get_git_root()?.join("index");
let version_bytes = <[u8; 4]>::try_from(&bytes.as_slice()[4..8]).unwrap();
let version = u32::from_be_bytes(version_bytes); if !path.exists() {
return Ok(Index {
let count_bytes = <[u8; 4]>::try_from(&bytes.as_slice()[8..12]).unwrap(); version: 2u32,
let count = u32::from_be_bytes(count_bytes); entries: Vec::new(),
});
let entries: Vec<IndexEntry> = Vec::with_capacity(usize::try_from(count).unwrap()); }
for i in 0..=count { let mut file = File::open(&path)?;
let mut content: Vec<u8> = Vec::new();
}; file.read_to_end(&mut content)?;
return Ok(Index { version, entries}); Index::from_bytes(content)
}
pub fn save(&self) -> Result<()> {
let path = get_git_root()?.join("index");
let mut file = File::create(&path)?;
file.write_all(&self.to_bytes()?)?;
Ok(())
}
pub fn add_file(&mut self, name: String, hash: [u8; 20]) -> Result<()> {
let metadata = fs::metadata(&name)?;
if metadata.is_dir() {
bail!("Cannot add a directory to index")
};
let object_type = if metadata.is_symlink() {
ObjectType::SymLink
} else {
ObjectType::Regular
};
let permissions = (metadata.permissions().mode() as u16) & !0xFE00;
let entry = IndexEntry {
name,
hash,
object_type,
permissions,
};
self.insert_entry(entry);
Ok(())
}
pub fn remove_file(&mut self, name: String) -> Result<()> {
let entry = IndexEntry {
name: name.clone(),
hash: [0u8; 20],
object_type: ObjectType::Regular,
permissions: 0,
};
match self
.entries
.binary_search_by_key(&entry.name, |a| a.name.clone())
{
Ok(pos) => self.entries.remove(pos),
Err(_) => bail!("No file {} in index", name),
};
Ok(())
}
fn insert_entry(&mut self, entry: IndexEntry) {
match self
.entries
.binary_search_by_key(&entry.name, |e| e.name.clone())
{
Ok(pos) => self.entries[pos] = entry,
Err(pos) => self.entries.insert(pos, entry),
}
}
fn from_bytes(bytes: Vec<u8>) -> Result<Self> {
match &bytes[0..4] {
b"DIRC" => (),
_ => bail!("Invalid index signature"),
};
let version = u32::from_be_bytes(bytes[4..8].try_into()?);
let count = u32::from_be_bytes(bytes[8..12].try_into()?);
let mut entries: Vec<IndexEntry> = Vec::with_capacity(usize::try_from(count)?);
let mut offset = 12;
for _i in 0..count {
let (entry, entry_size) = IndexEntry::from_bytes(&bytes[offset..])?;
offset += entry_size;
entries.push(entry);
}
let mut hasher = Sha1::new();
hasher.update(&bytes[..offset]);
let hash = hasher.finalize();
if bytes[offset..] != *hash {
bail!("Index does not match its checksum")
}
Ok(Index { version, entries })
}
fn to_bytes(&self) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
bytes.extend("DIRC".as_bytes());
bytes.extend(self.version.to_be_bytes());
let count: u32 = self.entries.len() as u32;
bytes.extend(count.to_be_bytes());
for entry in &self.entries {
bytes.extend(entry.to_bytes()?);
}
let mut hasher = Sha1::new();
hasher.update(&bytes);
let hash = hasher.finalize();
bytes.extend(hash);
Ok(bytes)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn index_entry_to_bytes() {
let entry1 = IndexEntry {
object_type: ObjectType::Regular,
permissions: 0o644u16,
hash: [0x19u8; 20],
name: String::from("src/git_fs/head.rs"),
};
let expected1: Vec<u8> = vec![
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x81, 0xa4,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x00, 0x12, 0x73, 0x72, 0x63, 0x2f, 0x67, 0x69, 0x74, 0x5f,
0x66, 0x73, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x2e, 0x72, 0x73, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
];
let entry2 = IndexEntry {
object_type: ObjectType::GitLink,
permissions: 0,
hash: [0x19u8; 20],
name: String::from("src/git_fs/head.r"),
};
let expected2: Vec<u8> = vec![
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE0, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x00, 0x11, 0x73, 0x72, 0x63, 0x2f, 0x67, 0x69, 0x74, 0x5f,
0x66, 0x73, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x2e, 0x72, 0x00,
];
let entry3 = IndexEntry {
object_type: ObjectType::SymLink,
permissions: 0,
hash: [0x19u8; 20],
name: String::from("src/git_fs/head.rst"),
};
let expected3: Vec<u8> = vec![
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xA0, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x00, 0x13, 0x73, 0x72, 0x63, 0x2f, 0x67, 0x69, 0x74, 0x5f,
0x66, 0x73, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x2e, 0x72, 0x73, 0x74, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
];
assert_eq!(entry1.to_bytes().unwrap(), expected1);
assert_eq!(entry2.to_bytes().unwrap(), expected2);
assert_eq!(entry3.to_bytes().unwrap(), expected3);
}
#[test]
fn index_entry_from_bytes() {
let expected1 = IndexEntry {
object_type: ObjectType::Regular,
permissions: 0o644u16,
hash: [0x19u8; 20],
name: String::from("src/git_fs/head.rs"),
};
let (entry1, len1) = IndexEntry::from_bytes(&vec![
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x81, 0xa4,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x00, 0x12, 0x73, 0x72, 0x63, 0x2f, 0x67, 0x69, 0x74, 0x5f,
0x66, 0x73, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x2e, 0x72, 0x73, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
])
.unwrap();
let expected2 = IndexEntry {
object_type: ObjectType::GitLink,
permissions: 0,
hash: [0x19u8; 20],
name: String::from("src/git_fs/head.r"),
};
let (entry2, len2) = IndexEntry::from_bytes(&vec![
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE0, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x00, 0x11, 0x73, 0x72, 0x63, 0x2f, 0x67, 0x69, 0x74, 0x5f,
0x66, 0x73, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x2e, 0x72, 0x00,
])
.unwrap();
let expected3 = IndexEntry {
object_type: ObjectType::SymLink,
permissions: 0,
hash: [0x19u8; 20],
name: String::from("src/git_fs/head.rst"),
};
let (entry3, len3) = IndexEntry::from_bytes(&vec![
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xA0, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19, 0x19,
0x19, 0x19, 0x19, 0x19, 0x00, 0x13, 0x73, 0x72, 0x63, 0x2f, 0x67, 0x69, 0x74, 0x5f,
0x66, 0x73, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x2e, 0x72, 0x73, 0x74, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
])
.unwrap();
assert_eq!(entry1, expected1);
assert_eq!(entry2, expected2);
assert_eq!(entry3, expected3);
assert_eq!(len1, 88);
assert_eq!(len2, 80);
assert_eq!(len3, 88);
}
#[test]
fn index_insert_entry() {
let mut index = Index::default();
let entry1 = IndexEntry {
object_type: ObjectType::Regular,
permissions: 0o644u16,
hash: [0x19u8; 20],
name: String::from("a"),
};
let entry2 = IndexEntry {
object_type: ObjectType::Regular,
permissions: 0o644u16,
hash: [0x19u8; 20],
name: String::from("b"),
};
let entry3 = IndexEntry {
object_type: ObjectType::SymLink,
permissions: 0,
hash: [0x19u8; 20],
name: String::from(".hello"),
};
let entry4 = IndexEntry {
object_type: ObjectType::Regular,
permissions: 0o644u16,
hash: [0x20u8; 20],
name: String::from("b"),
};
index.insert_entry(entry1.clone());
index.insert_entry(entry2.clone());
index.insert_entry(entry3.clone());
index.insert_entry(entry4.clone());
let expected = Index {
entries: vec![entry3, entry1, entry4],
..Default::default()
};
assert_eq!(index, expected);
}
#[test]
fn index_remove_entry() {
let entry1 = IndexEntry {
object_type: ObjectType::Regular,
permissions: 0o644u16,
hash: [0x19u8; 20],
name: String::from("a"),
};
let entry2 = IndexEntry {
object_type: ObjectType::Regular,
permissions: 0o644u16,
hash: [0x19u8; 20],
name: String::from("b"),
};
let entry3 = IndexEntry {
object_type: ObjectType::SymLink,
permissions: 0,
hash: [0x19u8; 20],
name: String::from(".hello"),
};
let mut index = Index {
entries: vec![entry3.clone(), entry1, entry2.clone()],
..Default::default()
};
index.remove_file(String::from("a")).unwrap();
let expected = Index {
entries: vec![entry3, entry2],
..Default::default()
};
assert_eq!(index, expected);
} }
} }
+60 -31
View File
@@ -1,62 +1,91 @@
use anyhow::{Result, bail};
use std::{ use std::{
fs::File, fs::File,
io::{ io::{self, BufRead, prelude::*},
self, prelude::*, BufRead path::{Path, PathBuf},
}, path::Path, result
}; };
use crate::GIT_DIR; use crate::GIT_DIR;
pub mod gitignore;
pub mod head; pub mod head;
pub mod index; pub mod index;
pub mod object; pub mod object;
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>> fn read_lines<P>(filename: P) -> Result<io::Lines<io::BufReader<File>>>
where P: AsRef<Path> { where
P: AsRef<Path>,
{
let file = File::open(filename)?; let file = File::open(filename)?;
Ok(io::BufReader::new(file).lines()) Ok(io::BufReader::new(file).lines())
} }
pub fn normalize_path_in_worktree(path: &Path) -> Result<PathBuf> {
let wt_root = get_worktree_root()?;
let canon = path.canonicalize()?;
let mut canon_it = canon.components();
for wt_c in wt_root.components() {
if let Some(c) = canon_it.next()
&& wt_c == c
{
continue;
} else {
bail!(
"'{}' is outside repository at '{}'",
path.display(),
wt_root.display()
);
}
}
Ok(canon_it.as_path().to_path_buf())
}
pub fn get_worktree_root() -> Result<PathBuf> {
let canon = Path::new(".").canonicalize()?;
for ancestor in canon.ancestors() {
let potential = ancestor.join(GIT_DIR);
if potential.is_dir() {
return Ok(ancestor.to_path_buf());
}
}
bail!("not a git repository (or any parent up to mount point)")
}
pub fn get_git_root() -> Result<PathBuf> {
Ok(get_worktree_root()?.join(GIT_DIR))
}
pub struct Ref { pub struct Ref {
ref_name: String, ref_name: String,
commit_hash: String, commit_hash: String,
} }
impl Ref { impl Ref {
pub fn load(name: String) -> result::Result<Self, ()> { pub fn load(name: String) -> Result<Self> {
let path = Path::new(GIT_DIR).join(&name); let path = get_git_root()?.join(&name);
let mut lines = match read_lines(path) { let mut lines = read_lines(path)?;
Ok(lines) => lines,
Err(_) => return Err(()),
};
let content = match lines.next() { let content = match lines.next() {
Some(Ok(line)) => line, Some(line) => line?,
_ => return Err(()), _ => bail!(""),
}; };
Ok( Ok(Ref {
Ref { ref_name: name,
ref_name: name, commit_hash: content,
commit_hash: content })
}
)
} }
pub fn save(&self) -> result::Result<(), ()> { pub fn save(&self) -> Result<()> {
let path = Path::new(GIT_DIR) let path = get_git_root()?.join("refs").join(&self.ref_name);
.join("refs")
.join(&self.ref_name);
let mut file = match File::create(&path) { let mut file = File::create(&path)?;
Err(_) => return Err(()),
Ok(file) => file,
};
match file.write(self.commit_hash.as_bytes()) { file.write_all(self.commit_hash.as_bytes())?;
Err(_) => Err(()),
Ok(_) => Ok(()), Ok(())
}
} }
} }
+61 -76
View File
@@ -1,19 +1,17 @@
use anyhow::Result;
use anyhow::bail;
use hex::encode; use hex::encode;
use sha1::{Sha1, Digest}; use sha1::{Digest, Sha1};
use std::{ use std::{
fs::{create_dir_all, File}, fs::{File, create_dir_all},
io::{ io::prelude::*,
self,
prelude::*,
Error,
},
path::Path, path::Path,
}; };
use zlib_rs::{ use zlib_rs::{
compress_bound, compress_slice, decompress_slice, DeflateConfig, InflateConfig, ReturnCode DeflateConfig, InflateConfig, ReturnCode, compress_bound, compress_slice, decompress_slice,
}; };
use crate::GIT_DIR; use crate::git_fs::get_git_root;
#[derive(Debug)] #[derive(Debug)]
pub enum GitObjectType { pub enum GitObjectType {
@@ -22,17 +20,14 @@ pub enum GitObjectType {
} }
impl GitObjectType { impl GitObjectType {
pub fn load(name: String) -> io::Result<GitObjectType> { pub fn load(name: String) -> Result<GitObjectType> {
let prefix = &name[..2]; let prefix = &name[..2];
let suffix = &name[2..]; let suffix = &name[2..];
let path = Path::new(GIT_DIR) let path = get_git_root()?.join("objects").join(prefix).join(suffix);
.join("objects")
.join(prefix)
.join(suffix);
if !path.exists() { if !path.exists() {
return Err(Error::other("Object does not exists")) bail!("Object {} does not exists", name);
} }
let mut file = File::open(path)?; let mut file = File::open(path)?;
@@ -44,23 +39,17 @@ impl GitObjectType {
match decompress_slice(&mut header_buf, &content, InflateConfig::default()) { match decompress_slice(&mut header_buf, &content, InflateConfig::default()) {
(_, ReturnCode::Ok) => (), (_, ReturnCode::Ok) => (),
(_, ReturnCode::BufError) => (), (_, ReturnCode::BufError) => (),
_ => return Err(Error::other("Error while decompressing")) _ => bail!("Error while decompressing"),
}; };
let header = match str::from_utf8(&header_buf) { let header = match str::from_utf8(&header_buf)?.split_once("\0") {
Ok(s) => match s.split_once("\0") { None => bail!("Object invalid format, did not found '\0'"),
None => return Err(Error::other("Invalid format")), Some((header, _)) => header,
Some((header, _)) => header,
}
Err(_) => return Err(Error::other("Utf-8 error"))
}; };
let (t, size) = match header.split_once(" ") { let (t, size) = match header.split_once(" ") {
None => return Err(Error::other("Invalid format")), None => bail!("Object head is invalid, did not found ' '"),
Some((t, s_str)) => match s_str.parse::<usize>() { Some((t, s_str)) => (t, s_str.parse::<usize>()?),
Ok(s) => (t, s),
_ => return Err(Error::other("Invalid format")),
}
}; };
let header_size = header.len() + 1; let header_size = header.len() + 1;
let total_size = size + header_size; let total_size = size + header_size;
@@ -69,42 +58,41 @@ impl GitObjectType {
match decompress_slice(&mut deflated, &content, InflateConfig::default()) { match decompress_slice(&mut deflated, &content, InflateConfig::default()) {
(_, ReturnCode::Ok) => (), (_, ReturnCode::Ok) => (),
_ => return Err(Error::other("Error while decompressing")) _ => bail!("Error while decompressing {}", name),
} }
let obj_bytes = deflated[header_size..].to_vec(); let obj_bytes = deflated[header_size..].to_vec();
match t { match t {
"blob" => Ok(GitObjectType::Blob(Blob{ "blob" => Ok(GitObjectType::Blob(Blob {
size, size,
content: obj_bytes, content: obj_bytes,
})), })),
"tree" => Ok(GitObjectType::Tree(Tree::from_bytes(obj_bytes)?)), "tree" => Ok(GitObjectType::Tree(Tree::from_bytes(obj_bytes)?)),
_ => Err(Error::other("Invalid type")) _ => bail!("Invalid object type: {}", t),
} }
} }
} }
pub trait GitObject { pub trait GitObject {
fn raw(&self) -> Vec<u8>; fn raw(&self) -> Vec<u8>;
fn hash(&self, write: bool) -> io::Result<Vec<u8>>; fn hash(&self, write: bool) -> Result<Vec<u8>>;
fn save(&self, hash: &String, bytes: Vec<u8>) -> io::Result<()> { fn save(&self, hash: &str, bytes: Vec<u8>) -> Result<()> {
let mut compressed_buf = vec![0u8; compress_bound(bytes.len())]; let mut compressed_buf = vec![0u8; compress_bound(bytes.len())];
let compressed = match compress_slice(&mut compressed_buf, &bytes, DeflateConfig::default()) { let compressed = match compress_slice(&mut compressed_buf, &bytes, DeflateConfig::default())
{
(compressed, ReturnCode::Ok) => compressed, (compressed, ReturnCode::Ok) => compressed,
(_, _) => return Err(Error::other("Error while compressing objects")), (_, _) => bail!("Error while compressing object {}", hash),
}; };
let prefix = &hash[..2]; let prefix = &hash[..2];
let suffix = &hash[2..]; let suffix = &hash[2..];
let path = Path::new(GIT_DIR) let path = get_git_root()?.join("objects").join(prefix);
.join("objects")
.join(prefix);
create_dir_all(&path)?; create_dir_all(&path)?;
let mut file = File::create(&path.join(suffix))?; let mut file = File::create(path.join(suffix))?;
file.write_all(&compressed)?; file.write_all(compressed)?;
Ok(()) Ok(())
} }
@@ -117,7 +105,7 @@ pub struct Blob {
} }
impl Blob { impl Blob {
pub fn create(filename: String) -> io::Result<Self> { pub fn create(filename: String) -> Result<Self> {
let path = Path::new(&filename); let path = Path::new(&filename);
let mut file = File::open(path)?; let mut file = File::open(path)?;
@@ -125,7 +113,10 @@ impl Blob {
let mut content: Vec<u8> = Vec::new(); let mut content: Vec<u8> = Vec::new();
let read = file.read_to_end(&mut content)?; let read = file.read_to_end(&mut content)?;
Ok(Self { size: read, content }) Ok(Self {
size: read,
content,
})
} }
} }
@@ -133,15 +124,15 @@ impl GitObject for Blob {
fn raw(&self) -> Vec<u8> { fn raw(&self) -> Vec<u8> {
let mut header = String::from("blob "); let mut header = String::from("blob ");
header.push_str(&self.size.to_string()); header.push_str(&self.size.to_string());
header.push_str("\0"); header.push('\0');
let mut to_compress: Vec<u8> = Vec::from(header.as_bytes()); let mut to_compress: Vec<u8> = Vec::from(header.as_bytes());
to_compress.extend(&self.content); to_compress.extend(&self.content);
return to_compress to_compress
} }
fn hash(&self, write: bool) -> io::Result<Vec<u8>> { fn hash(&self, write: bool) -> Result<Vec<u8>> {
let bytes = self.raw(); let bytes = self.raw();
let mut hasher = Sha1::new(); let mut hasher = Sha1::new();
@@ -174,31 +165,27 @@ pub struct TreeEntry {
} }
impl TreeEntry { impl TreeEntry {
fn from_bytes(bytes: Vec<u8>) -> io::Result<Self> { fn from_bytes(bytes: Vec<u8>) -> Result<Self> {
let split = match bytes.as_slice().iter().position(|x| *x == 0) { let split = match bytes.as_slice().iter().position(|x| *x == 0) {
None => return Err(Error::other("Error parsing tree object")), None => bail!("Error parsing tree entry, did not found '\0'"),
Some(s) => s, Some(s) => s,
}; };
let (header, hash_v) = bytes.split_at(split+1); let (header, hash_v) = bytes.split_at(split + 1);
if hash_v.len() != 20 { if hash_v.len() != 20 {
return Err(Error::other("Digest length not valid")); bail!("Digest length not valid");
} }
let hash: [u8; 20] = match hash_v.first_chunk() { let hash: [u8; 20] = match hash_v.first_chunk() {
None => return Err(Error::other("Error parsing tree object")), None => bail!("Error parsing tree entry, incomplete entry"),
Some(h) => *h, Some(h) => *h,
}; };
let header_str = str::from_utf8(&header[..header.len() - 1])?;
let header_str = match str::from_utf8(&header[..header.len()-1]) {
Ok(s) => s,
_ => return Err(Error::other("Error parsing tree object"))
};
let (ftype, name) = match header_str.split_once(" ") { let (ftype, name) = match header_str.split_once(" ") {
None => return Err(Error::other("Error parsing tree object")), None => bail!("Error parsing tree entry"),
Some((l, name)) => { Some((l, name)) => {
let ftype = match l { let ftype = match l {
"0040000" => FileType::Directory, "0040000" => FileType::Directory,
@@ -206,17 +193,13 @@ impl TreeEntry {
"0100755" => FileType::RegExeFile, "0100755" => FileType::RegExeFile,
"0120000" => FileType::SymLink, "0120000" => FileType::SymLink,
"0160000" => FileType::GitLink, "0160000" => FileType::GitLink,
_ => return Err(Error::other("Invalid file type")) _ => bail!("Invalid file type"),
}; };
(ftype, String::from(name)) (ftype, String::from(name))
} }
}; };
return Ok(TreeEntry { Ok(TreeEntry { ftype, name, hash })
ftype,
name,
hash,
});
} }
fn as_bytes(&self) -> Vec<u8> { fn as_bytes(&self) -> Vec<u8> {
@@ -227,28 +210,30 @@ impl TreeEntry {
FileType::SymLink => "0120000", FileType::SymLink => "0120000",
FileType::GitLink => "0160000", FileType::GitLink => "0160000",
}); });
header.push_str(" "); header.push(' ');
header.push_str(&self.name); header.push_str(&self.name);
header.push_str("\0"); header.push('\0');
let mut bytes: Vec<u8> = Vec::from(header.as_bytes()); let mut bytes: Vec<u8> = Vec::from(header.as_bytes());
bytes.extend(self.hash); bytes.extend(self.hash);
return bytes; bytes
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub struct Tree { pub struct Tree {
pub entries: Vec<TreeEntry> pub entries: Vec<TreeEntry>,
} }
impl Tree { impl Tree {
pub fn from_bytes(bytes: Vec<u8>) -> io::Result<Self> { pub fn from_bytes(bytes: Vec<u8>) -> Result<Self> {
let mut entries: Vec<TreeEntry> = Vec::new(); let mut entries: Vec<TreeEntry> = Vec::new();
let splits = bytes.iter() let splits = bytes
.iter()
.enumerate() .enumerate()
.filter_map(|(idx, b)| (*b == 0).then(|| idx + 21)) .filter(|&(_, b)| *b == 0)
.map(|(idx, _)| idx + 21)
.collect::<Vec<usize>>(); .collect::<Vec<usize>>();
let bytes_slice = bytes.as_slice(); let bytes_slice = bytes.as_slice();
@@ -263,7 +248,7 @@ impl Tree {
left_i = right_i; left_i = right_i;
} }
return Ok(Tree { entries }); Ok(Tree { entries })
} }
fn as_bytes(&self) -> Vec<u8> { fn as_bytes(&self) -> Vec<u8> {
@@ -272,7 +257,7 @@ impl Tree {
result.extend(entry.as_bytes()); result.extend(entry.as_bytes());
} }
return result; result
} }
} }
@@ -281,15 +266,15 @@ impl GitObject for Tree {
let bytes = self.as_bytes(); let bytes = self.as_bytes();
let mut header = String::from("tree "); let mut header = String::from("tree ");
header.push_str(&bytes.len().to_string()); header.push_str(&bytes.len().to_string());
header.push_str("\0"); header.push('\0');
let mut raw: Vec<u8> = Vec::from(header.as_bytes()); let mut raw: Vec<u8> = Vec::from(header.as_bytes());
raw.extend(self.as_bytes()); raw.extend(self.as_bytes());
return raw; raw
} }
fn hash(&self, write: bool) -> io::Result<Vec<u8>> { fn hash(&self, write: bool) -> Result<Vec<u8>> {
let bytes = self.raw(); let bytes = self.raw();
let mut hasher = Sha1::new(); let mut hasher = Sha1::new();
@@ -301,6 +286,6 @@ impl GitObject for Tree {
self.save(&encode(hash), bytes)?; self.save(&encode(hash), bytes)?;
} }
return Ok(Vec::from(hash.as_slice())); Ok(Vec::from(hash.as_slice()))
} }
} }
+4 -45
View File
@@ -1,25 +1,15 @@
#![feature(path_trailing_sep)]
use clap::Parser; use clap::Parser;
// use std::{
// fs::File,
// io::prelude::*,
// path::Path,
// };
// use zlib_rs::{
// ReturnCode,
// DeflateConfig, compress_bound, compress_slice,
// };
use subcommands::{Subcommand, SubcommandType};
mod subcommands;
mod git_fs; mod git_fs;
mod subcommands;
use subcommands::{Subcommand, SubcommandType};
const GIT_DIR: &str = ".rgit"; const GIT_DIR: &str = ".rgit";
#[derive(Parser, Debug)] #[derive(Parser, Debug)]
#[command(version, about, long_about)] #[command(version, about, long_about)]
struct CmdArgs{ struct CmdArgs {
#[command(subcommand)] #[command(subcommand)]
cmd: SubcommandType, cmd: SubcommandType,
} }
@@ -31,35 +21,4 @@ fn main() {
Err(str) => println!("Some error occured: {str}"), Err(str) => println!("Some error occured: {str}"),
Ok(str) => println!("{str}"), Ok(str) => println!("{str}"),
} }
// let path = Path::new("sample/file.txt");
// let display = path.display();
// let mut file = match File::open(&path) {
// Err(err) => panic!("couldn't open {}: {}", display, err),
// Ok(file) => file,
// };
// let mut s = String::new();
// match file.read_to_string(&mut s) {
// Err(err) => panic!("couldn't read {}: {}", display, err),
// Ok(_) => print!("content: \n{}", s),
// }
// let mut compressed_buf = vec![0u8; compress_bound(s.len())];
// let compressed = match compress_slice(&mut compressed_buf, s.as_bytes(), DeflateConfig::default()) {
// (compressed, ReturnCode::Ok) => compressed,
// (_, _) => panic!("Error while compressing"),
// };
// let path_w = Path::new("sample/write");
// file = match File::create(&path_w) {
// Err(_) => panic!("error while opening file"),
// Ok(file) => file,
// };
// match file.write_all(compressed) {
// Err(_) => panic!("error while writing"),
// Ok(_) => println!("File written"),
// }
} }
+43
View File
@@ -0,0 +1,43 @@
use anyhow::Result;
use clap::Parser;
use std::path::Path;
use crate::{
git_fs::{gitignore::expands_and_filter_path, index::Index, normalize_path_in_worktree},
subcommands::hash_object::HashObjectSubcommand,
};
use super::Subcommand;
#[derive(Parser, Debug)]
pub struct AddSubcommand {
    /// File(s) or glob pattern(s) to hash and record in the index
    pub paths: Vec<String>,
}
impl Subcommand for AddSubcommand {
    /// Hash every file matched by `self.paths`, write the resulting blobs to
    /// the object database, and record each one in the index.
    ///
    /// Returns a (currently empty) status string; errors from path expansion,
    /// hashing and index load/save are propagated.
    fn run(&self) -> Result<String> {
        if self.paths.is_empty() {
            return Ok(String::from("Nothing specified, nothing added"));
        }
        let mut index = Index::load()?;
        for path in self.paths.iter().map(|p| Path::new(p).to_path_buf()) {
            // Expand globs/directories and drop gitignored entries.
            let expanded = expands_and_filter_path(path)?;
            for p in expanded {
                // Reject non-UTF-8 paths with a clean error instead of
                // panicking on `unwrap()` (paths are user input).
                let Some(path_str) = p.to_str().map(String::from) else {
                    return Err(std::io::Error::other(format!(
                        "path is not valid UTF-8: {}",
                        p.display()
                    ))
                    .into());
                };
                // The index key is the path normalized relative to the
                // worktree root, not the raw user-supplied path.
                let normalized = normalize_path_in_worktree(&p)?;
                let Some(name) = normalized.to_str().map(String::from) else {
                    return Err(std::io::Error::other(format!(
                        "path is not valid UTF-8: {}",
                        normalized.display()
                    ))
                    .into());
                };
                let hash = HashObjectSubcommand {
                    write: true,
                    path: path_str,
                }
                .run_raw()?;
                index.add_file(name, hash)?;
            }
        }
        index.save()?;
        Ok(String::from(""))
    }
}
+19 -13
View File
@@ -1,32 +1,38 @@
use anyhow::{Result, bail};
use clap::Parser; use clap::Parser;
use hex::encode; use hex::encode;
use crate::{ use crate::{
git_fs::object::{Blob, GitObject}, git_fs::object::{Blob, GitObject},
subcommands::Subcommand subcommands::Subcommand,
}; };
use super::CmdResult;
#[derive(Parser, Debug)] #[derive(Parser, Debug)]
pub struct HashObjectSubcommand { pub struct HashObjectSubcommand {
#[arg(short)] #[arg(short, default_value_t = false)]
/// Save object in database /// Save object in database
pub write: bool, pub write: bool,
pub path: String, pub path: String,
} }
impl Subcommand for HashObjectSubcommand { impl HashObjectSubcommand {
fn run (&self) -> CmdResult { pub fn run_raw(&self) -> Result<[u8; 20]> {
let object = match Blob::create(self.path.clone()) { let object = Blob::create(self.path.clone())?;
Ok(o) => o, let hash = object.hash(self.write)?;
_ => return Err("".to_owned())
let hash_final: [u8; 20] = match hash.first_chunk() {
Some(h) => *h,
None => bail!("Hash length not valid"),
}; };
match object.hash(self.write) { Ok(hash_final)
Ok(hash) => Ok(encode(hash)), }
_ => return Err("".to_owned()) }
}
impl Subcommand for HashObjectSubcommand {
fn run(&self) -> Result<String> {
let hash = self.run_raw()?;
Ok(encode(hash))
} }
} }
+22 -28
View File
@@ -1,14 +1,11 @@
use anyhow::Result;
use anyhow::bail;
use clap::Parser; use clap::Parser;
use std::{ use std::{fs, path::Path};
fs,
path::Path
};
use crate::GIT_DIR;
use crate::git_fs::head::Head; use crate::git_fs::head::Head;
use crate::subcommands::Subcommand; use crate::subcommands::Subcommand;
use crate::GIT_DIR;
use super::CmdResult;
#[derive(Parser, Debug)] #[derive(Parser, Debug)]
pub struct InitSubcommand { pub struct InitSubcommand {
@@ -16,45 +13,42 @@ pub struct InitSubcommand {
} }
impl Subcommand for InitSubcommand { impl Subcommand for InitSubcommand {
fn run(&self) -> CmdResult { fn run(&self) -> Result<String> {
let path = match &self.directory { let path = match &self.directory {
None => Path::new("."), None => Path::new("."),
Some(path) => Path::new(path), Some(path) => Path::new(path),
}.join(GIT_DIR); }
.join(GIT_DIR);
let new_repo = path.exists(); let new_repo = path.exists();
match fs::create_dir_all(&path) { if fs::create_dir_all(&path).is_err() {
Err(_) => return Err("Error while creating dir".to_owned()), bail!("Error while creating dir")
Ok(()) => (),
}; };
let folders = [ let folders = ["objects/info", "objects/pack", "refs/heads", "refs/tags"];
"objects/info",
"objects/pack",
"refs/heads",
"refs/tags",
];
for folder in folders { for folder in folders {
match fs::create_dir_all(&path.join(folder)) { fs::create_dir_all(path.join(folder))?;
Err(_) => return Err("".to_owned()),
Ok(()) => (),
};
} }
let head = Head { ref_to: String::from("refs/heads/master") }; let head = Head {
match head.save() { ref_to: String::from("refs/heads/master"),
Err(_) => return Err("".to_owned()),
Ok(()) => (),
}; };
head.save()?;
let canonical_path = path.canonicalize(); let canonical_path = path.canonicalize();
if new_repo { if new_repo {
Ok(format!("Reinitialized exisiting Git repo in {}", canonical_path.unwrap().display())) Ok(format!(
"Reinitialized exisiting Git repo in {}",
canonical_path?.display()
))
} else { } else {
Ok(format!("Initialized empty Git repo in {}", canonical_path.unwrap().display())) Ok(format!(
"Initialized empty Git repo in {}",
canonical_path?.display()
))
} }
} }
} }
+12 -6
View File
@@ -1,17 +1,21 @@
use crate::subcommands::{ use crate::subcommands::{
init::InitSubcommand, add::AddSubcommand, hash_object::HashObjectSubcommand, init::InitSubcommand, rm::RmSubcommand,
test::TestSubcommand, test::TestSubcommand,
hash_object::HashObjectSubcommand,
}; };
use anyhow::Result;
mod add;
mod hash_object; mod hash_object;
mod init; mod init;
mod rm;
mod test; mod test;
pub type CmdResult = Result<String, String>;
#[derive(clap::Parser, Debug)] #[derive(clap::Parser, Debug)]
pub enum SubcommandType { pub enum SubcommandType {
/// Add file(s) to index
Add(AddSubcommand),
/// Remove file(s) from the index
Rm(RmSubcommand),
/// Init a Git repository /// Init a Git repository
Init(InitSubcommand), Init(InitSubcommand),
HashObject(HashObjectSubcommand), HashObject(HashObjectSubcommand),
@@ -19,12 +23,14 @@ pub enum SubcommandType {
} }
pub trait Subcommand { pub trait Subcommand {
fn run(&self) -> CmdResult; fn run(&self) -> Result<String>;
} }
impl Subcommand for SubcommandType { impl Subcommand for SubcommandType {
fn run(&self) -> CmdResult { fn run(&self) -> Result<String> {
match self { match self {
Self::Add(cmd) => cmd.run(),
Self::Rm(cmd) => cmd.run(),
Self::Init(cmd) => cmd.run(), Self::Init(cmd) => cmd.run(),
Self::HashObject(cmd) => cmd.run(), Self::HashObject(cmd) => cmd.run(),
Self::Test(cmd) => cmd.run(), Self::Test(cmd) => cmd.run(),
+37
View File
@@ -0,0 +1,37 @@
use anyhow::Result;
use clap::Parser;
use std::path::Path;

use crate::git_fs::{gitignore::expands_and_filter_path, index::Index, normalize_path_in_worktree};

use super::Subcommand;
#[derive(Parser, Debug)]
pub struct RmSubcommand {
    #[arg(short, long, default_value_t = false)]
    /// Only remove object in index
    pub cached: bool,
    /// File(s) or glob pattern(s) to remove from the index
    pub paths: Vec<String>,
}
impl Subcommand for RmSubcommand {
fn run(&self) -> Result<String> {
if self.paths.is_empty() {
return Ok(String::from("Nothing specified, nothing removed"));
}
let mut index = Index::load()?;
for path in self.paths.iter().map(|p| Path::new(p).to_path_buf()) {
let expanded = expands_and_filter_path(path)?;
for p in expanded {
index.remove_file(String::from(p.to_str().unwrap()))?;
}
}
index.save()?;
Ok(String::from(""))
}
}