
Commit 4122306

remove byteorder from git-pack (#293)
Byte-order handling is sufficiently well supported by the standard library now.
1 parent c526811 commit 4122306
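
The change swaps byteorder's explicit big-endian readers and writers for their std equivalents, u32::from_be_bytes / u64::from_be_bytes for reading and to_be_bytes for writing. A minimal, self-contained sketch of that pattern (the helper names below are illustrative, not git-pack API):

// Reading: BigEndian::read_u32(buf)        ->  u32::from_be_bytes(buf.try_into()...)
// Writing: out.write_u32::<BigEndian>(v)?  ->  out.write_all(&v.to_be_bytes())?
use std::convert::TryInto;
use std::io::{self, Write};

fn read_be_u32(b: &[u8]) -> u32 {
    u32::from_be_bytes(b[..4].try_into().expect("at least 4 bytes"))
}

fn write_be_u32(out: &mut impl Write, v: u32) -> io::Result<()> {
    out.write_all(&v.to_be_bytes())
}

fn main() -> io::Result<()> {
    let mut buf = Vec::new();
    write_be_u32(&mut buf, 0xDEAD_BEEF)?;
    assert_eq!(buf, [0xDE, 0xAD, 0xBE, 0xEF]);
    assert_eq!(read_be_u32(&buf), 0xDEAD_BEEF);
    Ok(())
}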

File tree

11 files changed (+46 -53 lines)


Cargo.lock

-1
Generated file; diff not rendered.

git-pack/Cargo.toml

-1
@@ -43,7 +43,6 @@ git-tempfile = { version ="^1.0.0", path = "../git-tempfile" }
 
 smallvec = "1.3.0"
 memmap2 = "0.5.0"
-byteorder = "1.2.3"
 serde = { version = "1.0.114", optional = true, default-features = false, features = ["derive"] }
 bytesize = "1.0.1"
 os_str_bytes = "6.0.0"

git-pack/src/data/header.rs

+2-4
@@ -1,5 +1,3 @@
-use byteorder::{BigEndian, ByteOrder};
-
 use crate::data;
 
 pub(crate) const N32_SIZE: usize = std::mem::size_of::<u32>();
@@ -11,13 +9,13 @@ pub fn decode(data: &[u8; 12]) -> Result<(data::Version, u32), decode::Error> {
         return Err(decode::Error::Corrupt("Pack data type not recognized".into()));
     }
     ofs += N32_SIZE;
-    let kind = match BigEndian::read_u32(&data[ofs..ofs + N32_SIZE]) {
+    let kind = match crate::read_u32(&data[ofs..ofs + N32_SIZE]) {
         2 => data::Version::V2,
         3 => data::Version::V3,
         v => return Err(decode::Error::UnsupportedVersion(v)),
     };
     ofs += N32_SIZE;
-    let num_objects = BigEndian::read_u32(&data[ofs..ofs + N32_SIZE]);
+    let num_objects = crate::read_u32(&data[ofs..ofs + N32_SIZE]);
 
     Ok((kind, num_objects))
 }
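
The decode() change above reads the pack version and object count with the new crate-private read_u32 helper instead of BigEndian::read_u32. A hedged, standalone sketch of what decoding the 12-byte pack data header amounts to (the local read_u32 mirrors the helper; the buffer contents are made up):

use std::convert::TryInto;

fn read_u32(b: &[u8]) -> u32 {
    u32::from_be_bytes(b.try_into().unwrap())
}

fn main() {
    // "PACK" signature, version 2, 3 objects -- all big-endian, as in the pack data format.
    let header: [u8; 12] = *b"PACK\x00\x00\x00\x02\x00\x00\x00\x03";
    assert_eq!(&header[..4], b"PACK");
    let version = read_u32(&header[4..8]);
    let num_objects = read_u32(&header[8..12]);
    assert_eq!((version, num_objects), (2, 3));
}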

git-pack/src/index/access.rs

+13-16
@@ -1,7 +1,5 @@
 use std::mem::size_of;
 
-use byteorder::{BigEndian, ByteOrder};
-
 use crate::{
     data,
     index::{self, EntryIndex, FAN_LEN},
@@ -39,7 +37,7 @@ impl index::File {
                     let (ofs, oid) = c.split_at(N32_SIZE);
                     Entry {
                         oid: git_hash::ObjectId::from(oid),
-                        pack_offset: BigEndian::read_u32(ofs) as u64,
+                        pack_offset: crate::read_u32(ofs) as u64,
                         crc32: None,
                     }
                 }),
@@ -59,7 +57,7 @@ impl index::File {
             .map(move |(oid, crc32, ofs32)| Entry {
                 oid: git_hash::ObjectId::from(oid),
                 pack_offset: self.pack_offset_from_offset_v2(ofs32, pack64_offset),
-                crc32: Some(BigEndian::read_u32(crc32)),
+                crc32: Some(crate::read_u32(crc32)),
             }),
             _ => panic!("Cannot use iter_v2() on index of type {:?}", self.version),
         }
@@ -94,7 +92,7 @@ impl index::File {
             }
             index::Version::V1 => {
                 let start = V1_HEADER_SIZE + index * (N32_SIZE + self.hash_len);
-                BigEndian::read_u32(&self.data[start..][..N32_SIZE]) as u64
+                crate::read_u32(&self.data[start..][..N32_SIZE]) as u64
             }
         }
     }
@@ -110,7 +108,7 @@ impl index::File {
         match self.version {
            index::Version::V2 => {
                 let start = self.offset_crc32_v2() + index * N32_SIZE;
-                Some(BigEndian::read_u32(&self.data[start..start + N32_SIZE]))
+                Some(crate::read_u32(&self.data[start..start + N32_SIZE]))
             }
             index::Version::V1 => None,
         }
@@ -153,14 +151,13 @@ impl index::File {
         let mut ofs: Vec<_> = match self.version {
             index::Version::V1 => self.iter().map(|e| e.pack_offset).collect(),
             index::Version::V2 => {
-                let mut v = Vec::with_capacity(self.num_objects as usize);
-                let mut ofs32 = &self.data[self.offset_pack_offset_v2()..];
-                let pack_offset_64 = self.offset_pack_offset64_v2();
-                for _ in 0..self.num_objects {
-                    v.push(self.pack_offset_from_offset_v2(ofs32, pack_offset_64));
-                    ofs32 = &ofs32[4..];
-                }
-                v
+                let offset32_start = &self.data[self.offset_pack_offset_v2()..];
+                let pack_offset_64_start = self.offset_pack_offset64_v2();
+                offset32_start
+                    .chunks(N32_SIZE)
+                    .take(self.num_objects as usize)
+                    .map(|offset| self.pack_offset_from_offset_v2(offset, pack_offset_64_start))
+                    .collect()
             }
         };
         ofs.sort_unstable();
@@ -185,10 +182,10 @@ impl index::File {
     #[inline]
     fn pack_offset_from_offset_v2(&self, offset: &[u8], pack64_offset: usize) -> data::Offset {
         debug_assert_eq!(self.version, index::Version::V2);
-        let ofs32 = BigEndian::read_u32(offset);
+        let ofs32 = crate::read_u32(offset);
         if (ofs32 & N32_HIGH_BIT) == N32_HIGH_BIT {
             let from = pack64_offset + (ofs32 ^ N32_HIGH_BIT) as usize * N64_SIZE;
-            BigEndian::read_u64(&self.data[from..][..N64_SIZE])
+            crate::read_u64(&self.data[from..][..N64_SIZE])
         } else {
             ofs32 as u64
         }
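
Beyond the read_u32/read_u64 swap, the sorted_offsets() hunk above replaces a manual Vec::with_capacity + for loop with a chunks/take/map/collect chain over the 32-bit offset table. A standalone sketch of that iterator pattern on made-up data (read_be_u32 is illustrative, not the crate helper):

use std::convert::TryInto;

fn read_be_u32(b: &[u8]) -> u32 {
    u32::from_be_bytes(b.try_into().unwrap())
}

fn main() {
    // Three big-endian u32 offsets packed back to back, as in the index's offset table.
    let data: [u8; 12] = [0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3];
    let num_objects = 3;
    let offsets: Vec<u64> = data
        .chunks(4)
        .take(num_objects)
        .map(|chunk| read_be_u32(chunk) as u64)
        .collect();
    assert_eq!(offsets, vec![1, 2, 3]);
}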

git-pack/src/index/init.rs

+2-4
@@ -1,7 +1,5 @@
 use std::{mem::size_of, path::Path};
 
-use byteorder::{BigEndian, ByteOrder};
-
 use crate::index::{self, Version, FAN_LEN, V2_SIGNATURE};
 
 /// Returned by [`index::File::at()`].
@@ -57,7 +55,7 @@ impl index::File {
         let d = {
             if let Version::V2 = kind {
                 let (vd, dr) = d.split_at(N32_SIZE);
-                let version = BigEndian::read_u32(vd);
+                let version = crate::read_u32(vd);
                 if version != Version::V2 as u32 {
                     return Err(Error::UnsupportedVersion { version });
                 }
@@ -87,7 +85,7 @@ impl index::File {
 fn read_fan(d: &[u8]) -> ([u32; FAN_LEN], usize) {
     let mut fan = [0; FAN_LEN];
     for (c, f) in d.chunks(N32_SIZE).zip(fan.iter_mut()) {
-        *f = BigEndian::read_u32(c);
+        *f = crate::read_u32(c);
     }
     (fan, FAN_LEN * N32_SIZE)
 }
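
read_fan() above fills the index's fan-out table: FAN_LEN (256) cumulative object counts, one big-endian u32 per possible first byte of an object id. A hedged, standalone sketch of reading such a table and using it to bound a lookup (data and names are made up, and N32_SIZE is written as a literal 4 here):

use std::convert::TryInto;

const FAN_LEN: usize = 256;

fn read_fan(d: &[u8]) -> ([u32; FAN_LEN], usize) {
    let mut fan = [0; FAN_LEN];
    for (c, f) in d.chunks(4).zip(fan.iter_mut()) {
        *f = u32::from_be_bytes(c.try_into().unwrap());
    }
    (fan, FAN_LEN * 4)
}

fn main() {
    // A fake fan-out: cumulative counts, here 2 objects per first byte.
    let mut raw = Vec::with_capacity(FAN_LEN * 4);
    for i in 0..FAN_LEN as u32 {
        raw.extend_from_slice(&((i + 1) * 2).to_be_bytes());
    }
    let (fan, consumed) = read_fan(&raw);
    assert_eq!(consumed, 1024);
    // Objects whose id starts with byte 0x10 occupy entries [fan[0x0f], fan[0x10]).
    let (start, end) = (fan[0x0f], fan[0x10]);
    assert_eq!((start, end), (32, 34));
}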

git-pack/src/index/write/encode.rs

+7-7
@@ -3,7 +3,6 @@ use std::{cmp::Ordering, io};
 pub(crate) const LARGE_OFFSET_THRESHOLD: u64 = 0x7fff_ffff;
 pub(crate) const HIGH_BIT: u32 = 0x8000_0000;
 
-use byteorder::{BigEndian, WriteBytesExt};
 use git_features::{
     hash,
     progress::{self, Progress},
@@ -35,15 +34,15 @@ pub(crate) fn write_to(
         hash::Write::new(out, kind.hash()),
     ));
     out.write_all(V2_SIGNATURE)?;
-    out.write_u32::<BigEndian>(kind as u32)?;
+    out.write_all(&(kind as u32).to_be_bytes())?;
 
     progress.init(Some(4), progress::steps());
     let start = std::time::Instant::now();
     let _info = progress.add_child("writing fan-out table");
     let fan_out = fanout(entries_sorted_by_oid.iter().map(|e| e.data.id.first_byte()));
 
     for value in fan_out {
-        out.write_u32::<BigEndian>(value)?;
+        out.write_all(&value.to_be_bytes())?;
     }
 
     progress.inc();
@@ -55,15 +54,15 @@ pub(crate) fn write_to(
     progress.inc();
     let _info = progress.add_child("writing crc32");
     for entry in &entries_sorted_by_oid {
-        out.write_u32::<BigEndian>(entry.data.crc32)?;
+        out.write_all(&entry.data.crc32.to_be_bytes())?;
     }
 
     progress.inc();
     let _info = progress.add_child("writing offsets");
     {
         let mut offsets64 = Vec::<u64>::new();
         for entry in &entries_sorted_by_oid {
-            out.write_u32::<BigEndian>(if entry.offset > LARGE_OFFSET_THRESHOLD {
+            let offset: u32 = if entry.offset > LARGE_OFFSET_THRESHOLD {
                 assert!(
                     offsets64.len() < LARGE_OFFSET_THRESHOLD as usize,
                     "Encoding breakdown - way too many 64bit offsets"
@@ -72,10 +71,11 @@ pub(crate) fn write_to(
                 ((offsets64.len() - 1) as u32) | HIGH_BIT
             } else {
                 entry.offset as u32
-            })?;
+            };
+            out.write_all(&offset.to_be_bytes())?;
         }
         for value in offsets64 {
-            out.write_u64::<BigEndian>(value)?;
+            out.write_all(&value.to_be_bytes())?;
         }
     }
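
Two things change in the offsets hunk above: writes go through write_all(&x.to_be_bytes()) instead of write_u32::<BigEndian> / write_u64::<BigEndian>, and the inline if-expression that used to be passed straight to write_u32 is first bound to a local offset. A hedged, standalone sketch of the same 32-bit/64-bit offset encoding outside git-pack (constants mirror the diff, names are illustrative):

use std::io::{self, Write};

const LARGE_OFFSET_THRESHOLD: u64 = 0x7fff_ffff;
const HIGH_BIT: u32 = 0x8000_0000;

fn write_offsets(out: &mut impl Write, pack_offsets: &[u64]) -> io::Result<()> {
    let mut offsets64 = Vec::<u64>::new();
    for &pack_offset in pack_offsets {
        // Offsets that fit in 31 bits are stored directly; larger ones go to a trailing
        // 64-bit table, and the 32-bit slot stores that table index with the high bit set.
        let offset: u32 = if pack_offset > LARGE_OFFSET_THRESHOLD {
            offsets64.push(pack_offset);
            ((offsets64.len() - 1) as u32) | HIGH_BIT
        } else {
            pack_offset as u32
        };
        out.write_all(&offset.to_be_bytes())?;
    }
    for value in offsets64 {
        out.write_all(&value.to_be_bytes())?;
    }
    Ok(())
}

fn main() -> io::Result<()> {
    let mut buf = Vec::new();
    write_offsets(&mut buf, &[42, 0x1_0000_0000])?;
    // Two 4-byte slots plus one 8-byte large offset.
    assert_eq!(buf.len(), 16);
    Ok(())
}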

git-pack/src/lib.rs

+13
@@ -32,6 +32,7 @@ pub mod cache;
 pub mod data;
 
 mod find_traits;
+
 pub use find_traits::{Find, FindExt};
 
 ///
@@ -54,3 +55,15 @@ mod mmap {
         }
     }
 }
+
+use std::convert::TryInto;
+
+#[inline]
+fn read_u32(b: &[u8]) -> u32 {
+    u32::from_be_bytes(b.try_into().unwrap())
+}
+
+#[inline]
+fn read_u64(b: &[u8]) -> u64 {
+    u64::from_be_bytes(b.try_into().unwrap())
+}
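
The new crate-private helpers above centralize big-endian decoding. Note that try_into() converts the slice into a fixed-size array, so callers must pass exactly 4 or 8 bytes or the unwrap() panics. A standalone version to illustrate that contract (assertions and comments are mine, not part of the commit):

use std::convert::TryInto;

fn read_u32(b: &[u8]) -> u32 {
    u32::from_be_bytes(b.try_into().unwrap())
}

fn read_u64(b: &[u8]) -> u64 {
    u64::from_be_bytes(b.try_into().unwrap())
}

fn main() {
    assert_eq!(read_u32(&[0, 0, 1, 0]), 256);
    assert_eq!(read_u64(&[0, 0, 0, 0, 0, 0, 1, 0]), 256);
    // read_u32(&[0, 0, 1]) would panic: a 3-byte slice cannot become a [u8; 4].
}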

git-pack/src/multi_index/access.rs

+3-5
@@ -1,7 +1,5 @@
 use std::path::{Path, PathBuf};
 
-use byteorder::{BigEndian, ByteOrder};
-
 use crate::{
     data,
     multi_index::{EntryIndex, File, PackIndex, Version},
@@ -102,15 +100,15 @@ impl File {
 
         const HIGH_BIT: u32 = 1 << 31;
 
-        let pack_index = BigEndian::read_u32(&self.data[start..][..4]);
+        let pack_index = crate::read_u32(&self.data[start..][..4]);
         let offset = &self.data[start + 4..][..4];
-        let ofs32 = BigEndian::read_u32(offset);
+        let ofs32 = crate::read_u32(offset);
         let pack_offset = if (ofs32 & HIGH_BIT) == HIGH_BIT {
             // We determine if large offsets are actually larger than 4GB and if not, we don't use the high-bit to signal anything
             // but allow the presence of the large-offset chunk to signal what's happening.
             if let Some(offsets_64) = self.large_offsets_ofs {
                 let from = offsets_64 + (ofs32 ^ HIGH_BIT) as usize * 8;
-                BigEndian::read_u64(&self.data[from..][..8])
+                crate::read_u64(&self.data[from..][..8])
             } else {
                 ofs32 as u64
             }
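
The multi-pack-index lookup above is the read-side counterpart of the large-offset encoding: when a large-offset chunk is present and the high bit of the 32-bit value is set, the remaining bits index a table of 64-bit offsets. A hedged, standalone sketch with made-up data (helper and function names are illustrative):

use std::convert::TryInto;

const HIGH_BIT: u32 = 1 << 31;

fn read_be_u64(b: &[u8]) -> u64 {
    u64::from_be_bytes(b[..8].try_into().unwrap())
}

// Resolve a 32-bit offset slot, falling back to the 64-bit table when the high bit is set.
fn resolve_offset(slot: u32, large_offsets: &[u8]) -> u64 {
    if (slot & HIGH_BIT) == HIGH_BIT {
        let index = (slot ^ HIGH_BIT) as usize;
        read_be_u64(&large_offsets[index * 8..])
    } else {
        slot as u64
    }
}

fn main() {
    // One large offset (2^32) stored big-endian in the 64-bit table.
    let large_offsets = [0u8, 0, 0, 1, 0, 0, 0, 0];
    assert_eq!(resolve_offset(42, &large_offsets), 42);
    assert_eq!(resolve_offset(HIGH_BIT, &large_offsets), 1u64 << 32);
}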

git-pack/src/multi_index/chunk.rs

+4-10
@@ -108,8 +108,6 @@ pub mod index_names {
 pub mod fanout {
     use std::convert::TryInto;
 
-    use byteorder::{BigEndian, WriteBytesExt};
-
     use crate::multi_index;
 
     /// The size of the fanout table
@@ -138,7 +136,7 @@ pub mod fanout {
         let fanout = crate::index::write::encode::fanout(sorted_entries.iter().map(|e| e.id.first_byte()));
 
         for value in fanout {
-            out.write_u32::<BigEndian>(value)?;
+            out.write_all(&value.to_be_bytes())?;
         }
         Ok(())
     }
@@ -178,8 +176,6 @@ pub mod lookup {
 pub mod offsets {
     use std::{convert::TryInto, ops::Range};
 
-    use byteorder::{BigEndian, WriteBytesExt};
-
     use crate::multi_index;
 
     /// The id uniquely identifying the offsets table.
@@ -199,7 +195,7 @@ pub mod offsets {
         let mut num_large_offsets = 0u32;
 
         for entry in sorted_entries {
-            out.write_u32::<BigEndian>(entry.pack_index)?;
+            out.write_all(&entry.pack_index.to_be_bytes())?;
 
             let offset: u32 = if large_offsets_needed {
                 if entry.pack_offset > LARGE_OFFSET_THRESHOLD {
@@ -215,7 +211,7 @@ pub mod offsets {
                     .try_into()
                     .expect("without large offsets, pack-offset fits u32")
             };
-            out.write_u32::<BigEndian>(offset)?;
+            out.write_all(&offset.to_be_bytes())?;
         }
         Ok(())
     }
@@ -231,8 +227,6 @@ pub mod offsets {
 pub mod large_offsets {
     use std::ops::Range;
 
-    use byteorder::{BigEndian, WriteBytesExt};
-
     use crate::{index::write::encode::LARGE_OFFSET_THRESHOLD, multi_index};
 
     /// The id uniquely identifying the large offsets table (with 64 bit offsets)
@@ -267,7 +261,7 @@ pub mod large_offsets {
             .iter()
            .filter_map(|e| (e.pack_offset > LARGE_OFFSET_THRESHOLD).then(|| e.pack_offset))
         {
-            out.write_u64::<BigEndian>(offset)?;
+            out.write_all(&offset.to_be_bytes())?;
             num_large_offsets = num_large_offsets
                 .checked_sub(1)
                 .expect("BUG: wrote more offsets the previously found");

git-pack/src/multi_index/init.rs

+1-3
@@ -1,7 +1,5 @@
 use std::{convert::TryFrom, path::Path};
 
-use byteorder::{BigEndian, ByteOrder};
-
 use crate::multi_index::{chunk, File, Version};
 
 mod error {
@@ -90,7 +88,7 @@ impl TryFrom<&Path> for File {
             let (_num_base_files, data) = data.split_at(1); // TODO: handle base files once it's clear what this does
 
             let (num_indices, _) = data.split_at(4);
-            let num_indices = BigEndian::read_u32(num_indices);
+            let num_indices = crate::read_u32(num_indices);
 
             (version, object_hash, num_chunks, num_indices)
         };

git-pack/src/multi_index/write.rs

+1-2
@@ -5,7 +5,6 @@ use std::{
     time::{Instant, SystemTime},
 };
 
-use byteorder::{BigEndian, WriteBytesExt};
 use git_features::progress::Progress;
 
 use crate::multi_index;
@@ -217,7 +216,7 @@ impl multi_index::File {
         out.write_all(&[object_hash as u8])?;
         out.write_all(&[num_chunks])?;
         out.write_all(&[0])?; /* unused number of base files */
-        out.write_u32::<BigEndian>(num_indices)?;
+        out.write_all(&num_indices.to_be_bytes())?;
 
         Ok(Self::HEADER_LEN)
     }
