create src

This commit is contained in:
awfixer
2026-03-11 02:04:19 -07:00
commit 52f7a22bf2
2595 changed files with 402870 additions and 0 deletions

View File

@@ -0,0 +1,20 @@
/// Convenience result type for tests: success carries no value, any error is boxed.
pub type Result = std::result::Result<(), Box<dyn std::error::Error>>;
/// Assert that `res` is an `Err` whose `Display` rendering equals `expected`.
///
/// # Panics
/// Panics with a descriptive message when `res` is `Ok`, or when the error's
/// display text differs from `expected`.
pub fn assert_err_display<T: std::fmt::Debug, E: std::error::Error>(
    res: std::result::Result<T, E>,
    expected: impl AsRef<str>,
) {
    // Hoist the borrow so both arms compare against the same &str.
    let wanted = expected.as_ref();
    match res {
        Err(actual) => assert_eq!(actual.to_string(), wanted),
        Ok(value) => panic!("Expected error '{}', got value {:?}", wanted, value),
    }
}
// These test modules exercise the async API surface, so they only build when
// `async-io` is enabled and `blocking-io` is not.
#[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
mod decode;
#[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
mod encode;
#[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
mod read;
#[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
mod write;

View File

@@ -0,0 +1,20 @@
/// Convenience result type for tests: success carries no value, any error is boxed.
pub type Result = std::result::Result<(), Box<dyn std::error::Error>>;
/// Assert that `res` is an `Err` whose `Display` rendering equals `expected`.
///
/// # Panics
/// Panics with a descriptive message when `res` is `Ok`, or when the error's
/// display text differs from `expected`.
pub fn assert_err_display<T: std::fmt::Debug, E: std::error::Error>(
    res: std::result::Result<T, E>,
    expected: impl AsRef<str>,
) {
    // Resolve the expectation once; both branches use the same &str.
    let wanted = expected.as_ref();
    if let Err(actual) = &res {
        assert_eq!(actual.to_string(), wanted);
    } else {
        panic!("Expected error '{}', got value {:?}", wanted, res.unwrap());
    }
}
// These test modules exercise the blocking API surface and build whenever the
// `blocking-io` feature is enabled.
#[cfg(feature = "blocking-io")]
mod decode;
#[cfg(feature = "blocking-io")]
mod encode;
#[cfg(feature = "blocking-io")]
mod read;
#[cfg(feature = "blocking-io")]
mod write;

View File

@@ -0,0 +1,169 @@
/// Tests for incremental packet-line decoding via `gix_packetline::decode::streaming`.
mod streaming {
    use gix_packetline::{
        decode::{self, streaming, Stream},
        ErrorRef, PacketLineRef,
    };

    use crate::assert_err_display;

    /// Assert that `res` decoded to a `Stream::Complete` consuming exactly
    /// `expected_consumed` bytes with a payload equal to `expected_value`.
    /// Panics if the parse turned out to be partial.
    fn assert_complete(
        res: Result<Stream, decode::Error>,
        expected_consumed: usize,
        expected_value: PacketLineRef,
    ) -> crate::Result {
        match res? {
            Stream::Complete { line, bytes_consumed } => {
                assert_eq!(bytes_consumed, expected_consumed);
                assert_eq!(line.as_bstr(), expected_value.as_bstr());
            }
            Stream::Incomplete { .. } => panic!("expected parsing to be complete, not partial"),
        }
        Ok(())
    }

    /// Encode lines and decode them again, expecting the original back.
    mod round_trip {
        use bstr::ByteSlice;
        use gix_packetline::{decode, decode::streaming, Channel, PacketLineRef};

        use crate::decode::streaming::assert_complete;

        // Pick the encode implementation matching the enabled IO feature.
        #[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
        use gix_packetline::async_io::encode as encode_io;
        #[cfg(all(feature = "blocking-io", not(feature = "async-io")))]
        use gix_packetline::blocking_io::encode as encode_io;

        /// Decoding strips a trailing newline from text lines; encoding in
        /// text mode puts it back.
        #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
        async fn trailing_line_feeds_are_removed_explicitly() -> crate::Result {
            let line = decode::all_at_once(b"0006a\n")?;
            assert_eq!(line.as_text().expect("text").0.as_bstr(), b"a".as_bstr());
            let mut out = Vec::new();
            encode_io::write_text(&line.as_text().expect("text"), &mut out)
                .await
                .expect("write to memory works");
            assert_eq!(out, b"0006a\n", "it appends a newline in text mode");
            Ok(())
        }

        /// Every packet-line variant survives an encode→decode round trip,
        /// with the expected encoded byte count.
        #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
        async fn all_kinds_of_packetlines() -> crate::Result {
            for (line, bytes) in &[
                (PacketLineRef::ResponseEnd, 4),
                (PacketLineRef::Delimiter, 4),
                (PacketLineRef::Flush, 4),
                (PacketLineRef::Data(b"hello there"), 15),
            ] {
                let mut out = Vec::new();
                encode_io::write_packet_line(line, &mut out).await?;
                assert_complete(streaming(&out), *bytes, *line)?;
            }
            Ok(())
        }

        /// ERR lines round-trip and are recognizable as errors after decoding.
        #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
        async fn error_line() -> crate::Result {
            let mut out = Vec::new();
            encode_io::write_error(
                &PacketLineRef::Data(b"the error").as_error().expect("data line"),
                &mut out,
            )
            .await?;
            let line = decode::all_at_once(&out)?;
            assert_eq!(line.check_error().expect("err").0, b"the error");
            Ok(())
        }

        /// Side-band encoded data round-trips on every channel.
        #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
        async fn side_bands() -> crate::Result {
            for channel in &[Channel::Data, Channel::Error, Channel::Progress] {
                let mut out = Vec::new();
                let band = PacketLineRef::Data(b"band data")
                    .as_band(*channel)
                    .expect("data is valid for band");
                encode_io::write_band(&band, &mut out).await?;
                let line = decode::all_at_once(&out)?;
                assert_eq!(line.decode_band().expect("valid band"), band);
            }
            Ok(())
        }
    }

    /// A flush packet consumes exactly its 4 header bytes; trailing input is ignored.
    #[test]
    fn flush() -> crate::Result {
        assert_complete(streaming(b"0000someotherstuff"), 4, PacketLineRef::Flush)
    }

    /// Data lines keep their trailing newline; the decoder does not trim it.
    #[test]
    fn trailing_line_feeds_are_not_removed_automatically() -> crate::Result {
        assert_complete(streaming(b"0006a\n"), 6, PacketLineRef::Data(b"a\n"))
    }

    /// Bytes beyond the advertised length are left untouched by the decoder.
    #[test]
    fn ignore_extra_bytes() -> crate::Result {
        assert_complete(streaming(b"0006a\nhello"), 6, PacketLineRef::Data(b"a\n"))
    }

    /// A length field above the protocol maximum (65516 payload bytes) is rejected.
    #[test]
    fn error_on_oversized_line() {
        assert_err_display(
            streaming(b"ffff"),
            "The data received claims to be larger than the maximum allowed size: got 65535, exceeds 65516",
        );
    }

    /// An ERR payload decodes as an ordinary data line; error detection is an
    /// explicit, separate step via `check_error`.
    #[test]
    fn error_on_error_line() -> crate::Result {
        let line = PacketLineRef::Data(b"ERR the error");
        assert_complete(
            streaming(b"0011ERR the error-and just ignored because not part of the size"),
            17,
            line,
        )?;
        assert_eq!(
            line.check_error().expect("error to be parsed here"),
            ErrorRef(b"the error")
        );
        Ok(())
    }

    /// Non-hex characters in the 4-byte length prefix produce a decode error.
    #[test]
    fn error_on_invalid_hex() {
        assert_err_display(
            streaming(b"fooo"),
            "Failed to decode the first four hex bytes indicating the line length: Invalid character",
        );
    }

    /// "0004" advertises a zero-byte payload, which the protocol forbids.
    #[test]
    fn error_on_empty_line() {
        assert_err_display(streaming(b"0004"), "Received an invalid empty line");
    }

    /// Inputs that end before a whole line could be read.
    mod incomplete {
        use gix_packetline::decode::{self, streaming, Stream};

        /// Assert `res` is `Stream::Incomplete` and needs `expected_missing` more bytes.
        fn assert_incomplete(res: Result<Stream, decode::Error>, expected_missing: usize) -> crate::Result {
            match res? {
                Stream::Complete { .. } => {
                    panic!("expected parsing to be partial, not complete");
                }
                Stream::Incomplete { bytes_needed } => {
                    assert_eq!(bytes_needed, expected_missing);
                }
            }
            Ok(())
        }

        /// Fewer than 4 bytes: the hex length prefix itself is incomplete.
        #[test]
        fn missing_hex_bytes() -> crate::Result {
            assert_incomplete(streaming(b"0"), 3)?;
            assert_incomplete(streaming(b"00"), 2)?;
            Ok(())
        }

        /// The length prefix is complete but the payload falls short.
        #[test]
        fn missing_data_bytes() -> crate::Result {
            assert_incomplete(streaming(b"0005"), 1)?;
            assert_incomplete(streaming(b"0006a"), 1)?;
            Ok(())
        }
    }
}

View File

@@ -0,0 +1,114 @@
/// Tests for `encode::data_to_write`, which writes a single binary packet line.
mod data_to_write {
    #[cfg(feature = "blocking-io")]
    use std::io;

    use bstr::ByteSlice;
    #[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
    use futures_lite::io;

    use crate::assert_err_display;

    // Pick the encode implementation matching the enabled IO feature.
    #[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
    use gix_packetline::async_io::encode::data_to_write;
    #[cfg(all(feature = "blocking-io", not(feature = "async-io")))]
    use gix_packetline::blocking_io::encode::data_to_write;

    /// Payloads — binary bytes included — are written verbatim behind a 4-digit
    /// hex length prefix; the returned count covers prefix plus payload.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn binary_and_non_binary() -> crate::Result {
        let mut out = Vec::new();
        let res = data_to_write(b"\0", &mut out).await?;
        assert_eq!(res, 5);
        assert_eq!(out.as_bstr(), b"0005\0".as_bstr());

        out.clear();
        let res = data_to_write("hello world, it works\n".as_bytes(), &mut out).await?;
        assert_eq!(res, 26);
        assert_eq!(out.as_bstr(), b"001ahello world, it works\n".as_bstr());
        Ok(())
    }

    /// Payloads larger than the 65516-byte protocol maximum are rejected.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn error_if_data_exceeds_limit() {
        fn vec_sized(size: usize) -> Vec<u8> {
            vec![0; size]
        }
        let res = data_to_write(&vec_sized(65516 + 1), io::sink()).await;
        assert_err_display(res, "Cannot encode more than 65516 bytes, got 65517");
    }

    /// Empty payloads are invalid — "0004" is not a legal packet line.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn error_if_data_is_empty() {
        assert_err_display(data_to_write(&[], io::sink()).await, "Empty lines are invalid");
    }
}
/// Tests for `encode::text_to_write`, which always terminates lines with LF.
mod text_to_write {
    use bstr::ByteSlice;

    // Pick the encode implementation matching the enabled IO feature.
    #[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
    use gix_packetline::async_io::encode::text_to_write;
    #[cfg(all(feature = "blocking-io", not(feature = "async-io")))]
    use gix_packetline::blocking_io::encode::text_to_write;

    /// A newline is appended even when the input already ends with one.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn always_appends_a_newline() -> crate::Result {
        let mut out = Vec::new();
        let res = text_to_write(b"a", &mut out).await?;
        assert_eq!(res, 6);
        assert_eq!(out.as_bstr(), b"0006a\n".as_bstr());

        out.clear();
        let res = text_to_write(b"a\n", &mut out).await?;
        assert_eq!(res, 7);
        assert_eq!(
            out.as_bstr(),
            b"0007a\n\n".as_bstr(),
            "newline must be appended, as the receiving end is likely to remove it"
        );
        Ok(())
    }
}
/// Tests for `encode::error_to_write`, which emits "ERR "-prefixed packet lines.
mod error {
    use bstr::ByteSlice;

    // Pick the encode implementation matching the enabled IO feature.
    #[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
    use gix_packetline::async_io::encode::error_to_write;
    #[cfg(all(feature = "blocking-io", not(feature = "async-io")))]
    use gix_packetline::blocking_io::encode::error_to_write;

    /// The message gets an "ERR " prefix behind the usual length header.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn write_line() -> crate::Result {
        let mut out = Vec::new();
        let res = error_to_write(b"hello error", &mut out).await?;
        assert_eq!(res, 19);
        assert_eq!(out.as_bstr(), b"0013ERR hello error".as_bstr());
        Ok(())
    }
}
/// Tests for the three special 4-byte control packets: flush ("0000"),
/// delimiter ("0001") and response-end ("0002").
mod flush_delim_response_end {
    use bstr::ByteSlice;

    // Pick the encode implementation matching the enabled IO feature.
    #[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
    use gix_packetline::async_io::encode::{delim_to_write, flush_to_write, response_end_to_write};
    #[cfg(all(feature = "blocking-io", not(feature = "async-io")))]
    use gix_packetline::blocking_io::encode::{delim_to_write, flush_to_write, response_end_to_write};

    /// Each control packet writes exactly its 4-byte marker.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn success_flush_delim_response_end() -> crate::Result {
        let mut out = Vec::new();
        let res = flush_to_write(&mut out).await?;
        assert_eq!(res, 4);
        assert_eq!(out.as_bstr(), b"0000".as_bstr());

        out.clear();
        let res = delim_to_write(&mut out).await?;
        assert_eq!(res, 4);
        assert_eq!(out.as_bstr(), b"0001".as_bstr());

        out.clear();
        let res = response_end_to_write(&mut out).await?;
        assert_eq!(res, 4);
        assert_eq!(out.as_bstr(), b"0002".as_bstr());
        Ok(())
    }
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,234 @@
/// Side-band channel reading tests live in their own file.
mod sideband;
/// Tests for `StreamingPeekableIter`: line-by-line reading with one-line
/// peeking, stop-line (delimiter) tracking, and ERR-line handling.
pub mod streaming_peek_iter {
    use std::{io, path::PathBuf};

    use bstr::ByteSlice;
    // Pick the iterator implementation matching the enabled IO feature.
    #[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
    use gix_packetline::async_io::StreamingPeekableIter;
    #[cfg(all(feature = "blocking-io", not(feature = "async-io")))]
    use gix_packetline::blocking_io::StreamingPeekableIter;
    use gix_packetline::PacketLineRef;

    /// Resolve `path` relative to this crate's fixture directory.
    fn fixture_path(path: &str) -> PathBuf {
        PathBuf::from("tests/fixtures").join(path)
    }

    /// Read a fixture file fully into memory, panicking if it is missing.
    pub fn fixture_bytes(path: &str) -> Vec<u8> {
        std::fs::read(fixture_path(path)).expect("readable fixture")
    }

    /// The first ref-advertisement line of the `01-many-refs` fixture.
    fn first_line() -> PacketLineRef<'static> {
        PacketLineRef::Data(b"7814e8a05a59c0cf5fb186661d1551c75d1299b5 HEAD\0multi_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag multi_ack_detailed symref=HEAD:refs/heads/master object-format=sha1 agent=git/2.28.0\n")
    }

    /// Peeking stops at configured delimiter lines just like `read_line`,
    /// and `reset` allows reading past them.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn peek_follows_read_line_delimiter_logic() -> crate::Result {
        let mut rd = StreamingPeekableIter::new(&b"0005a00000005b"[..], &[PacketLineRef::Flush], false);
        let res = rd.peek_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::Data(b"a"));
        rd.read_line().await;
        let res = rd.peek_line().await;
        assert!(res.is_none(), "we hit the delimiter, and thus are EOF");
        assert_eq!(
            rd.stopped_at(),
            Some(PacketLineRef::Flush),
            "Stopped tracking is done even when peeking"
        );
        let res = rd.peek_line().await;
        assert!(res.is_none(), "we are still done, no way around it");
        rd.reset();
        let res = rd.peek_line().await;
        assert_eq!(
            res.expect("line")??,
            PacketLineRef::Data(b"b"),
            "after resetting, we get past the delimiter"
        );
        Ok(())
    }

    /// With `fail_on_err_lines(true)`, peeking surfaces ERR lines as io errors
    /// and stops iteration, just like reading does.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn peek_follows_read_line_err_logic() -> crate::Result {
        let mut rd = StreamingPeekableIter::new(&b"0005a0009ERR e0000"[..], &[PacketLineRef::Flush], false);
        rd.fail_on_err_lines(true);
        let res = rd.peek_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::Data(b"a"));
        rd.read_line().await;
        let res = rd.peek_line().await;
        assert_eq!(
            res.expect("line").unwrap_err().to_string(),
            "e",
            "io errors are used to communicate remote errors when peeking"
        );
        let res = rd.peek_line().await;
        assert!(res.is_none(), "we are still done, no way around it");
        assert_eq!(rd.stopped_at(), None, "we stopped not because of a delimiter");
        rd.reset();
        let res = rd.peek_line().await;
        assert!(res.is_none(), "it should stop due to the delimiter");
        assert_eq!(
            rd.stopped_at(),
            Some(PacketLineRef::Flush),
            "Stopped tracking is done even when peeking"
        );
        Ok(())
    }

    /// Without ERR handling, ERR lines are plain data and a flush packet
    /// simply ends iteration with `None`.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn peek_eof_is_none() -> crate::Result {
        let mut rd = StreamingPeekableIter::new(&b"0005a0009ERR e0000"[..], &[PacketLineRef::Flush], false);
        rd.fail_on_err_lines(false);
        let res = rd.peek_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::Data(b"a"));
        rd.read_line().await;
        let res = rd.peek_line().await;
        assert_eq!(
            res.expect("line")??,
            PacketLineRef::Data(b"ERR e"),
            "we read the ERR but it's not interpreted as such"
        );
        rd.read_line().await;
        let res = rd.peek_line().await;
        assert!(res.is_none(), "we peek into the flush packet, which is EOF");
        assert_eq!(rd.stopped_at(), Some(PacketLineRef::Flush));
        Ok(())
    }

    /// Control packets (flush/delim/response-end) are read like lines; peeking
    /// past the end of stream keeps repeating an UnexpectedEof error.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn peek_non_data() -> crate::Result {
        let mut rd = StreamingPeekableIter::new(&b"000000010002"[..], &[PacketLineRef::ResponseEnd], false);
        let res = rd.read_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::Flush);
        let res = rd.read_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::Delimiter);
        // Swap the delimiter set so ResponseEnd is no longer a stop-line.
        rd.reset_with(&[PacketLineRef::Flush]);
        let res = rd.read_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::ResponseEnd);
        for _ in 0..2 {
            let res = rd.peek_line().await;
            assert_eq!(
                res.expect("error").unwrap_err().kind(),
                std::io::ErrorKind::UnexpectedEof,
                "peeks on error/eof repeat the error"
            );
        }
        assert_eq!(
            rd.stopped_at(),
            None,
            "The reader is configured to ignore ResponseEnd, and thus hits the end of stream"
        );
        Ok(())
    }

    /// ERR lines are plain data by default; `fail_on_err_lines(true)` turns
    /// them into errors, and `replace` resets that setting.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn fail_on_err_lines() -> crate::Result {
        let input = b"00010009ERR e0002";
        let mut rd = StreamingPeekableIter::new(&input[..], &[], false);
        let res = rd.read_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::Delimiter);
        let res = rd.read_line().await;
        assert_eq!(
            res.expect("line")??.as_bstr(),
            Some(b"ERR e".as_bstr()),
            "by default no special handling"
        );

        let mut rd = StreamingPeekableIter::new(&input[..], &[], false);
        rd.fail_on_err_lines(true);
        let res = rd.read_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::Delimiter);
        let res = rd.read_line().await;
        assert_eq!(
            res.expect("line").unwrap_err().to_string(),
            "e",
            "io errors are used to communicate remote errors"
        );
        let res = rd.read_line().await;
        assert!(res.is_none(), "iteration is done after the first error");

        rd.replace(input);
        let res = rd.read_line().await;
        assert_eq!(res.expect("line")??, PacketLineRef::Delimiter);
        let res = rd.read_line().await;
        assert_eq!(
            res.expect("line")??.as_bstr(),
            Some(b"ERR e".as_bstr()),
            "a 'replace' also resets error handling to the default: false"
        );
        Ok(())
    }

    /// Peeked lines stay available until actually read; reading consumes them.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn peek() -> crate::Result {
        let bytes = fixture_bytes("v1/fetch/01-many-refs.response");
        let mut rd = StreamingPeekableIter::new(&bytes[..], &[PacketLineRef::Flush], false);
        let res = rd.peek_line().await;
        assert_eq!(res.expect("line")??, first_line(), "peek returns first line");
        let res = rd.peek_line().await;
        assert_eq!(
            res.expect("line")??,
            first_line(),
            "peeked lines are never exhausted, unless they are finally read"
        );
        let res = rd.read_line().await;
        assert_eq!(res.expect("line")??, first_line(), "read_line returns the peek once");
        let res = rd.read_line().await;
        assert_eq!(
            res.expect("line")??.as_bstr(),
            Some(b"7814e8a05a59c0cf5fb186661d1551c75d1299b5 refs/heads/master\n".as_bstr()),
            "the next read_line returns the next line"
        );
        let res = rd.peek_line().await;
        assert_eq!(
            res.expect("line")??.as_bstr(),
            Some(b"7814e8a05a59c0cf5fb186661d1551c75d1299b5 refs/remotes/origin/HEAD\n".as_bstr()),
            "peek always gets the next line verbatim"
        );
        let res = exhaust(&mut rd).await;
        assert_eq!(res, 1559);
        assert_eq!(
            rd.stopped_at(),
            Some(PacketLineRef::Flush),
            "A flush packet line ends every pack file"
        );
        Ok(())
    }

    /// The underlying reader keeps its position across `reset`, so the same
    /// fixture appended twice yields two full iterations.
    #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
    async fn read_from_file_and_reader_advancement() -> crate::Result {
        let mut bytes = fixture_bytes("v1/fetch/01-many-refs.response");
        bytes.extend(fixture_bytes("v1/fetch/01-many-refs.response"));
        let mut rd = StreamingPeekableIter::new(&bytes[..], &[PacketLineRef::Flush], false);
        let res = rd.read_line().await;
        assert_eq!(res.expect("line")??, first_line());
        let res = exhaust(&mut rd).await;
        assert_eq!(res + 1, 1561, "it stops after seeing the flush byte");
        rd.reset();
        let res = exhaust(&mut rd).await;
        assert_eq!(
            res, 1561,
            "it should read the second part of the identical file from the previously advanced reader"
        );
        // this reset will cause actual io::Errors to occur
        rd.reset();
        let res = rd.read_line().await;
        assert_eq!(
            res.expect("some error").unwrap_err().kind(),
            io::ErrorKind::UnexpectedEof,
            "trying to keep reading from exhausted input results in Some() containing the original error"
        );
        Ok(())
    }

    /// Count how many lines remain until the iterator stops (EOF, delimiter,
    /// or error).
    #[maybe_async::maybe_async]
    async fn exhaust(rd: &mut StreamingPeekableIter<&[u8]>) -> i32 {
        let mut count = 0;
        while rd.read_line().await.is_some() {
            count += 1;
        }
        count
    }
}

View File

@@ -0,0 +1,278 @@
#[cfg(feature = "blocking-io")]
use std::io::Read;
use bstr::{BString, ByteSlice};
#[cfg(all(not(feature = "blocking-io"), feature = "async-io"))]
use futures_lite::io::AsyncReadExt;
use gix_odb::pack;
#[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
use gix_packetline::async_io::StreamingPeekableIter;
#[cfg(all(feature = "blocking-io", not(feature = "async-io")))]
use gix_packetline::blocking_io::StreamingPeekableIter;
use gix_packetline::{read::ProgressAction, PacketLineRef};
use crate::read::streaming_peek_iter::fixture_bytes;
// Bridge used only in async builds: the pack parser below requires a blocking
// `std::io` reader, so this adapter drives async reads to completion inline.
#[cfg(all(not(feature = "blocking-io"), feature = "async-io"))]
mod util {
    use std::{io::Result, pin::Pin};

    use futures_io::{AsyncBufRead, AsyncRead};
    use futures_lite::{future, AsyncBufReadExt, AsyncReadExt};

    /// Wraps an async reader and exposes it through the blocking
    /// `std::io::Read`/`BufRead` traits by blocking on each operation.
    pub struct BlockOn<T>(pub T);

    impl<T: AsyncRead + Unpin> std::io::Read for BlockOn<T> {
        fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
            // Drive the async read to completion on the current thread.
            future::block_on(self.0.read(buf))
        }
    }

    impl<T: AsyncBufRead + Unpin> std::io::BufRead for BlockOn<T> {
        fn fill_buf(&mut self) -> Result<&[u8]> {
            future::block_on(self.0.fill_buf())
        }

        fn consume(&mut self, amt: usize) {
            // `consume` is synchronous on AsyncBufRead; it only needs pinning.
            Pin::new(&mut self.0).consume(amt);
        }
    }
}
/// Reads a recorded V1 clone response end to end: the ref advertisement as
/// plain lines, a NAK, then the pack stream with side-band progress messages
/// collected through a callback and verified afterwards.
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn read_pack_with_progress_extraction() -> crate::Result {
    let buf = fixture_bytes("v1/01-clone.combined-output");
    let mut rd = StreamingPeekableIter::new(&buf[..], &[PacketLineRef::Flush], false);

    // Read without sideband decoding
    let mut out = Vec::new();
    rd.as_read().read_to_end(&mut out).await?;
    assert_eq!(out.as_bstr(), b"808e50d724f604f69ab93c6da2919c014667bedb HEAD\0multi_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag multi_ack_detailed symref=HEAD:refs/heads/master object-format=sha1 agent=git/2.28.0\n808e50d724f604f69ab93c6da2919c014667bedb refs/heads/master\n".as_bstr());

    let res = rd.read_line().await;
    assert_eq!(
        res.expect("line")??.as_text().expect("data line").0.as_bstr(),
        b"NAK".as_bstr()
    );

    // Collect each progress message; returning Continue keeps the stream flowing.
    let mut seen_texts = Vec::<BString>::new();
    let mut do_nothing = |is_err: bool, data: &[u8]| -> ProgressAction {
        assert!(!is_err);
        seen_texts.push(data.as_bstr().into());
        std::ops::ControlFlow::Continue(())
    };
    let pack_read = rd.as_read_with_sidebands(&mut do_nothing);
    // The pack parser needs a blocking reader; in async builds the reader is
    // bridged through `util::BlockOn`.
    #[cfg(all(not(feature = "blocking-io"), feature = "async-io"))]
    let mut pack_entries = pack::data::input::BytesToEntriesIter::new_from_header(
        util::BlockOn(pack_read),
        pack::data::input::Mode::Verify,
        pack::data::input::EntryDataMode::Ignore,
        gix_hash::Kind::Sha1,
    )?;
    #[cfg(feature = "blocking-io")]
    let mut pack_entries = pack::data::input::BytesToEntriesIter::new_from_header(
        pack_read,
        pack::data::input::Mode::Verify,
        pack::data::input::EntryDataMode::Ignore,
        gix_hash::Kind::Sha1,
    )?;
    // Skip to the last entry: only it carries the pack trailer checksum.
    let all_but_last = pack_entries.size_hint().0 - 1;
    let last = pack_entries.nth(all_but_last).expect("last entry")?;
    drop(pack_entries);

    assert_eq!(
        last.trailer
            .expect("trailer to exist on last entry")
            .to_hex()
            .to_string(),
        "150a1045f04dc0fc2dbf72313699fda696bf4126"
    );
    assert_eq!(
        seen_texts,
        [
            "Enumerating objects: 3, done.",
            "Counting objects: 33% (1/3)\r",
            "Counting objects: 66% (2/3)\r",
            "Counting objects: 100% (3/3)\r",
            "Counting objects: 100% (3/3), done.",
            "Total 3 (delta 0), reused 0 (delta 0), pack-reused 0"
        ]
        .iter()
        .map(|v| v.as_bytes().as_bstr().to_owned())
        .collect::<Vec<_>>()
    );
    Ok(())
}
/// The `read_line_to_string` trait method yields exactly one packet line per
/// call, returns empty strings at a stop-line, and resumes after a reset.
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn read_line_trait_method_reads_one_packet_line_at_a_time() -> crate::Result {
    let buf = fixture_bytes("v1/01-clone.combined-output-no-binary");
    let mut rd = StreamingPeekableIter::new(&buf[..], &[PacketLineRef::Flush], false);
    let mut out = String::new();
    let mut r = rd.as_read();
    r.read_line_to_string(&mut out).await?;
    assert_eq!(out, "808e50d724f604f69ab93c6da2919c014667bedb HEAD\0multi_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag multi_ack_detailed symref=HEAD:refs/heads/master object-format=sha1 agent=git/2.28.0\n");
    out.clear();
    r.read_line_to_string(&mut out).await?;
    assert_eq!(out, "808e50d724f604f69ab93c6da2919c014667bedb refs/heads/master\n");
    out.clear();
    r.read_line_to_string(&mut out).await?;
    assert_eq!(out, "", "flush means empty lines…");
    out.clear();
    r.read_line_to_string(&mut out).await?;
    assert_eq!(out, "", "…which can't be overcome unless the reader is reset");
    assert_eq!(
        r.stopped_at(),
        Some(PacketLineRef::Flush),
        "it knows what stopped the reader"
    );
    drop(r);
    rd.reset();

    let mut r = rd.as_read();
    r.read_line_to_string(&mut out).await?;
    assert_eq!(out, "NAK\n");
    drop(r);

    let mut r = rd.as_read_with_sidebands(|_, _| std::ops::ControlFlow::Continue(()));
    out.clear();
    r.read_line_to_string(&mut out).await?;
    assert_eq!(out, "&");
    out.clear();
    r.read_line_to_string(&mut out).await?;
    assert_eq!(out, "");
    Ok(())
}
/// `read_data_line` yields one packet line per call and returns `None` at a
/// stop-line; with a pass-through sideband handler, band prefixes appear in
/// the returned data unfiltered.
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn readline_reads_one_packet_line_at_a_time() -> crate::Result {
    let buf = fixture_bytes("v1/01-clone.combined-output-no-binary");

    let mut rd = StreamingPeekableIter::new(&buf[..], &[PacketLineRef::Flush], false);
    let mut r = rd.as_read();
    let line = r.read_data_line().await.unwrap()??.as_bstr().unwrap();
    assert_eq!(line, "808e50d724f604f69ab93c6da2919c014667bedb HEAD\0multi_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag multi_ack_detailed symref=HEAD:refs/heads/master object-format=sha1 agent=git/2.28.0\n");
    let line = r.read_data_line().await.unwrap()??.as_bstr().unwrap();
    assert_eq!(line, "808e50d724f604f69ab93c6da2919c014667bedb refs/heads/master\n");

    let line = r.read_data_line().await;
    assert!(line.is_none(), "flush means `None`");
    let line = r.read_data_line().await;
    assert!(line.is_none(), "…which can't be overcome unless the reader is reset");
    assert_eq!(
        r.stopped_at(),
        Some(PacketLineRef::Flush),
        "it knows what stopped the reader"
    );
    drop(r);
    rd.reset();

    let mut r = rd.as_read();
    let line = r.read_data_line().await.unwrap()??.as_bstr().unwrap();
    assert_eq!(line.as_bstr(), "NAK\n");
    drop(r);

    let mut r = rd.as_read_with_sidebands(|_, _| std::ops::ControlFlow::Continue(()));
    let line = r.read_data_line().await.unwrap()??.as_bstr().unwrap();
    assert_eq!(
        line.as_bstr(),
        "\x02Enumerating objects: 3, done.\n",
        "sidebands are ignored entirely here"
    );
    // Drain the remaining progress lines before checking for the end.
    for _ in 0..6 {
        let _discard_more_progress = r.read_data_line().await.unwrap()??.as_bstr().unwrap();
    }
    let line = r.read_data_line().await;
    assert!(line.is_none(), "and we have reached the end");
    Ok(())
}
/// Peeking after the input is fully exhausted (no stop-line reached) surfaces
/// an UnexpectedEof error rather than a clean end.
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn peek_past_an_actual_eof_is_an_error() -> crate::Result {
    let input = b"0009ERR e";
    let mut rd = StreamingPeekableIter::new(&input[..], &[], false);
    let mut reader = rd.as_read();
    let res = reader.peek_data_line().await;
    assert_eq!(res.expect("one line")??, b"ERR e");

    let mut buf = String::new();
    reader.read_line_to_string(&mut buf).await?;
    assert_eq!(
        buf, "ERR e",
        "by default ERR lines won't propagate as failure but are merely text"
    );

    let res = reader.peek_data_line().await;
    assert_eq!(
        res.expect("an err").expect_err("foo").kind(),
        std::io::ErrorKind::UnexpectedEof,
        "peeking past the end is not an error as the caller should make sure we don't try 'invalid' reads"
    );
    Ok(())
}
/// Peeking past a configured stop-line (flush) yields a clean `None`, not an
/// io error — the flush is a natural end-of-section marker.
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn peek_past_a_delimiter_is_no_error() -> crate::Result {
    let input = b"0009hello0000";
    let mut rd = StreamingPeekableIter::new(&input[..], &[PacketLineRef::Flush], false);
    let mut reader = rd.as_read();
    let res = reader.peek_data_line().await;
    assert_eq!(res.expect("one line")??, b"hello");

    let mut buf = String::new();
    reader.read_line_to_string(&mut buf).await?;
    assert_eq!(buf, "hello");

    let res = reader.peek_data_line().await;
    assert!(
        res.is_none(),
        "peeking past a flush packet is a 'natural' event that should not cause an error"
    );
    Ok(())
}
/// With `fail_on_err_lines(true)`, ERR lines become io errors carrying a
/// typed `read::Error`; reading then stops until the reader is reset, and the
/// setting survives `reset_with`.
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn handling_of_err_lines() {
    let input = b"0009ERR e0009ERR x0000";
    let mut rd = StreamingPeekableIter::new(&input[..], &[], false);
    rd.fail_on_err_lines(true);
    let mut buf = [0u8; 2];
    let mut reader = rd.as_read();
    let res = reader.read(buf.as_mut()).await;
    let err = res.unwrap_err();
    assert_eq!(err.to_string(), "e", "it respects errors and passes them on");
    // The io::Error wraps the crate's typed read error with the raw message.
    assert_eq!(
        err.into_inner()
            .expect("inner err")
            .downcast::<gix_packetline::read::Error>()
            .expect("it's this type")
            .message,
        "e",
    );
    let res = reader.read(buf.as_mut()).await;
    assert_eq!(
        res.expect("read to succeed - EOF"),
        0,
        "it stops reading after an error despite there being more to read"
    );
    reader.reset_with(&[PacketLineRef::Flush]);
    let res = reader.read(buf.as_mut()).await;
    assert_eq!(
        res.unwrap_err().to_string(),
        "x",
        "after a reset it continues reading, but retains the 'fail_on_err_lines' setting"
    );
    assert_eq!(
        reader.stopped_at(),
        None,
        "An error can also be the reason, which is not distinguishable from an EOF"
    );
}

View File

@@ -0,0 +1,61 @@
#[cfg(feature = "blocking-io")]
use std::io::Write;
use bstr::ByteSlice;
#[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
use futures_lite::prelude::*;
#[cfg(all(feature = "async-io", not(feature = "blocking-io")))]
use gix_packetline::async_io::Writer;
#[cfg(all(feature = "blocking-io", not(feature = "async-io")))]
use gix_packetline::blocking_io::Writer;
// Maximum payload bytes a single packet line may carry.
const MAX_DATA_LEN: usize = 65516;
// Maximum total line length: the 4 hex size digits plus the payload.
const MAX_LINE_LEN: usize = 4 + MAX_DATA_LEN;
/// Every `write`/`write_all` call on `Writer` emits exactly one packet line
/// with its own length prefix.
#[allow(clippy::unused_io_amount)] // under test
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn each_write_results_in_one_line() -> crate::Result {
    let mut w = Writer::new(Vec::new());
    w.write_all(b"hello").await?;
    w.write(b"world!").await?;
    let buf = w.into_inner();
    assert_eq!(buf.as_bstr(), b"0009hello000aworld!".as_bstr());
    Ok(())
}
/// Text mode appends a newline to each written line; binary mode writes the
/// bytes verbatim. The mode can be toggled between writes.
#[allow(clippy::unused_io_amount)] // under test
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn write_text_and_write_binary() -> crate::Result {
    let buf = {
        let mut w = Writer::new(Vec::new());
        w.enable_text_mode();
        w.write_all(b"hello").await?;
        w.enable_binary_mode();
        w.write(b"world").await?;
        w.into_inner()
    };
    assert_eq!(buf.as_bstr(), b"000ahello\n0009world".as_bstr());
    Ok(())
}
/// Writes larger than the per-line maximum are transparently split across
/// multiple packet lines.
#[allow(clippy::unused_io_amount)] // under test
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn huge_writes_are_split_into_lines() -> crate::Result {
    let buf = {
        // Exactly twice the payload limit, so the data fills two full lines.
        let data = vec![0u8; MAX_DATA_LEN * 2];
        let mut w = Writer::new(Vec::new());
        w.write(&data).await?;
        w.into_inner()
    };
    assert_eq!(buf.len(), MAX_LINE_LEN * 2);
    Ok(())
}
/// Writing an empty slice is rejected: it would encode as "0004", which the
/// packet-line format does not allow.
#[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))]
async fn empty_writes_fail_with_error() {
    let res = Writer::new(Vec::new()).write(&[]).await;
    assert_eq!(
        res.unwrap_err().to_string(),
        "empty packet lines are not permitted as '0004' is invalid"
    );
}