RPM build fix (reverted CI changes that will need to be un-reverted or made conditional) and vendor Rust dependencies to make builds much faster in any CI system (vendoring setup sketched below).

This commit is contained in:
Adam Ierymenko
2022-06-08 07:32:16 -04:00
parent 373ca30269
commit d5ca4e5f52
12611 changed files with 2898014 additions and 284 deletions
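
The vendoring this commit introduces is the standard Cargo mechanism: run cargo vendor to copy every registry dependency into a local directory, then point Cargo at that directory with a source-replacement entry in .cargo/config.toml. The sketch below shows that wiring; the directory names (zeroidc as the crate root, vendor as the output directory) are assumptions inferred from the vendored paths in this commit, not taken from its actual config files.

# Run once from the Rust crate root (assumed here to be zeroidc/):
#
#     cargo vendor vendor
#
# cargo vendor copies all crates.io dependencies into ./vendor and prints the
# source-replacement config below, which goes in zeroidc/.cargo/config.toml so
# subsequent builds read crates from the local directory instead of the network.

[source.crates-io]
replace-with = "vendored-sources"

[source.vendored-sources]
directory = "vendor"

With this in place, builds on CI machines without network access to crates.io (for example cargo build --offline) resolve dependencies entirely from the committed vendor tree, which is the speed-up the commit message refers to.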

zeroidc/vendor/bytes/tests/test_buf.rs vendored Normal file

@@ -0,0 +1,120 @@
#![warn(rust_2018_idioms)]
use bytes::Buf;
#[cfg(feature = "std")]
use std::io::IoSlice;
#[test]
fn test_fresh_cursor_vec() {
let mut buf = &b"hello"[..];
assert_eq!(buf.remaining(), 5);
assert_eq!(buf.chunk(), b"hello");
buf.advance(2);
assert_eq!(buf.remaining(), 3);
assert_eq!(buf.chunk(), b"llo");
buf.advance(3);
assert_eq!(buf.remaining(), 0);
assert_eq!(buf.chunk(), b"");
}
#[test]
fn test_get_u8() {
let mut buf = &b"\x21zomg"[..];
assert_eq!(0x21, buf.get_u8());
}
#[test]
fn test_get_u16() {
let mut buf = &b"\x21\x54zomg"[..];
assert_eq!(0x2154, buf.get_u16());
let mut buf = &b"\x21\x54zomg"[..];
assert_eq!(0x5421, buf.get_u16_le());
}
#[test]
#[should_panic]
fn test_get_u16_buffer_underflow() {
let mut buf = &b"\x21"[..];
buf.get_u16();
}
#[cfg(feature = "std")]
#[test]
fn test_bufs_vec() {
let buf = &b"hello world"[..];
let b1: &[u8] = &mut [];
let b2: &[u8] = &mut [];
let mut dst = [IoSlice::new(b1), IoSlice::new(b2)];
assert_eq!(1, buf.chunks_vectored(&mut dst[..]));
}
#[test]
fn test_vec_deque() {
use std::collections::VecDeque;
let mut buffer: VecDeque<u8> = VecDeque::new();
buffer.extend(b"hello world");
assert_eq!(11, buffer.remaining());
assert_eq!(b"hello world", buffer.chunk());
buffer.advance(6);
assert_eq!(b"world", buffer.chunk());
buffer.extend(b" piece");
let mut out = [0; 11];
buffer.copy_to_slice(&mut out);
assert_eq!(b"world piece", &out[..]);
}
#[test]
fn test_deref_buf_forwards() {
struct Special;
impl Buf for Special {
fn remaining(&self) -> usize {
unreachable!("remaining");
}
fn chunk(&self) -> &[u8] {
unreachable!("chunk");
}
fn advance(&mut self, _: usize) {
unreachable!("advance");
}
fn get_u8(&mut self) -> u8 {
// specialized!
b'x'
}
}
// these should all use the specialized method
assert_eq!(Special.get_u8(), b'x');
assert_eq!((&mut Special as &mut dyn Buf).get_u8(), b'x');
assert_eq!((Box::new(Special) as Box<dyn Buf>).get_u8(), b'x');
assert_eq!(Box::new(Special).get_u8(), b'x');
}
#[test]
fn copy_to_bytes_less() {
let mut buf = &b"hello world"[..];
let bytes = buf.copy_to_bytes(5);
assert_eq!(bytes, &b"hello"[..]);
assert_eq!(buf, &b" world"[..])
}
#[test]
#[should_panic]
fn copy_to_bytes_overflow() {
let mut buf = &b"hello world"[..];
let _bytes = buf.copy_to_bytes(12);
}

@@ -0,0 +1,178 @@
#![warn(rust_2018_idioms)]
use bytes::buf::UninitSlice;
use bytes::{BufMut, BytesMut};
use core::fmt::Write;
use core::usize;
#[test]
fn test_vec_as_mut_buf() {
let mut buf = Vec::with_capacity(64);
assert_eq!(buf.remaining_mut(), isize::MAX as usize);
assert!(buf.chunk_mut().len() >= 64);
buf.put(&b"zomg"[..]);
assert_eq!(&buf, b"zomg");
assert_eq!(buf.remaining_mut(), isize::MAX as usize - 4);
assert_eq!(buf.capacity(), 64);
for _ in 0..16 {
buf.put(&b"zomg"[..]);
}
assert_eq!(buf.len(), 68);
}
#[test]
fn test_vec_put_bytes() {
let mut buf = Vec::new();
buf.push(17);
buf.put_bytes(19, 2);
assert_eq!([17, 19, 19], &buf[..]);
}
#[test]
fn test_put_u8() {
let mut buf = Vec::with_capacity(8);
buf.put_u8(33);
assert_eq!(b"\x21", &buf[..]);
}
#[test]
fn test_put_u16() {
let mut buf = Vec::with_capacity(8);
buf.put_u16(8532);
assert_eq!(b"\x21\x54", &buf[..]);
buf.clear();
buf.put_u16_le(8532);
assert_eq!(b"\x54\x21", &buf[..]);
}
#[test]
fn test_put_int() {
let mut buf = Vec::with_capacity(8);
buf.put_int(0x1020304050607080, 3);
assert_eq!(b"\x60\x70\x80", &buf[..]);
}
#[test]
#[should_panic]
fn test_put_int_nbytes_overflow() {
let mut buf = Vec::with_capacity(8);
buf.put_int(0x1020304050607080, 9);
}
#[test]
fn test_put_int_le() {
let mut buf = Vec::with_capacity(8);
buf.put_int_le(0x1020304050607080, 3);
assert_eq!(b"\x80\x70\x60", &buf[..]);
}
#[test]
#[should_panic]
fn test_put_int_le_nbytes_overflow() {
let mut buf = Vec::with_capacity(8);
buf.put_int_le(0x1020304050607080, 9);
}
#[test]
#[should_panic(expected = "cannot advance")]
fn test_vec_advance_mut() {
// Verify fix for #354
let mut buf = Vec::with_capacity(8);
unsafe {
buf.advance_mut(12);
}
}
#[test]
fn test_clone() {
let mut buf = BytesMut::with_capacity(100);
buf.write_str("this is a test").unwrap();
let buf2 = buf.clone();
buf.write_str(" of our emergency broadcast system").unwrap();
assert!(buf != buf2);
}
#[test]
fn test_mut_slice() {
let mut v = vec![0, 0, 0, 0];
let mut s = &mut v[..];
s.put_u32(42);
assert_eq!(s.len(), 0);
assert_eq!(&v, &[0, 0, 0, 42]);
}
#[test]
fn test_slice_put_bytes() {
let mut v = [0, 0, 0, 0];
let mut s = &mut v[..];
s.put_u8(17);
s.put_bytes(19, 2);
assert_eq!(1, s.remaining_mut());
assert_eq!(&[17, 19, 19, 0], &v[..]);
}
#[test]
fn test_deref_bufmut_forwards() {
struct Special;
unsafe impl BufMut for Special {
fn remaining_mut(&self) -> usize {
unreachable!("remaining_mut");
}
fn chunk_mut(&mut self) -> &mut UninitSlice {
unreachable!("chunk_mut");
}
unsafe fn advance_mut(&mut self, _: usize) {
unreachable!("advance");
}
fn put_u8(&mut self, _: u8) {
// specialized!
}
}
// these should all use the specialized method
Special.put_u8(b'x');
(&mut Special as &mut dyn BufMut).put_u8(b'x');
(Box::new(Special) as Box<dyn BufMut>).put_u8(b'x');
Box::new(Special).put_u8(b'x');
}
#[test]
#[should_panic]
fn write_byte_panics_if_out_of_bounds() {
let mut data = [b'b', b'a', b'r'];
let slice = unsafe { UninitSlice::from_raw_parts_mut(data.as_mut_ptr(), 3) };
slice.write_byte(4, b'f');
}
#[test]
#[should_panic]
fn copy_from_slice_panics_if_different_length_1() {
let mut data = [b'b', b'a', b'r'];
let slice = unsafe { UninitSlice::from_raw_parts_mut(data.as_mut_ptr(), 3) };
slice.copy_from_slice(b"a");
}
#[test]
#[should_panic]
fn copy_from_slice_panics_if_different_length_2() {
let mut data = [b'b', b'a', b'r'];
let slice = unsafe { UninitSlice::from_raw_parts_mut(data.as_mut_ptr(), 3) };
slice.copy_from_slice(b"abcd");
}

zeroidc/vendor/bytes/tests/test_bytes.rs vendored Normal file
File diff suppressed because it is too large

@@ -0,0 +1,69 @@
//! Test using `Bytes` with an allocator that hands out "odd" pointers for
//! vectors (pointers where the LSB is set).
#![cfg(not(miri))] // Miri does not support custom allocators (also, Miri is "odd" by default with 50% chance)
use std::alloc::{GlobalAlloc, Layout, System};
use std::ptr;
use bytes::Bytes;
#[global_allocator]
static ODD: Odd = Odd;
struct Odd;
unsafe impl GlobalAlloc for Odd {
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
if layout.align() == 1 && layout.size() > 0 {
// Allocate slightly bigger so that we can offset the pointer by 1
let size = layout.size() + 1;
let new_layout = match Layout::from_size_align(size, 1) {
Ok(layout) => layout,
Err(_err) => return ptr::null_mut(),
};
let ptr = System.alloc(new_layout);
if !ptr.is_null() {
let ptr = ptr.offset(1);
ptr
} else {
ptr
}
} else {
System.alloc(layout)
}
}
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
if layout.align() == 1 && layout.size() > 0 {
let size = layout.size() + 1;
let new_layout = match Layout::from_size_align(size, 1) {
Ok(layout) => layout,
Err(_err) => std::process::abort(),
};
System.dealloc(ptr.offset(-1), new_layout);
} else {
System.dealloc(ptr, layout);
}
}
}
#[test]
fn sanity_check_odd_allocator() {
let vec = vec![33u8; 1024];
let p = vec.as_ptr() as usize;
assert!(p & 0x1 == 0x1, "{:#b}", p);
}
#[test]
fn test_bytes_from_vec_drop() {
let vec = vec![33u8; 1024];
let _b = Bytes::from(vec);
}
#[test]
fn test_bytes_clone_drop() {
let vec = vec![33u8; 1024];
let b1 = Bytes::from(vec);
let _b2 = b1.clone();
}

@@ -0,0 +1,79 @@
use std::alloc::{GlobalAlloc, Layout, System};
use std::{mem, ptr};
use bytes::{Buf, Bytes};
#[global_allocator]
static LEDGER: Ledger = Ledger;
struct Ledger;
const USIZE_SIZE: usize = mem::size_of::<usize>();
unsafe impl GlobalAlloc for Ledger {
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
if layout.align() == 1 && layout.size() > 0 {
// Allocate extra space to stash a record of
// how much space there was.
let orig_size = layout.size();
let size = orig_size + USIZE_SIZE;
let new_layout = match Layout::from_size_align(size, 1) {
Ok(layout) => layout,
Err(_err) => return ptr::null_mut(),
};
let ptr = System.alloc(new_layout);
if !ptr.is_null() {
(ptr as *mut usize).write(orig_size);
let ptr = ptr.offset(USIZE_SIZE as isize);
ptr
} else {
ptr
}
} else {
System.alloc(layout)
}
}
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
if layout.align() == 1 && layout.size() > 0 {
let off_ptr = (ptr as *mut usize).offset(-1);
let orig_size = off_ptr.read();
if orig_size != layout.size() {
panic!(
"bad dealloc: alloc size was {}, dealloc size is {}",
orig_size,
layout.size()
);
}
let new_layout = match Layout::from_size_align(layout.size() + USIZE_SIZE, 1) {
Ok(layout) => layout,
Err(_err) => std::process::abort(),
};
System.dealloc(off_ptr as *mut u8, new_layout);
} else {
System.dealloc(ptr, layout);
}
}
}
#[test]
fn test_bytes_advance() {
let mut bytes = Bytes::from(vec![10, 20, 30]);
bytes.advance(1);
drop(bytes);
}
#[test]
fn test_bytes_truncate() {
let mut bytes = Bytes::from(vec![10, 20, 30]);
bytes.truncate(2);
drop(bytes);
}
#[test]
fn test_bytes_truncate_and_advance() {
let mut bytes = Bytes::from(vec![10, 20, 30]);
bytes.truncate(2);
bytes.advance(1);
drop(bytes);
}

zeroidc/vendor/bytes/tests/test_chain.rs vendored Normal file

@@ -0,0 +1,155 @@
#![warn(rust_2018_idioms)]
use bytes::{Buf, BufMut, Bytes};
#[cfg(feature = "std")]
use std::io::IoSlice;
#[test]
fn collect_two_bufs() {
let a = Bytes::from(&b"hello"[..]);
let b = Bytes::from(&b"world"[..]);
let res = a.chain(b).copy_to_bytes(10);
assert_eq!(res, &b"helloworld"[..]);
}
#[test]
fn writing_chained() {
let mut a = [0u8; 64];
let mut b = [0u8; 64];
{
let mut buf = (&mut a[..]).chain_mut(&mut b[..]);
for i in 0u8..128 {
buf.put_u8(i);
}
}
for i in 0..64 {
let expect = i as u8;
assert_eq!(expect, a[i]);
assert_eq!(expect + 64, b[i]);
}
}
#[test]
fn iterating_two_bufs() {
let a = Bytes::from(&b"hello"[..]);
let b = Bytes::from(&b"world"[..]);
let res: Vec<u8> = a.chain(b).into_iter().collect();
assert_eq!(res, &b"helloworld"[..]);
}
#[cfg(feature = "std")]
#[test]
fn vectored_read() {
let a = Bytes::from(&b"hello"[..]);
let b = Bytes::from(&b"world"[..]);
let mut buf = a.chain(b);
{
let b1: &[u8] = &mut [];
let b2: &[u8] = &mut [];
let b3: &[u8] = &mut [];
let b4: &[u8] = &mut [];
let mut iovecs = [
IoSlice::new(b1),
IoSlice::new(b2),
IoSlice::new(b3),
IoSlice::new(b4),
];
assert_eq!(2, buf.chunks_vectored(&mut iovecs));
assert_eq!(iovecs[0][..], b"hello"[..]);
assert_eq!(iovecs[1][..], b"world"[..]);
assert_eq!(iovecs[2][..], b""[..]);
assert_eq!(iovecs[3][..], b""[..]);
}
buf.advance(2);
{
let b1: &[u8] = &mut [];
let b2: &[u8] = &mut [];
let b3: &[u8] = &mut [];
let b4: &[u8] = &mut [];
let mut iovecs = [
IoSlice::new(b1),
IoSlice::new(b2),
IoSlice::new(b3),
IoSlice::new(b4),
];
assert_eq!(2, buf.chunks_vectored(&mut iovecs));
assert_eq!(iovecs[0][..], b"llo"[..]);
assert_eq!(iovecs[1][..], b"world"[..]);
assert_eq!(iovecs[2][..], b""[..]);
assert_eq!(iovecs[3][..], b""[..]);
}
buf.advance(3);
{
let b1: &[u8] = &mut [];
let b2: &[u8] = &mut [];
let b3: &[u8] = &mut [];
let b4: &[u8] = &mut [];
let mut iovecs = [
IoSlice::new(b1),
IoSlice::new(b2),
IoSlice::new(b3),
IoSlice::new(b4),
];
assert_eq!(1, buf.chunks_vectored(&mut iovecs));
assert_eq!(iovecs[0][..], b"world"[..]);
assert_eq!(iovecs[1][..], b""[..]);
assert_eq!(iovecs[2][..], b""[..]);
assert_eq!(iovecs[3][..], b""[..]);
}
buf.advance(3);
{
let b1: &[u8] = &mut [];
let b2: &[u8] = &mut [];
let b3: &[u8] = &mut [];
let b4: &[u8] = &mut [];
let mut iovecs = [
IoSlice::new(b1),
IoSlice::new(b2),
IoSlice::new(b3),
IoSlice::new(b4),
];
assert_eq!(1, buf.chunks_vectored(&mut iovecs));
assert_eq!(iovecs[0][..], b"ld"[..]);
assert_eq!(iovecs[1][..], b""[..]);
assert_eq!(iovecs[2][..], b""[..]);
assert_eq!(iovecs[3][..], b""[..]);
}
}
#[test]
fn chain_get_bytes() {
let mut ab = Bytes::copy_from_slice(b"ab");
let mut cd = Bytes::copy_from_slice(b"cd");
let ab_ptr = ab.as_ptr();
let cd_ptr = cd.as_ptr();
let mut chain = (&mut ab).chain(&mut cd);
let a = chain.copy_to_bytes(1);
let bc = chain.copy_to_bytes(2);
let d = chain.copy_to_bytes(1);
assert_eq!(Bytes::copy_from_slice(b"a"), a);
assert_eq!(Bytes::copy_from_slice(b"bc"), bc);
assert_eq!(Bytes::copy_from_slice(b"d"), d);
// assert `get_bytes` did not allocate
assert_eq!(ab_ptr, a.as_ptr());
// assert `get_bytes` did not allocate
assert_eq!(cd_ptr.wrapping_offset(1), d.as_ptr());
}

@@ -0,0 +1,35 @@
#![warn(rust_2018_idioms)]
use bytes::Bytes;
#[test]
fn fmt() {
let vec: Vec<_> = (0..0x100).map(|b| b as u8).collect();
let expected = "b\"\
\\0\\x01\\x02\\x03\\x04\\x05\\x06\\x07\
\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f\
\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\
\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\
\x20!\\\"#$%&'()*+,-./0123456789:;<=>?\
@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_\
`abcdefghijklmnopqrstuvwxyz{|}~\\x7f\
\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\
\\x88\\x89\\x8a\\x8b\\x8c\\x8d\\x8e\\x8f\
\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\
\\x98\\x99\\x9a\\x9b\\x9c\\x9d\\x9e\\x9f\
\\xa0\\xa1\\xa2\\xa3\\xa4\\xa5\\xa6\\xa7\
\\xa8\\xa9\\xaa\\xab\\xac\\xad\\xae\\xaf\
\\xb0\\xb1\\xb2\\xb3\\xb4\\xb5\\xb6\\xb7\
\\xb8\\xb9\\xba\\xbb\\xbc\\xbd\\xbe\\xbf\
\\xc0\\xc1\\xc2\\xc3\\xc4\\xc5\\xc6\\xc7\
\\xc8\\xc9\\xca\\xcb\\xcc\\xcd\\xce\\xcf\
\\xd0\\xd1\\xd2\\xd3\\xd4\\xd5\\xd6\\xd7\
\\xd8\\xd9\\xda\\xdb\\xdc\\xdd\\xde\\xdf\
\\xe0\\xe1\\xe2\\xe3\\xe4\\xe5\\xe6\\xe7\
\\xe8\\xe9\\xea\\xeb\\xec\\xed\\xee\\xef\
\\xf0\\xf1\\xf2\\xf3\\xf4\\xf5\\xf6\\xf7\
\\xf8\\xf9\\xfa\\xfb\\xfc\\xfd\\xfe\\xff\"";
assert_eq!(expected, format!("{:?}", Bytes::from(vec)));
}

zeroidc/vendor/bytes/tests/test_iter.rs vendored Normal file

@@ -0,0 +1,21 @@
#![warn(rust_2018_idioms)]
use bytes::Bytes;
#[test]
fn iter_len() {
let buf = Bytes::from_static(b"hello world");
let iter = buf.iter();
assert_eq!(iter.size_hint(), (11, Some(11)));
assert_eq!(iter.len(), 11);
}
#[test]
fn empty_iter_len() {
let buf = Bytes::from_static(b"");
let iter = buf.iter();
assert_eq!(iter.size_hint(), (0, Some(0)));
assert_eq!(iter.len(), 0);
}

@@ -0,0 +1,29 @@
#![warn(rust_2018_idioms)]
#![cfg(feature = "std")]
use std::io::{BufRead, Read};
use bytes::Buf;
#[test]
fn read() {
let buf1 = &b"hello "[..];
let buf2 = &b"world"[..];
let buf = Buf::chain(buf1, buf2); // Disambiguate with Read::chain
let mut buffer = Vec::new();
buf.reader().read_to_end(&mut buffer).unwrap();
assert_eq!(b"hello world", &buffer[..]);
}
#[test]
fn buf_read() {
let buf1 = &b"hell"[..];
let buf2 = &b"o\nworld"[..];
let mut reader = Buf::chain(buf1, buf2).reader();
let mut line = String::new();
reader.read_line(&mut line).unwrap();
assert_eq!("hello\n", &line);
line.clear();
reader.read_line(&mut line).unwrap();
assert_eq!("world", &line);
}

@@ -0,0 +1,20 @@
#![cfg(feature = "serde")]
#![warn(rust_2018_idioms)]
use serde_test::{assert_tokens, Token};
#[test]
fn test_ser_de_empty() {
let b = bytes::Bytes::new();
assert_tokens(&b, &[Token::Bytes(b"")]);
let b = bytes::BytesMut::with_capacity(0);
assert_tokens(&b, &[Token::Bytes(b"")]);
}
#[test]
fn test_ser_de() {
let b = bytes::Bytes::from(&b"bytes"[..]);
assert_tokens(&b, &[Token::Bytes(b"bytes")]);
let b = bytes::BytesMut::from(&b"bytes"[..]);
assert_tokens(&b, &[Token::Bytes(b"bytes")]);
}

zeroidc/vendor/bytes/tests/test_take.rs vendored Normal file

@@ -0,0 +1,32 @@
#![warn(rust_2018_idioms)]
use bytes::buf::Buf;
use bytes::Bytes;
#[test]
fn long_take() {
// Tests that get a take with a size greater than the buffer length will not
// overrun the buffer. Regression test for #138.
let buf = b"hello world".take(100);
assert_eq!(11, buf.remaining());
assert_eq!(b"hello world", buf.chunk());
}
#[test]
fn take_copy_to_bytes() {
let mut abcd = Bytes::copy_from_slice(b"abcd");
let abcd_ptr = abcd.as_ptr();
let mut take = (&mut abcd).take(2);
let a = take.copy_to_bytes(1);
assert_eq!(Bytes::copy_from_slice(b"a"), a);
// assert `to_bytes` did not allocate
assert_eq!(abcd_ptr, a.as_ptr());
assert_eq!(Bytes::copy_from_slice(b"bcd"), abcd);
}
#[test]
#[should_panic]
fn take_copy_to_bytes_panics() {
let abcd = Bytes::copy_from_slice(b"abcd");
abcd.take(2).copy_to_bytes(3);
}