 // Consider unifying the read2() in libstd, cargo and this to prevent further code duplication.
 
 pub use self::imp::read2;
-use std::io;
+use std::io::{self, Write};
+use std::mem::replace;
 
 use std::process::{Child, Output};
 
 pub fn read2_abbreviated(mut child: Child, exclude_from_len: &[String]) -> io::Result<Output> {
-    use io::Write;
-    use std::mem::replace;
-
-    const HEAD_LEN: usize = 160 * 1024;
-    const TAIL_LEN: usize = 256 * 1024;
-    const EXCLUDED_PLACEHOLDER_LEN: isize = 32;
-
-    enum ProcOutput {
-        Full { bytes: Vec<u8>, excluded_len: isize },
-        Abbreviated { head: Vec<u8>, skipped: usize, tail: Box<[u8]> },
-    }
-
-    impl ProcOutput {
-        fn extend(&mut self, data: &[u8], exclude_from_len: &[String]) {
-            let new_self = match *self {
-                ProcOutput::Full { ref mut bytes, ref mut excluded_len } => {
-                    let old_len = bytes.len();
-                    bytes.extend_from_slice(data);
-
-                    // We had problems in the past with tests failing only in some environments,
-                    // due to the length of the base path pushing the output size over the limit.
-                    //
-                    // To make those failures deterministic across all environments we ignore known
-                    // paths when calculating the string length, while still including the full
-                    // path in the output. This could result in some output being larger than the
-                    // threshold, but it's better than having nondeterministic failures.
-                    //
-                    // The compiler emitting only excluded strings is addressed by adding a
-                    // placeholder size for each excluded segment, which will eventually reach
-                    // the configured threshold.
-                    for pattern in exclude_from_len {
-                        let pattern_bytes = pattern.as_bytes();
-                        // We start matching `pattern_bytes.len() - 1` bytes into the previously
-                        // loaded data, to account for the fact a pattern might be included across
-                        // multiple `extend` calls. Starting from `- 1` avoids double-counting patterns.
-                        let matches = (&bytes[(old_len.saturating_sub(pattern_bytes.len() - 1))..])
-                            .windows(pattern_bytes.len())
-                            .filter(|window| window == &pattern_bytes)
-                            .count();
-                        *excluded_len += matches as isize
-                            * (EXCLUDED_PLACEHOLDER_LEN - pattern_bytes.len() as isize);
-                    }
-
-                    let new_len = bytes.len();
-                    if (new_len as isize + *excluded_len) as usize <= HEAD_LEN + TAIL_LEN {
-                        return;
-                    }
-
-                    let mut head = replace(bytes, Vec::new());
-                    let tail = head.split_off(new_len - TAIL_LEN).into_boxed_slice();
-                    let skipped = new_len - HEAD_LEN - TAIL_LEN;
-                    ProcOutput::Abbreviated { head, skipped, tail }
-                }
-                ProcOutput::Abbreviated { ref mut skipped, ref mut tail, .. } => {
-                    *skipped += data.len();
-                    if data.len() <= TAIL_LEN {
-                        tail[..data.len()].copy_from_slice(data);
-                        tail.rotate_left(data.len());
-                    } else {
-                        tail.copy_from_slice(&data[(data.len() - TAIL_LEN)..]);
-                    }
-                    return;
-                }
-            };
-            *self = new_self;
-        }
-
-        fn into_bytes(self) -> Vec<u8> {
-            match self {
-                ProcOutput::Full { bytes, .. } => bytes,
-                ProcOutput::Abbreviated { mut head, skipped, tail } => {
-                    write!(&mut head, "\n\n<<<<<< SKIPPED {} BYTES >>>>>>\n\n", skipped).unwrap();
-                    head.extend_from_slice(&tail);
-                    head
-                }
-            }
-        }
-    }
-
-    let mut stdout = ProcOutput::Full { bytes: Vec::new(), excluded_len: 0 };
-    let mut stderr = ProcOutput::Full { bytes: Vec::new(), excluded_len: 0 };
+    let mut stdout = ProcOutput::new();
+    let mut stderr = ProcOutput::new();
 
     drop(child.stdin.take());
     read2(
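
For context, a caller might drive this function roughly as in the sketch below. This is a hypothetical usage example, not code from the PR: the command, input file, and excluded path are made up, but the signature matches the function above. Note that `read2_abbreviated` takes ownership of the `Child`, drops its stdin, and waits for the process itself:

```rust
use std::io;
use std::process::{Command, Stdio};

// Hypothetical caller: spawn a process with piped stdout/stderr and let
// read2_abbreviated collect (and, past the head/tail limits, abbreviate)
// both streams. Strings listed in `exclude_from_len` count as fixed-size
// placeholders when checking the length threshold.
fn run_and_capture() -> io::Result<()> {
    let child = Command::new("rustc")
        .arg("tests/ui/example.rs") // made-up input file
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()?;

    let exclude = vec!["/hypothetical/build/root".to_string()];
    let output = read2_abbreviated(child, &exclude)?;
    println!("status: {}, stdout bytes: {}", output.status, output.stdout.len());
    Ok(())
}
```
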
@@ -102,6 +24,86 @@ pub fn read2_abbreviated(mut child: Child, exclude_from_len: &[String]) -> io::R
     Ok(Output { status, stdout: stdout.into_bytes(), stderr: stderr.into_bytes() })
 }
 
+const HEAD_LEN: usize = 160 * 1024;
+const TAIL_LEN: usize = 256 * 1024;
+const EXCLUDED_PLACEHOLDER_LEN: isize = 32;
+
+enum ProcOutput {
+    Full { bytes: Vec<u8>, excluded_len: isize },
+    Abbreviated { head: Vec<u8>, skipped: usize, tail: Box<[u8]> },
+}
+
+impl ProcOutput {
+    fn new() -> Self {
+        ProcOutput::Full { bytes: Vec::new(), excluded_len: 0 }
+    }
+
+    fn extend(&mut self, data: &[u8], exclude_from_len: &[String]) {
+        let new_self = match *self {
+            ProcOutput::Full { ref mut bytes, ref mut excluded_len } => {
+                let old_len = bytes.len();
+                bytes.extend_from_slice(data);
+
+                // We had problems in the past with tests failing only in some environments,
+                // due to the length of the base path pushing the output size over the limit.
+                //
+                // To make those failures deterministic across all environments we ignore known
+                // paths when calculating the string length, while still including the full
+                // path in the output. This could result in some output being larger than the
+                // threshold, but it's better than having nondeterministic failures.
+                //
+                // The compiler emitting only excluded strings is addressed by adding a
+                // placeholder size for each excluded segment, which will eventually reach
+                // the configured threshold.
+                for pattern in exclude_from_len {
+                    let pattern_bytes = pattern.as_bytes();
+                    // We start matching `pattern_bytes.len() - 1` bytes into the previously
+                    // loaded data, to account for the fact a pattern might be included across
+                    // multiple `extend` calls. Starting from `- 1` avoids double-counting patterns.
+                    let matches = (&bytes[(old_len.saturating_sub(pattern_bytes.len() - 1))..])
+                        .windows(pattern_bytes.len())
+                        .filter(|window| window == &pattern_bytes)
+                        .count();
+                    *excluded_len += matches as isize
+                        * (EXCLUDED_PLACEHOLDER_LEN - pattern_bytes.len() as isize);
+                }
+
+                let new_len = bytes.len();
+                if (new_len as isize + *excluded_len) as usize <= HEAD_LEN + TAIL_LEN {
+                    return;
+                }
+
+                let mut head = replace(bytes, Vec::new());
+                let tail = head.split_off(new_len - TAIL_LEN).into_boxed_slice();
+                let skipped = new_len - HEAD_LEN - TAIL_LEN;
+                ProcOutput::Abbreviated { head, skipped, tail }
+            }
+            ProcOutput::Abbreviated { ref mut skipped, ref mut tail, .. } => {
+                *skipped += data.len();
+                if data.len() <= TAIL_LEN {
+                    tail[..data.len()].copy_from_slice(data);
+                    tail.rotate_left(data.len());
+                } else {
+                    tail.copy_from_slice(&data[(data.len() - TAIL_LEN)..]);
+                }
+                return;
+            }
+        };
+        *self = new_self;
+    }
+
+    fn into_bytes(self) -> Vec<u8> {
+        match self {
+            ProcOutput::Full { bytes, .. } => bytes,
+            ProcOutput::Abbreviated { mut head, skipped, tail } => {
+                write!(&mut head, "\n\n<<<<<< SKIPPED {} BYTES >>>>>>\n\n", skipped).unwrap();
+                head.extend_from_slice(&tail);
+                head
+            }
+        }
+    }
+}
+
 #[cfg(not(any(unix, windows)))]
 mod imp {
     use std::io::{self, Read};
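
To make the `excluded_len` bookkeeping concrete, here is a small self-contained sketch of the arithmetic with made-up numbers (a 100-byte excluded path seen three times); it only restates the comparison performed in `extend`:

```rust
fn main() {
    // HEAD_LEN + TAIL_LEN = (160 + 256) * 1024 bytes.
    let threshold: usize = (160 + 256) * 1024; // 425_984

    // A 100-byte path listed in `exclude_from_len` matched 3 times: each
    // match contributes EXCLUDED_PLACEHOLDER_LEN - pattern_len = 32 - 100,
    // i.e. every occurrence is counted as a 32-byte placeholder instead
    // of its real 100 bytes.
    let excluded_len: isize = 3 * (32 - 100); // -204

    // Raw output slightly over the threshold...
    let raw_len: usize = 426_100;
    assert!(raw_len > threshold);

    // ...but the adjusted length used by `extend` stays under it, so the
    // output is kept in full and the result stays deterministic across
    // environments with differently sized base paths.
    assert!((raw_len as isize + excluded_len) as usize <= threshold);
}
```
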
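The `Abbreviated` arm keeps the most recent `TAIL_LEN` bytes using a copy-then-rotate trick instead of an explicit ring-buffer index. A minimal standalone demonstration of that update on a toy 5-byte tail (the buffer contents are made up):

```rust
fn main() {
    // Same update as the Abbreviated arm: overwrite the oldest bytes at
    // the front of the fixed-size tail, then rotate them out so the buffer
    // always ends with the most recent data.
    let mut tail: Box<[u8]> = b"abcde".to_vec().into_boxed_slice();
    let data = b"XY";
    assert!(data.len() <= tail.len());

    tail[..data.len()].copy_from_slice(data); // b"XYcde"
    tail.rotate_left(data.len());             // b"cdeXY"

    // The oldest bytes ("ab") are gone; the newest ("XY") are at the end.
    assert_eq!(&tail[..], b"cdeXY");
}
```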