use encode::{add_padding, encode_to_slice};
use std::{cmp, str};
use Config;

/// The output mechanism for ChunkedEncoder's encoded bytes.
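///
/// An implementation might write to an in-memory buffer, an I/O stream, or just count bytes. A
/// minimal sketch (the `CountingSink` name here is purely illustrative; `StringSink` below is the
/// implementation this module actually provides):
///
/// ```ignore
/// struct CountingSink {
///     total: usize,
/// }
///
/// impl Sink for CountingSink {
///     type Error = ();
///
///     fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error> {
///         // track how many encoded bytes have been produced so far
///         self.total += encoded.len();
///         Ok(())
///     }
/// }
/// ```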
pub trait Sink {
    type Error;

    /// Handle a chunk of encoded base64 data (as UTF-8 bytes)
    fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error>;
}

const BUF_SIZE: usize = 1024;

/// A base64 encoder that emits encoded bytes in chunks without heap allocation.
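///
/// A sketch of the intended call pattern, mirroring the test helpers below (`STANDARD` is the
/// crate's standard-alphabet `Config`):
///
/// ```ignore
/// let mut s = String::new();
/// {
///     // the sink borrows the String for the duration of encoding
///     let mut sink = StringSink::new(&mut s);
///     let encoder = ChunkedEncoder::new(STANDARD);
///     encoder.encode(b"foobar", &mut sink).unwrap();
/// }
/// assert_eq!("Zm9vYmFy", s);
/// ```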
pub struct ChunkedEncoder {
    config: Config,
    max_input_chunk_len: usize,
}

impl ChunkedEncoder {
    pub fn new(config: Config) -> ChunkedEncoder {
        ChunkedEncoder {
            config,
            max_input_chunk_len: max_input_length(BUF_SIZE, config),
        }
    }

    pub fn encode<S: Sink>(&self, bytes: &[u8], sink: &mut S) -> Result<(), S::Error> {
        let mut encode_buf: [u8; BUF_SIZE] = [0; BUF_SIZE];
        let encode_table = self.config.char_set.encode_table();

        let mut input_index = 0;

        while input_index < bytes.len() {
            // either a full input chunk, or whatever input remains for the final iteration
            let input_chunk_len = cmp::min(self.max_input_chunk_len, bytes.len() - input_index);

            let chunk = &bytes[input_index..(input_index + input_chunk_len)];

            let mut b64_bytes_written = encode_to_slice(chunk, &mut encode_buf, encode_table);

            input_index += input_chunk_len;
            let more_input_left = input_index < bytes.len();

            if self.config.pad && !more_input_left {
                // no more input, add padding if needed. Buffer will have room because
                // max_input_length leaves room for it.
                b64_bytes_written += add_padding(bytes.len(), &mut encode_buf[b64_bytes_written..]);
            }

            sink.write_encoded_bytes(&encode_buf[0..b64_bytes_written])?;
        }

        Ok(())
    }
}

/// Calculate the longest input that can be encoded for the given output buffer size.
///
/// If the config requires padding, two bytes of buffer space will be set aside so that the last
/// chunk of input can be encoded safely.
///
/// The input length will always be a multiple of 3 so that no encoding state has to be carried over
/// between chunks.
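///
/// For example, with a 1024 byte output buffer and padding enabled, 1022 bytes are usable, and
/// (1022 / 4) * 3 = 765 input bytes fit (see the tests below).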
fn max_input_length(encoded_buf_len: usize, config: Config) -> usize {
    let effective_buf_len = if config.pad {
        // make room for padding
        encoded_buf_len
            .checked_sub(2)
            .expect("Don't use a tiny buffer")
    } else {
        encoded_buf_len
    };

    // 4 output bytes encode 3 input bytes, so round the input down to a whole number of 3-byte chunks.
    (effective_buf_len / 4) * 3
}

// A really simple sink that just appends to a string
pub(crate) struct StringSink<'a> {
    string: &'a mut String,
}

impl<'a> StringSink<'a> {
    pub(crate) fn new(s: &mut String) -> StringSink {
        StringSink { string: s }
    }
}

impl<'a> Sink for StringSink<'a> {
    type Error = ();

    fn write_encoded_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error> {
        self.string.push_str(str::from_utf8(s).unwrap());

        Ok(())
    }
}

#[cfg(test)]
pub mod tests {
    extern crate rand;

    use super::*;
    use tests::random_config;
    use *;

    use self::rand::distributions::{Distribution, Uniform};
    use self::rand::{FromEntropy, Rng};

    #[test]
    fn chunked_encode_empty() {
        assert_eq!("", chunked_encode_str(&[], STANDARD));
    }

    #[test]
    fn chunked_encode_intermediate_fast_loop() {
        // > 8 bytes input, will enter the pretty fast loop
        assert_eq!(
            "Zm9vYmFyYmF6cXV4",
            chunked_encode_str(b"foobarbazqux", STANDARD)
        );
    }

    #[test]
    fn chunked_encode_fast_loop() {
        // > 32 bytes input, will enter the uber fast loop
        assert_eq!(
            "Zm9vYmFyYmF6cXV4cXV1eGNvcmdlZ3JhdWx0Z2FycGx5eg==",
            chunked_encode_str(b"foobarbazquxquuxcorgegraultgarplyz", STANDARD)
        );
    }

    #[test]
    fn chunked_encode_slow_loop_only() {
        // < 8 bytes input, slow loop only
        assert_eq!("Zm9vYmFy", chunked_encode_str(b"foobar", STANDARD));
    }

    #[test]
    fn chunked_encode_matches_normal_encode_random_string_sink() {
        let helper = StringSinkTestHelper;
        chunked_encode_matches_normal_encode_random(&helper);
    }

    #[test]
    fn max_input_length_no_pad() {
        let config = config_with_pad(false);
        assert_eq!(768, max_input_length(1024, config));
    }

    #[test]
    fn max_input_length_with_pad_decrements_one_triple() {
        let config = config_with_pad(true);
        assert_eq!(765, max_input_length(1024, config));
    }

    #[test]
    fn max_input_length_with_pad_one_byte_short() {
        let config = config_with_pad(true);
        assert_eq!(765, max_input_length(1025, config));
    }

    #[test]
    fn max_input_length_with_pad_fits_exactly() {
        let config = config_with_pad(true);
        assert_eq!(768, max_input_length(1026, config));
    }

    #[test]
    fn max_input_length_cant_use_extra_single_encoded_byte() {
        let config = Config::new(CharacterSet::Standard, false);
        assert_eq!(300, max_input_length(401, config));
    }

    pub fn chunked_encode_matches_normal_encode_random<S: SinkTestHelper>(sink_test_helper: &S) {
        let mut input_buf: Vec<u8> = Vec::new();
        let mut output_buf = String::new();
        let mut rng = rand::rngs::SmallRng::from_entropy();
        let input_len_range = Uniform::new(1, 10_000);

        for _ in 0..5_000 {
            input_buf.clear();
            output_buf.clear();

            let buf_len = input_len_range.sample(&mut rng);
            for _ in 0..buf_len {
                input_buf.push(rng.gen());
            }

            let config = random_config(&mut rng);

            let chunk_encoded_string = sink_test_helper.encode_to_string(config, &input_buf);
            encode_config_buf(&input_buf, config, &mut output_buf);

            assert_eq!(
                output_buf, chunk_encoded_string,
                "input len={}, config: pad={}",
                buf_len, config.pad
            );
        }
    }

    fn chunked_encode_str(bytes: &[u8], config: Config) -> String {
        let mut s = String::new();
        {
            let mut sink = StringSink::new(&mut s);
            let encoder = ChunkedEncoder::new(config);
            encoder.encode(bytes, &mut sink).unwrap();
        }

        s
    }

    fn config_with_pad(pad: bool) -> Config {
        Config::new(CharacterSet::Standard, pad)
    }

    // An abstraction around sinks so that we can have tests that easily apply to any sink implementation
    pub trait SinkTestHelper {
        fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String;
    }

    struct StringSinkTestHelper;

    impl SinkTestHelper for StringSinkTestHelper {
        fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String {
            let encoder = ChunkedEncoder::new(config);
            let mut s = String::new();
            {
                let mut sink = StringSink::new(&mut s);
                encoder.encode(bytes, &mut sink).unwrap();
            }

            s
        }
    }

}