1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
use std::{borrow::Cow, convert::Infallible, ops::Range};

use crate::storage::{
    columns::{compression, raw_column},
    shift_range, ChunkType, Header, RawColumns,
};

/// The arguments shared by compression and decompression of a document chunk.
///
/// The chunk is viewed as: `[prefix bytes][change column metadata + data][op
/// column metadata + data][suffix bytes]`, with `changes`/`ops` locating the
/// column sections and `extra_args` carrying direction-specific settings.
pub(super) struct Args<'a, T: compression::ColumnCompression, DirArgs> {
    /// The original data of the entire document chunk (compressed or uncompressed)
    pub(super) original: Cow<'a, [u8]>,
    /// The number of bytes in the original before the beginning of the change column metadata
    pub(super) prefix: usize,
    /// The offset in the original after the end of the ops column data
    pub(super) suffix: usize,
    /// The column data for the changes
    pub(super) changes: Cols<T>,
    /// The column data for the ops
    pub(super) ops: Cols<T>,
    /// Additional arguments specific to the direction (compression or uncompression)
    pub(super) extra_args: DirArgs,
}

/// Arguments specific to the compressing direction.
pub(super) struct CompressArgs {
    /// Size threshold forwarded to `RawColumns::compress` — presumably the
    /// minimum column size worth compressing; confirm in that method
    pub(super) threshold: usize,
    /// The length of the original chunk's header, which is stripped and
    /// replaced with a freshly computed header once compression is done
    pub(super) original_header_len: usize,
}

/// Compress a document chunk returning the compressed bytes
pub(super) fn compress(args: Args<'_, compression::Uncompressed, CompressArgs>) -> Vec<u8> {
    let direction = Compressing {
        threshold: args.extra_args.threshold,
        header_len: args.extra_args.original_header_len,
    };
    // Run the pipeline inside a closure returning `Result<_, Infallible>` so
    // `?` can be used while the compiler proves that no error is possible.
    let run = || -> Result<Vec<u8>, Infallible> {
        Ok(Compression::<Compressing, _>::new(args, direction)
            .changes()?
            .ops()?
            .write_data()
            .finish())
    };
    // The error type is `Infallible`, so this unwrap can never panic.
    run().unwrap()
}

/// Decompress a document chunk.
///
/// If both the change and op column metadata already describe uncompressed
/// columns the original bytes are handed back untouched; otherwise the
/// columns are run through the decompressing pipeline.
pub(super) fn decompress<'a>(
    args: Args<'a, compression::Unknown, ()>,
) -> Result<Decompressed<'a>, raw_column::ParseError> {
    let already_plain = args
        .changes
        .raw_columns
        .uncompressed()
        .zip(args.ops.raw_columns.uncompressed());
    if let Some((changes, ops)) = already_plain {
        // Nothing is compressed: reuse the original buffer and ranges as-is.
        return Ok(Decompressed {
            changes,
            ops,
            compressed: None,
            uncompressed: args.original,
            change_bytes: args.changes.data,
            op_bytes: args.ops.data,
        });
    }
    Ok(Compression::<'a, Decompressing, _>::new(args, Decompressing)
        .changes()?
        .ops()?
        .write_data()
        .finish())
}

/// The result of decompressing a document chunk.
pub(super) struct Decompressed<'a> {
    /// The original compressed data, if there was any
    pub(super) compressed: Option<Cow<'a, [u8]>>,
    /// The final uncompressed data
    pub(super) uncompressed: Cow<'a, [u8]>,
    /// The ops column metadata
    pub(super) ops: RawColumns<compression::Uncompressed>,
    /// The change column metadata
    pub(super) changes: RawColumns<compression::Uncompressed>,
    /// The location of the change column data in the uncompressed data
    pub(super) change_bytes: Range<usize>,
    /// The location of the op column data in the uncompressed data
    pub(super) op_bytes: Range<usize>,
}

/// Typestate driver for (de)compression: `D` selects the direction
/// (`Compressing` or `Decompressing`) and `S` records how far through the
/// process we are (`Starting` -> `Changes` -> `ChangesAndOps` -> `Finished`).
struct Compression<'a, D: Direction, S: CompressionState> {
    /// The chunk being processed and where its column sections live
    args: Args<'a, D::In, D::Args>,
    /// State-specific data: output buffers and the columns processed so far
    state: S,
    /// The direction-specific processing implementation
    direction: D,
}

/// Some columns in the original data: their parsed metadata plus the
/// location of the corresponding column data within the chunk.
pub(super) struct Cols<T: compression::ColumnCompression> {
    /// The metadata for these columns
    pub(super) raw_columns: RawColumns<T>,
    /// The location in the original chunk of the data for these columns
    pub(super) data: Range<usize>,
}

/// Compression and decompression involve almost the same steps in either direction. This trait
/// encapsulates that.
trait Direction: std::fmt::Debug {
    /// The column compression state produced by `process` (`Unknown` when
    /// compressing, `Uncompressed` when decompressing)
    type Out: compression::ColumnCompression;
    /// The column compression state `process` consumes
    type In: compression::ColumnCompression;
    /// The error `process` can produce (`Infallible` when compressing)
    type Error;
    /// Direction-specific arguments carried in `Args::extra_args`
    type Args;

    /// This method represents the (de)compression process for a direction. The arguments are:
    ///
    /// * cols - The columns we are processing
    /// * input - the entire document chunk
    /// * out - the vector to place the processed columns in
    /// * meta_out - the vector to place processed column metadata in
    fn process(
        &self,
        cols: &Cols<Self::In>,
        input: &[u8],
        out: &mut Vec<u8>,
        meta_out: &mut Vec<u8>,
    ) -> Result<Cols<Self::Out>, Self::Error>;
}
/// The compressing direction.
#[derive(Debug)]
struct Compressing {
    /// Size threshold forwarded to `RawColumns::compress`
    threshold: usize,
    /// Length of the original chunk's header, stripped off in `finish`
    header_len: usize,
}

impl Direction for Compressing {
    type Error = Infallible;
    type Out = compression::Unknown;
    type In = compression::Uncompressed;
    type Args = CompressArgs;

    /// Compress one set of columns, appending their data to `out` and their
    /// (re-written) metadata to `meta_out`.
    fn process(
        &self,
        cols: &Cols<Self::In>,
        input: &[u8],
        out: &mut Vec<u8>,
        meta_out: &mut Vec<u8>,
    ) -> Result<Cols<Self::Out>, Self::Error> {
        // Remember where this column's data begins in the shared output buffer.
        let data_start = out.len();
        let column_data = &input[cols.data.clone()];
        let compressed = cols
            .raw_columns
            .compress(column_data, out, self.threshold);
        compressed.write(meta_out);
        Ok(Cols {
            raw_columns: compressed,
            data: data_start..out.len(),
        })
    }
}

/// The decompressing direction (carries no extra configuration).
#[derive(Debug)]
struct Decompressing;

impl Direction for Decompressing {
    type Error = raw_column::ParseError;
    type Out = compression::Uncompressed;
    type In = compression::Unknown;
    type Args = ();

    /// Uncompress one set of columns, appending their data to `out` and
    /// their (re-written) metadata to `meta_out`.
    fn process(
        &self,
        cols: &Cols<Self::In>,
        input: &[u8],
        out: &mut Vec<u8>,
        meta_out: &mut Vec<u8>,
    ) -> Result<Cols<Self::Out>, raw_column::ParseError> {
        // Remember where this column's data begins in the shared output buffer.
        let data_start = out.len();
        let column_data = &input[cols.data.clone()];
        let plain = cols.raw_columns.uncompress(column_data, out)?;
        plain.write(meta_out);
        Ok(Cols {
            raw_columns: plain,
            data: data_start..out.len(),
        })
    }
}

// Somewhat absurdly I (alex) kept getting the order of writing ops and changes wrong as well as
// the order that column metadata vs data should be written in. This is a type state to get the
// compiler to enforce that things are done in the right order.
// The states, in order, are: Starting -> Changes -> ChangesAndOps -> Finished.
trait CompressionState {}
impl CompressionState for Starting {}
impl<D: Direction> CompressionState for Changes<D> {}
impl<D: Direction> CompressionState for ChangesAndOps<D> {}
impl<D: Direction> CompressionState for Finished<D> {}

/// We haven't done any processing yet
struct Starting {
    /// The vector to write column data to
    data_out: Vec<u8>,
    /// The vector to write column metadata to
    meta_out: Vec<u8>,
}

/// We've processed the changes columns (but not yet the ops columns)
struct Changes<D: Direction> {
    /// The `Cols` for the processed change columns
    change_cols: Cols<D::Out>,
    /// The vector to write column metadata to
    meta_out: Vec<u8>,
    /// The vector to write column data to
    data_out: Vec<u8>,
}

/// We've processed both the changes and the ops columns
struct ChangesAndOps<D: Direction> {
    /// The `Cols` for the processed change columns
    change_cols: Cols<D::Out>,
    /// The `Cols` for the processed op columns
    ops_cols: Cols<D::Out>,
    /// The vector to write column metadata to
    meta_out: Vec<u8>,
    /// The vector to write column data to
    data_out: Vec<u8>,
}

/// We've written the column metadata and the op metadata for changes and ops to the output buffer
/// and added the prefix and suffix from the args.
struct Finished<D: Direction> {
    /// The `Cols` for the processed change columns
    change_cols: Cols<D::Out>,
    /// The `Cols` for the processed op columns
    ops_cols: Cols<D::Out>,
    /// The offset in `out` where the column data section begins; the
    /// `Cols::data` ranges above are relative to this point (see `write_data`)
    data_start: usize,
    /// The processed chunk
    out: Vec<u8>,
}

impl<'a, D: Direction> Compression<'a, D, Starting> {
    /// Begin a (de)compression run, copying everything up to `args.prefix`
    /// (the part of the chunk before the change column metadata) into the
    /// metadata output buffer.
    fn new(args: Args<'a, D::In, D::Args>, direction: D) -> Compression<'a, D, Starting> {
        let mut meta_out = Vec::with_capacity(args.original.len() * 2);
        meta_out.extend_from_slice(&args.original[..args.prefix]);
        let state = Starting {
            meta_out,
            data_out: Vec::new(),
        };
        Compression {
            args,
            direction,
            state,
        }
    }

    /// Process the change columns, moving to the `Changes` state.
    fn changes(self) -> Result<Compression<'a, D, Changes<D>>, D::Error> {
        let Starting {
            mut data_out,
            mut meta_out,
        } = self.state;
        let change_cols = self.direction.process(
            &self.args.changes,
            &self.args.original,
            &mut data_out,
            &mut meta_out,
        )?;
        let state = Changes {
            change_cols,
            meta_out,
            data_out,
        };
        Ok(Compression {
            args: self.args,
            direction: self.direction,
            state,
        })
    }
}

impl<'a, D: Direction> Compression<'a, D, Changes<D>> {
    /// Process the op columns, moving to the `ChangesAndOps` state.
    fn ops(self) -> Result<Compression<'a, D, ChangesAndOps<D>>, D::Error> {
        let Changes {
            change_cols: changes,
            mut meta_out,
            mut data_out,
        } = self.state;
        let ops = self.direction.process(
            &self.args.ops,
            &self.args.original,
            &mut data_out,
            &mut meta_out,
        )?;
        let state = ChangesAndOps {
            change_cols: changes,
            ops_cols: ops,
            meta_out,
            data_out,
        };
        Ok(Compression {
            args: self.args,
            direction: self.direction,
            state,
        })
    }
}

impl<'a, D: Direction> Compression<'a, D, ChangesAndOps<D>> {
    /// Append the processed column data, then the suffix of the original
    /// chunk, after the column metadata written so far, producing the final
    /// output buffer.
    fn write_data(self) -> Compression<'a, D, Finished<D>> {
        let ChangesAndOps {
            data_out,
            mut meta_out,
            change_cols,
            ops_cols,
        } = self.state;
        // The column data section begins right after the metadata.
        let data_start = meta_out.len();
        meta_out.extend_from_slice(&data_out);
        meta_out.extend_from_slice(&self.args.original[self.args.suffix..]);
        let state = Finished {
            ops_cols,
            change_cols,
            out: meta_out,
            data_start,
        };
        Compression {
            args: self.args,
            direction: self.direction,
            state,
        }
    }
}

impl<'a> Compression<'a, Decompressing, Finished<Decompressing>> {
    /// Assemble the `Decompressed` result, translating the column data
    /// ranges (which are relative to the data section) into offsets within
    /// the full uncompressed buffer.
    fn finish(self) -> Decompressed<'a> {
        let Finished {
            change_cols,
            ops_cols,
            data_start,
            out,
        } = self.state;
        let change_bytes = shift_range(change_cols.data, data_start);
        let op_bytes = shift_range(ops_cols.data, data_start);
        Decompressed {
            ops: ops_cols.raw_columns,
            changes: change_cols.raw_columns,
            uncompressed: Cow::Owned(out),
            compressed: Some(self.args.original),
            change_bytes,
            op_bytes,
        }
    }
}

impl<'a> Compression<'a, Compressing, Finished<Compressing>> {
    /// Re-header the processed chunk: strip the original header, compute a
    /// fresh `Header` over the remaining bytes, and prepend it.
    fn finish(self) -> Vec<u8> {
        let out = self.state.out;
        let body = &out[self.direction.header_len..];
        let header = Header::new(ChunkType::Document, body);
        let mut chunk = Vec::with_capacity(header.len() + out.len());
        header.write(&mut chunk);
        chunk.extend_from_slice(body);
        chunk
    }
}