righor 0.2.4

Righor creates models of Ig/TCR sequences from sequencing data.
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
use crate::shared::data_structures::{RangeArray1, RangeArray2};
use crate::shared::feature::Feature;
use crate::shared::utils::difference_as_i64;
use crate::shared::{DAlignment, Dna, FeaturesTrait, InferenceParameters, VJAlignment};

use crate::vdj::Sequence;

use itertools::iproduct;

/// Contains the probability of the V gene ending at position e_v
/// For all reasonable e_v
pub struct AggregatedFeatureEndV {
    pub index: usize,      // store the index of the V gene
    pub start_gene: usize, // store the start of the sequence in the V

    // deal with the range of possible values for endV
    // NOTE(review): copied from likelihood.min / likelihood.max at construction;
    // presumably min is inclusive and max exclusive (RangeArray convention) — confirm
    pub start_v3: i64,
    pub end_v3: i64,

    // Contains all the likelihood P(end_v | V), indexed by end position
    likelihood: RangeArray1,

    // Dirty likelihood (will be updated as we go through the inference)
    dirty_likelihood: RangeArray1,
}

/// Contains the probability of the J gene starting at position s_j,
/// for all reasonable s_j
pub struct AggregatedFeatureStartJ {
    pub index: usize,     // store the index of the j gene
    pub start_seq: usize, // store the start of the J in the sequence

    // deal with the range of possible values for startJ
    // NOTE(review): copied from likelihood.min / likelihood.max at construction;
    // presumably min is inclusive and max exclusive (RangeArray convention) — confirm
    pub start_j5: i64,
    pub end_j5: i64,

    // Contains all the likelihood P(start_j | J), indexed by start position
    likelihood: RangeArray1,

    // Dirty likelihood (will be updated as we go through the inference)
    dirty_likelihood: RangeArray1,
}

// Contains the probability of the D gene starting and ending position
pub struct AggregatedFeatureSpanD {
    pub index: usize, // store the index of the D gene

    // range of possible values for (start_d, end_d)
    // NOTE(review): copied from likelihood.min / likelihood.max at construction;
    // presumably min is inclusive and max exclusive (RangeArray convention) — confirm
    pub start_d5: i64,
    pub end_d5: i64,
    pub start_d3: i64,
    pub end_d3: i64,

    // Contains all the likelihood  P(startD, endD | D)
    likelihood: RangeArray2,
    // Dirty likelihood, will be updated as we go through the inference
    dirty_likelihood: RangeArray2,
}

impl AggregatedFeatureEndV {
    /// Aggregate P(end_v) for one V alignment over every possible
    /// number of V deletions, combining the deletion and error features.
    ///
    /// Returns `None` when no deletion choice exceeds `ip.min_likelihood`,
    /// so the caller can skip this gene entirely.
    pub fn new(
        v: &VJAlignment,
        feat: &impl FeaturesTrait,
        ip: &InferenceParameters,
    ) -> Option<AggregatedFeatureEndV> {
        let nb_delv = feat.delv().dim().0;
        // possible end positions: end_seq - nb_delv + 1 ..= end_seq
        let mut likelihood = RangeArray1::zeros((
            difference_as_i64(v.end_seq, nb_delv) + 1,
            v.end_seq as i64 + 1,
        ));

        let mut total = 0.;
        for delv in 0..nb_delv {
            let proba_del = feat.delv().likelihood((delv, v.index));
            let proba_err = feat
                .error()
                .likelihood((v.nb_errors(delv), v.length_with_deletion(delv)));
            let proba = proba_del * proba_err;
            if proba > ip.min_likelihood {
                let end_v = difference_as_i64(v.end_seq, delv);
                *likelihood.get_mut(end_v) = proba;
                total += proba;
            }
        }

        if total == 0. {
            // nothing likely enough was found
            return None;
        }

        Some(AggregatedFeatureEndV {
            start_v3: likelihood.min,
            end_v3: likelihood.max,
            dirty_likelihood: RangeArray1::zeros(likelihood.dim()),
            likelihood,
            index: v.index,
            start_gene: v.start_gene,
        })
    }

    /// Iterate over the non-zero (end_v, likelihood) entries.
    pub fn iter(&self) -> impl Iterator<Item = (i64, &f64)> + '_ {
        self.likelihood.iter().filter(|&(_, &ll)| ll != 0.0)
    }

    /// P(end_v = ev) for this V gene.
    pub fn likelihood(&self, ev: i64) -> f64 {
        self.likelihood.get(ev)
    }

    /// Largest stored likelihood (useful for pruning).
    pub fn max_likelihood(&self) -> f64 {
        self.likelihood.max_value()
    }

    /// Accumulate posterior weight for end_v = ev during inference.
    pub fn dirty_update(&mut self, ev: i64, likelihood: f64) {
        *self.dirty_likelihood.get_mut(ev) += likelihood;
    }

    /// Push the accumulated posterior weights back onto the underlying
    /// deletion and error features.
    pub fn disaggregate(
        &self,
        v: &VJAlignment,
        feat: &mut impl FeaturesTrait,
        ip: &InferenceParameters,
    ) {
        for delv in 0..feat.delv().dim().0 {
            let proba = feat.delv().likelihood((delv, v.index))
                * feat
                    .error()
                    .likelihood((v.nb_errors(delv), v.length_with_deletion(delv)));
            if proba <= ip.min_likelihood {
                continue;
            }

            let end_v = difference_as_i64(v.end_seq, delv);
            let dirty_proba = self.dirty_likelihood.get(end_v); // P(ev)
            if dirty_proba > 0. {
                // here we want to compute P(delV | V); each end_v maps to
                // exactly one delv, so the posterior transfers directly.
                // At that point the V gene proba should be already updated.
                feat.delv_mut().dirty_update((delv, v.index), dirty_proba);

                feat.error_mut().dirty_update(
                    (v.nb_errors(delv), v.length_with_deletion(delv)),
                    dirty_proba,
                );
            }
        }
    }
}

impl AggregatedFeatureStartJ {
    /// Aggregate P(start_j) for one J alignment over every possible
    /// number of J deletions, combining the deletion and error features.
    ///
    /// Returns `None` when no deletion choice exceeds `ip.min_likelihood`.
    pub fn new(
        j: &VJAlignment,
        feat: &impl FeaturesTrait,
        ip: &InferenceParameters,
    ) -> Option<AggregatedFeatureStartJ> {
        let nb_delj = feat.delj().dim().0;
        // possible start positions: start_seq ..= start_seq + nb_delj - 1
        let mut likelihood =
            RangeArray1::zeros((j.start_seq as i64, (j.start_seq + nb_delj) as i64));

        let mut total = 0.;
        for delj in 0..nb_delj {
            let proba_del = feat.delj().likelihood((delj, j.index));
            let proba_err = feat
                .error()
                .likelihood((j.nb_errors(delj), j.length_with_deletion(delj)));
            let proba = proba_del * proba_err;
            if proba > ip.min_likelihood {
                let start_j = (j.start_seq + delj) as i64;
                *likelihood.get_mut(start_j) = proba;
                total += proba;
            }
        }

        if total == 0. {
            return None;
        }

        Some(AggregatedFeatureStartJ {
            start_j5: likelihood.min,
            end_j5: likelihood.max,
            dirty_likelihood: RangeArray1::zeros(likelihood.dim()),
            likelihood,

            index: j.index,
            start_seq: j.start_seq,
        })
    }

    /// Iterate over the non-zero (start_j, likelihood) entries.
    pub fn iter(&self) -> impl Iterator<Item = (i64, &f64)> + '_ {
        self.likelihood.iter().filter(|&(_, &ll)| ll != 0.0)
    }

    /// P(start_j = sj) for this J gene.
    pub fn likelihood(&self, sj: i64) -> f64 {
        self.likelihood.get(sj)
    }

    /// Largest stored likelihood (useful for pruning).
    pub fn max_likelihood(&self) -> f64 {
        self.likelihood.max_value()
    }

    /// Accumulate posterior weight for start_j = sj during inference.
    pub fn dirty_update(&mut self, sj: i64, likelihood: f64) {
        *self.dirty_likelihood.get_mut(sj) += likelihood;
    }

    /// Push the accumulated posterior weights back onto the underlying
    /// deletion and error features.
    pub fn disaggregate(
        &self,
        j: &VJAlignment,
        feat: &mut impl FeaturesTrait,
        ip: &InferenceParameters,
    ) {
        for delj in 0..feat.delj().dim().0 {
            let proba = feat.delj().likelihood((delj, j.index))
                * feat
                    .error()
                    .likelihood((j.nb_errors(delj), j.length_with_deletion(delj)));
            if proba <= ip.min_likelihood {
                continue;
            }

            let start_j = (j.start_seq + delj) as i64;
            let dirty_proba = self.dirty_likelihood.get(start_j);
            if dirty_proba > 0. {
                // each start_j maps to exactly one delj, so the posterior
                // transfers directly
                feat.delj_mut().dirty_update((delj, j.index), dirty_proba);

                feat.error_mut().dirty_update(
                    (j.nb_errors(delj), j.length_with_deletion(delj)),
                    dirty_proba,
                )
            }
        }
    }
}

impl AggregatedFeatureSpanD {
    /// Aggregate P(start_d, end_d | D) over all alignments of one D gene
    /// and all (5', 3') deletion choices.
    ///
    /// Every alignment in `ds` must refer to the same gene (panics
    /// otherwise). Returns `None` if `ds` is empty or if no configuration
    /// exceeds `ip.min_likelihood`.
    ///
    /// Takes a slice instead of `&Vec` (callers passing `&Vec<DAlignment>`
    /// still work through deref coercion), matching `disaggregate`.
    pub fn new(
        ds: &[DAlignment],
        feat: &impl FeaturesTrait,
        ip: &InferenceParameters,
    ) -> Option<AggregatedFeatureSpanD> {
        if ds.is_empty() {
            return None;
        }

        let nb_deld5 = feat.deld().dim().0;
        let nb_deld3 = feat.deld().dim().1;

        let mut total_likelihood = 0.;
        let mut likelihoods = RangeArray2::zeros((
            (
                // min start, min end
                ds.iter().map(|x| x.pos).min().unwrap() as i64,
                ds.iter().map(|x| x.pos + x.len()).min().unwrap() as i64 - nb_deld3 as i64 + 1,
            ),
            (
                // max start, max end
                ds.iter().map(|x| x.pos).max().unwrap() as i64 + nb_deld5 as i64,
                ds.iter().map(|x| x.pos + x.len()).max().unwrap() as i64 + 1,
            ),
        ));

        let dindex = ds.first().unwrap().index;
        for d in ds {
            if d.index != dindex {
                panic!("AggregatedFeatureSpanD received different genes.");
            }

            for (deld5, deld3) in iproduct!(0..nb_deld5, 0..nb_deld3) {
                let d_start = (d.pos + deld5) as i64;
                let d_end = (d.pos + d.len() - deld3) as i64;
                // skip configurations where the deletions overlap
                if d_start > d_end {
                    continue;
                }

                let ll_deld = feat.deld().likelihood((deld5, deld3, d.index));
                let ll_errord = feat.error().likelihood((
                    d.nb_errors(deld5, deld3),
                    d.length_with_deletion(deld5, deld3),
                ));
                let likelihood = ll_deld * ll_errord;

                if likelihood > ip.min_likelihood {
                    // several (deld5, deld3, alignment) combinations can
                    // reach the same (d_start, d_end): accumulate
                    *likelihoods.get_mut((d_start, d_end)) += likelihood;
                    total_likelihood += likelihood;
                }
            }
        }

        if total_likelihood == 0. {
            return None;
        }

        Some(AggregatedFeatureSpanD {
            start_d5: likelihoods.min.0,
            end_d5: likelihoods.max.0,
            start_d3: likelihoods.min.1,
            end_d3: likelihoods.max.1,
            dirty_likelihood: RangeArray2::zeros(likelihoods.dim()),
            likelihood: likelihoods,
            index: dindex,
        })
    }

    /// P(start_d = sd, end_d = ed | D) for this gene.
    pub fn likelihood(&self, sd: i64, ed: i64) -> f64 {
        self.likelihood.get((sd, ed))
    }

    /// Iterate over all (start_d, end_d, likelihood) entries
    /// (zero entries included).
    pub fn iter(&self) -> impl Iterator<Item = (i64, i64, &f64)> + '_ {
        self.likelihood.iter() //.filter(|&(_, _, &v)| v != 0.0)
    }

    /// Iterate over (start_d, likelihood) for a fixed end position; empty
    /// iterator when `dend` lies outside the stored range.
    pub fn iter_fixed_dend(&self, dend: i64) -> impl Iterator<Item = (i64, &f64)> + '_ {
        let iteropt = if (dend < self.likelihood.min.1) || (dend >= self.likelihood.max.1) {
            None
        } else {
            Some(self.likelihood.iter_fixed_2nd(dend))
        };
        iteropt.into_iter().flatten()
        //  .filter(|&(_, &v)| v != 0.0)
    }

    /// Largest stored likelihood (useful for pruning).
    pub fn max_likelihood(&self) -> f64 {
        self.likelihood.max_value()
    }

    /// Accumulate posterior weight for (start_d, end_d) during inference.
    pub fn dirty_update(&mut self, sd: i64, ed: i64, likelihood: f64) {
        *self.dirty_likelihood.get_mut((sd, ed)) += likelihood;
    }

    /// Push the accumulated posterior weights back onto the underlying
    /// deletion and error features, splitting each (start_d, end_d)
    /// posterior proportionally between the configurations that produced it.
    pub fn disaggregate(
        &self,
        ds: &[DAlignment],
        feat: &mut impl FeaturesTrait,
        ip: &InferenceParameters,
    ) {
        // Now with startD and end D
        for d in ds.iter() {
            for (deld5, deld3) in iproduct!(0..feat.deld().dim().0, 0..feat.deld().dim().1) {
                let d_start = (d.pos + deld5) as i64;
                let d_end = (d.pos + d.len() - deld3) as i64;
                if d_start > d_end {
                    continue;
                }
                let nb_err = d.nb_errors(deld5, deld3);
                let likelihood = feat.deld().likelihood((deld5, deld3, d.index))
                    * feat
                        .error()
                        .likelihood((nb_err, d.length_with_deletion(deld5, deld3)));

                if likelihood > ip.min_likelihood {
                    let dirty_proba = self.dirty_likelihood.get((d_start, d_end));
                    // guard before dividing: avoids a useless (and, if the
                    // aggregated likelihood were 0 too, NaN-producing)
                    // division when the posterior is zero anyway
                    if dirty_proba > 0. {
                        // P(deld5, deld3 | start, end) ∝ this configuration's
                        // own likelihood relative to the aggregated one
                        let corrected_proba =
                            dirty_proba * likelihood / self.likelihood(d_start, d_end);

                        feat.deld_mut()
                            .dirty_update((deld5, deld3, d.index), corrected_proba);

                        feat.error_mut().dirty_update(
                            (nb_err, d.length_with_deletion(deld5, deld3)),
                            corrected_proba,
                        );
                    }
                }
            }
        }
    }
}

/// Pre-computed insertion likelihoods P(ins_vd) for every
/// (end_v, start_d) pair compatible with the aligned genes
pub struct FeatureVD {
    // P(insertions between end_v and start_d), indexed by (end_v, start_d)
    likelihood: RangeArray2,
    // posterior weights accumulated during inference
    dirty_likelihood: RangeArray2,
}

impl FeatureVD {
    /// Pre-compute P(ins_vd) for every (end_v, start_d) pair reachable
    /// given the V/D alignments and the allowed deletion ranges.
    ///
    /// Returns `None` when the sequence has no V or no D alignment.
    pub fn new(
        sequence: &Sequence,
        feat: &impl FeaturesTrait,
        ip: &InferenceParameters,
    ) -> Option<FeatureVD> {
        if sequence.v_genes.is_empty() || sequence.d_genes.is_empty() {
            return None;
        }
        // bounds on end_v, widened by the possible V 3' deletions
        let min_end_v = sequence.v_genes.iter().map(|x| x.end_seq).min().unwrap() as i64
            - feat.delv().dim().0 as i64
            + 1;
        let min_start_d = sequence.d_genes.iter().map(|x| x.pos).min().unwrap() as i64;
        let max_end_v = sequence.v_genes.iter().map(|x| x.end_seq).max().unwrap() as i64;
        // bounds on start_d, widened by the possible D 5' deletions
        let max_start_d = sequence.d_genes.iter().map(|x| x.pos).max().unwrap() as i64
            + feat.deld().dim().0 as i64
            - 1;

        let mut likelihoods =
            RangeArray2::zeros(((min_end_v, min_start_d), (max_end_v + 1, max_start_d + 1)));

        for ev in min_end_v..=max_end_v {
            for sd in min_start_d..=max_start_d {
                if sd >= ev && ((sd - ev) as usize) < feat.insvd().max_nb_insertions() {
                    // the insertion feature also consumes the last V
                    // nucleotide, hence the `ev - 1`
                    let ins_vd_plus_first = sequence.get_subsequence(ev - 1, sd);
                    let likelihood = feat.insvd().likelihood(&ins_vd_plus_first);
                    if likelihood > ip.min_likelihood {
                        *likelihoods.get_mut((ev, sd)) = likelihood;
                    }
                }
            }
        }

        Some(FeatureVD {
            dirty_likelihood: RangeArray2::zeros(likelihoods.dim()),
            likelihood: likelihoods,
        })
    }

    /// Iterate over the non-zero (end_v, start_d, likelihood) entries.
    pub fn iter(&self) -> impl Iterator<Item = (i64, i64, &f64)> + '_ {
        self.likelihood.iter().filter(|&(_, _, &v)| v != 0.0)
    }

    /// Upper bound (exclusive) of stored end_v values.
    pub fn max_ev(&self) -> i64 {
        self.likelihood.max.0
    }

    /// Largest stored likelihood (useful for pruning).
    pub fn max_likelihood(&self) -> f64 {
        self.likelihood.max_value()
    }

    /// Lower bound (inclusive) of stored end_v values.
    pub fn min_ev(&self) -> i64 {
        self.likelihood.min.0
    }

    /// Upper bound (exclusive) of stored start_d values.
    pub fn max_sd(&self) -> i64 {
        self.likelihood.max.1
    }

    /// Lower bound (inclusive) of stored start_d values.
    pub fn min_sd(&self) -> i64 {
        self.likelihood.min.1
    }

    /// P(ins_vd) for the pair (end_v = ev, start_d = sd).
    pub fn likelihood(&self, ev: i64, sd: i64) -> f64 {
        self.likelihood.get((ev, sd))
    }

    /// Accumulate posterior weight for (end_v, start_d) during inference.
    pub fn dirty_update(&mut self, ev: i64, sd: i64, likelihood: f64) {
        *self.dirty_likelihood.get_mut((ev, sd)) += likelihood;
    }

    /// Push the accumulated posterior weights back onto the insertion
    /// feature.
    pub fn disaggregate(
        &self,
        sequence: &Dna,
        feat: &mut impl FeaturesTrait,
        ip: &InferenceParameters,
    ) {
        for ev in self.likelihood.lower().0..self.likelihood.upper().0 {
            for sd in self.likelihood.lower().1..self.likelihood.upper().1 {
                // the min_likelihood test was previously duplicated
                // (checked here and again on a re-read of the same value);
                // a single check is equivalent
                if sd >= ev
                    && ((sd - ev) as usize) < feat.insvd().max_nb_insertions()
                    && self.likelihood(ev, sd) > ip.min_likelihood
                {
                    let ins_vd_plus_first = &sequence.extract_padded_subsequence(ev - 1, sd);
                    feat.insvd_mut()
                        .dirty_update(ins_vd_plus_first, self.dirty_likelihood.get((ev, sd)))
                }
            }
        }
    }
}

/// Pre-computed insertion likelihoods P(ins_dj) for every
/// (end_d, start_j) pair compatible with the aligned genes
pub struct FeatureDJ {
    // P(insertions between end_d and start_j), indexed by (end_d, start_j)
    likelihood: RangeArray2,
    // posterior weights accumulated during inference
    dirty_likelihood: RangeArray2,
}

impl FeatureDJ {
    /// Pre-compute P(ins_dj) for every (end_d, start_j) pair reachable
    /// given the D/J alignments and the allowed deletion ranges.
    ///
    /// Returns `None` when the sequence has no D or no J alignment.
    pub fn new(
        sequence: &Sequence,
        feat: &impl FeaturesTrait,
        ip: &InferenceParameters,
    ) -> Option<FeatureDJ> {
        if sequence.d_genes.is_empty() || sequence.j_genes.is_empty() {
            return None;
        }

        // bounds on end_d, widened by the possible D 3' deletions
        let lo_ed = sequence
            .d_genes
            .iter()
            .map(|x| x.pos + x.len())
            .min()
            .unwrap() as i64
            - feat.deld().dim().1 as i64
            + 1;
        let hi_ed = sequence
            .d_genes
            .iter()
            .map(|x| x.pos + x.len())
            .max()
            .unwrap() as i64;
        // bounds on start_j, widened by the possible J 5' deletions
        let lo_sj = sequence.j_genes.iter().map(|x| x.start_seq).min().unwrap() as i64;
        let hi_sj = sequence.j_genes.iter().map(|x| x.start_seq).max().unwrap() as i64
            + feat.delj().dim().0 as i64
            - 1;

        let mut lls = RangeArray2::zeros(((lo_ed, lo_sj), (hi_ed + 1, hi_sj + 1)));

        let max_ins = feat.insdj().max_nb_insertions();
        for ed in lo_ed..=hi_ed {
            for sj in lo_sj..=hi_sj {
                if sj < ed || (sj - ed) as usize >= max_ins {
                    continue;
                }
                // careful: the inference reads the DJ insertions reversed
                let mut ins_dj_plus_last = sequence.get_subsequence(ed, sj + 1);
                ins_dj_plus_last.reverse();
                let ll = feat.insdj().likelihood(&ins_dj_plus_last);
                if ll > ip.min_likelihood {
                    *lls.get_mut((ed, sj)) = ll;
                }
            }
        }

        Some(FeatureDJ {
            dirty_likelihood: RangeArray2::zeros(lls.dim()),
            likelihood: lls,
        })
    }

    /// P(ins_dj) for the pair (end_d = ed, start_j = sj).
    pub fn likelihood(&self, ed: i64, sj: i64) -> f64 {
        self.likelihood.get((ed, sj))
    }

    /// Largest stored likelihood (useful for pruning).
    pub fn max_likelihood(&self) -> f64 {
        self.likelihood.max_value()
    }

    /// Iterate over the non-zero (end_d, start_j, likelihood) entries.
    pub fn iter(&self) -> impl Iterator<Item = (i64, i64, &f64)> + '_ {
        self.likelihood.iter().filter(|&(_, _, &ll)| ll != 0.0)
    }

    /// Upper bound (exclusive) of stored end_d values.
    pub fn max_ed(&self) -> i64 {
        self.likelihood.max.0
    }

    /// Lower bound (inclusive) of stored end_d values.
    pub fn min_ed(&self) -> i64 {
        self.likelihood.min.0
    }

    /// Upper bound (exclusive) of stored start_j values.
    pub fn max_sj(&self) -> i64 {
        self.likelihood.max.1
    }

    /// Lower bound (inclusive) of stored start_j values.
    pub fn min_sj(&self) -> i64 {
        self.likelihood.min.1
    }

    /// Accumulate posterior weight for (end_d, start_j) during inference.
    pub fn dirty_update(&mut self, ed: i64, sj: i64, likelihood: f64) {
        *self.dirty_likelihood.get_mut((ed, sj)) += likelihood;
    }

    /// Push the accumulated posterior weights back onto the insertion
    /// feature.
    pub fn disaggregate(
        &self,
        sequence: &Dna,
        feat: &mut impl FeaturesTrait,
        ip: &InferenceParameters,
    ) {
        for ed in self.likelihood.lower().0..self.likelihood.upper().0 {
            for sj in self.likelihood.lower().1..self.likelihood.upper().1 {
                if !(sj >= ed
                    && ((sj - ed) as usize) < feat.insdj().max_nb_insertions()
                    && (self.dirty_likelihood.get((ed, sj)) > 0.))
                {
                    continue;
                }
                // same reversal convention as in `new`
                let mut ins_dj_plus_last = sequence.extract_padded_subsequence(ed, sj + 1);
                ins_dj_plus_last.reverse();
                if feat.insdj().likelihood(&ins_dj_plus_last) > ip.min_likelihood {
                    feat.insdj_mut()
                        .dirty_update(&ins_dj_plus_last, self.dirty_likelihood.get((ed, sj)));
                }
            }
        }
    }
}