//! Applies a Dropout layer to the input data `x`
//!
//! The variables are:
//!
//! - `y`: output value
//! - `x`: input value
//! - `p`: dropout probability
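//!
//! During training, each element of `x` is zeroed with probability `p`.
//! Whether the surviving elements are rescaled by `1 / (1 - p)` (inverted
//! dropout) is determined by the backend's `conn::Dropout` implementation;
//! this layer only forwards the call to the backend.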

use crate::capnp_util::*;
use crate::co::{IBackend, SharedTensor};
use crate::conn;
use crate::juice_capnp::dropout_config as capnp_config;
use crate::layer::*;
use crate::util::ArcLock;
use std::rc::Rc;

#[derive(Debug, Clone)]
/// [Dropout](./index.html) Layer
pub struct Dropout<T, B: conn::Dropout<T>> {
    probability: f32,
    seed: u64,
    dropout_config: Vec<Rc<B::CDROP>>,
}

impl<T, B: conn::Dropout<T>> Dropout<T, B> {
    /// Create a Dropout layer from a DropoutConfig.
    pub fn from_config(config: &DropoutConfig) -> Dropout<T, B> {
        Dropout {
            // TODO consider moving to vec
            probability: config.probability,
            // TODO consider moving to vec
            seed: config.seed,
            dropout_config: vec![],
        }
    }
}
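
// A minimal construction sketch. `SomeBackend` is a stand-in for any type
// implementing `conn::Dropout<f32>` (e.g. the CUDA backend when the crate
// is built with the `cuda` feature):
//
//     let cfg = DropoutConfig { probability: 0.5, seed: 42 };
//     let layer = Dropout::<f32, SomeBackend>::from_config(&cfg);
//
// The per-input backend state in `dropout_config` stays empty until
// `reshape` runs, once the input shapes are known.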

//
// Dropout
//
impl<B: IBackend + conn::Dropout<f32>> ILayer<B> for Dropout<f32, B> {
    impl_ilayer_common!();

    fn reshape(
        &mut self,
        backend: ::std::rc::Rc<B>,
        input_data: &mut Vec<ArcLock<SharedTensor<f32>>>,
        input_gradient: &mut Vec<ArcLock<SharedTensor<f32>>>,
        weights_data: &mut Vec<ArcLock<SharedTensor<f32>>>,
        weights_gradient: &mut Vec<ArcLock<SharedTensor<f32>>>,
        output_data: &mut Vec<ArcLock<SharedTensor<f32>>>,
        output_gradient: &mut Vec<ArcLock<SharedTensor<f32>>>,
    ) {
        for i in 0..input_data.len() {
            // Shape each gradient and output blob like its corresponding input.
            let inp = input_data[i].read().unwrap();
            let input_desc = inp.desc();
            input_gradient[i].write().unwrap().resize(input_desc).unwrap();
            output_data[i].write().unwrap().resize(input_desc).unwrap();
            output_gradient[i].write().unwrap().resize(input_desc).unwrap();

            // One backend dropout descriptor per input blob.
            let config = backend.new_dropout_config(self.probability, self.seed).unwrap();
            self.dropout_config.push(Rc::new(config));
        }
    }
}

impl<B: IBackend + conn::Dropout<f32>> ComputeOutput<f32, B> for Dropout<f32, B> {
    fn compute_output(
        &self,
        backend: &B,
        _weights: &[&SharedTensor<f32>],
        input_data: &[&SharedTensor<f32>],
        output_data: &mut [&mut SharedTensor<f32>],
    ) {
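        // Forward pass: delegate to the backend, which applies dropout to
        // `input_data[0]` using the descriptor created during `reshape`.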
        let config = &self.dropout_config[0];
        backend.dropout(input_data[0], output_data[0], &*config).unwrap();
    }
}

impl<B: IBackend + conn::Dropout<f32>> ComputeInputGradient<f32, B> for Dropout<f32, B> {
    fn compute_input_gradient(
        &self,
        backend: &B,
        weights_data: &[&SharedTensor<f32>],
        output_data: &[&SharedTensor<f32>],
        output_gradients: &[&SharedTensor<f32>],
        input_data: &[&SharedTensor<f32>],
        input_gradients: &mut [&mut SharedTensor<f32>],
    ) {
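        // Backward pass: the backend routes `output_gradients` back through
        // the same dropout descriptor, so the gradients of dropped-out
        // inputs are zeroed.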
        let dropout_config = &self.dropout_config[0];
        backend
            .dropout_grad(
                output_data[0],
                output_gradients[0],
                input_data[0],
                input_gradients[0],
                dropout_config,
            )
            .unwrap()
    }
}

impl<B: IBackend + conn::Dropout<f32>> ComputeParametersGradient<f32, B> for Dropout<f32, B> {}

#[derive(Debug, Copy, Clone)]
/// Specifies configuration parameters for a Dropout Layer.
pub struct DropoutConfig {
    /// The probability that an input value is set to zero (i.e. dropped)
    pub probability: f32,
    /// The initial seed for the (pseudo-)random generator
    pub seed: u64,
}

impl From<DropoutConfig> for LayerType {
    fn from(config: DropoutConfig) -> LayerType {
        LayerType::Dropout(config)
    }
}

impl<'a> CapnpWrite<'a> for DropoutConfig {
    type Builder = capnp_config::Builder<'a>;

    /// Write the DropoutConfig into a capnp message.
    fn write_capnp(&self, builder: &mut Self::Builder) {
        builder.reborrow().set_probability(self.probability);
        builder.reborrow().set_seed(self.seed);
    }
}

impl<'a> CapnpRead<'a> for DropoutConfig {
    type Reader = capnp_config::Reader<'a>;

    /// Read the DropoutConfig from a capnp message.
    fn read_capnp(reader: Self::Reader) -> Self {
        let probability: f32 = reader.get_probability();
        let seed: u64 = reader.get_seed();

        DropoutConfig { probability, seed }
    }
}

impl ::std::default::Default for DropoutConfig {
    fn default() -> DropoutConfig {
        DropoutConfig {
            probability: 0.75,
            seed: 42,
        }
    }
}
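
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal test sketch covering only the backend-independent parts of
    // this file (defaults and the LayerType conversion). Exercising the
    // forward/backward paths would require a concrete `conn::Dropout`
    // backend, so that is left to the integration tests.

    #[test]
    fn default_config_values() {
        let cfg = DropoutConfig::default();
        assert_eq!(cfg.probability, 0.75);
        assert_eq!(cfg.seed, 42);
    }

    #[test]
    fn config_converts_into_layer_type() {
        let layer_type: LayerType = DropoutConfig::default().into();
        match layer_type {
            LayerType::Dropout(cfg) => assert_eq!(cfg.seed, 42),
            _ => panic!("expected LayerType::Dropout"),
        }
    }
}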