* whitespace cleanup

This commit is contained in:
Caner Candan 2011-05-05 17:15:10 +02:00
commit 70e60a50d2
195 changed files with 1763 additions and 1873 deletions

View file

@ -29,7 +29,7 @@ namespace l2
//---------------------------------------------------------------------------
// error
//---------------------------------------------------------------------------
// Binary cross-entropy (log-loss) of `net` over training set `ts`:
// for each sample, run the network forward and accumulate
//   -( target*log(value) + (1-target)*log(1-value) )
// per output unit.  `min_real` is presumably a tiny epsilon guarding
// log(0) — TODO confirm its definition elsewhere in the file.
// NOTE(review): this span is a scraped whitespace-only diff fragment;
// the `@ -37,12 ...` hunk header below is residue, the duplicated
// accumulation line is the old/new pair of the diff, and the
// function's `return` and closing brace fall outside the visible hunk.
real error(const mlp::net& net, const set& ts)
{
real error_ = 0.0;
@ -37,12 +37,12 @@ namespace l2
for (set::const_iterator s = ts.begin(); s != ts.end(); ++s)
{
// `net(s->input)` runs one forward pass and yields the output vector.
vector out = net(s->input);
for (unsigned i = 0; i < out.size(); ++i)
{
real target = s->output[i];
real value = out[i];
// NOTE(review): the next line appears twice — old/new pair of the
// whitespace diff; in the real source it occurs once.
error_ -= target * log(value + min_real) +
error_ -= target * log(value + min_real) +
(1.0 - target) * log(1.0 - value + min_real);
}
}
@ -53,25 +53,25 @@ namespace l2
//-------------------------------------------------------------------------
// l2
//-------------------------------------------------------------------------
// Cross-entropy (l2) specialisation of the qp::net trainer: overrides
// error() so that the forward/backward passes accumulate log-loss
// instead of the base class's error measure.
// NOTE(review): `qp` presumably = a quickprop-style trainer base —
// TODO confirm against the qp header; this scraped view does not show it.
class net: public qp::net
{
public:
// Wraps an existing mlp::net; presumably the qp::net base stores the
// reference — confirm ownership/lifetime in qp::net.
net(mlp::net& n): qp::net(n) {}
// Total error of the network over training set `ts`: each sample is
// propagated forward, then backward() both accumulates the per-neuron
// deltas and returns a value that is subtracted from the running total.
real error(const set& ts)
{
    real total = 0;
    set::const_iterator sample = ts.begin();
    const set::const_iterator last = ts.end();
    while (sample != last)
    {
        forward(sample->input);
        total -= backward(sample->input, sample->output);
        ++sample;
    }
    return total;
}
private:
// Backward pass for one training sample: computes per-neuron deltas
// (output layer first, then hidden layers walked in reverse),
// accumulates the gradient terms `ndelta`/`dxo`, and accumulates this
// sample's log-likelihood into `error_`, which is returned.
// NOTE(review): this span is a scraped whitespace-only diff — the
// `@ -NN,M ...` hunk headers are residue, each duplicated line is the
// old/new pair of the diff, and the declarations of `error_` and
// `current_layer` plus the output-layer loop header fall inside the
// elided hunk gaps.
real backward(const vector& input, const vector& output)
{
@ -84,7 +84,7 @@ namespace l2
{
neuron& n = (*current_layer)[j];
// Here `out` is the TARGET value; `n.out` is the neuron's activation.
real out = output[j];
// Output-layer delta: algebraically ≈ (out - n.out); the divide/multiply
// pair by n.out*(1-n.out) with `min_real` presumably guards the
// degenerate saturated-activation case — TODO confirm min_real.
// NOTE(review): duplicated line below is diff residue (occurs once
// in the real source).
n.ndelta += n.delta = (out - n.out) /
n.ndelta += n.delta = (out - n.out) /
(n.out * (1.0 - n.out) + min_real) * n.out * (1.0 - n.out);
if (size() == 1) // monolayer
@ -92,21 +92,21 @@ namespace l2
else // multilayer
// Weight-gradient terms: delta times the feeding layer's activations.
for (unsigned k = 0; k < n.dxo.size(); ++k)
n.dxo[k] += n.delta * (*backward_layer)[k].out;
// Per-sample log-likelihood contribution (same formula the free
// l2::error function accumulates with opposite sign).
// NOTE(review): duplicated line below is diff residue.
error_ += out * log(n.out + min_real) +
error_ += out * log(n.out + min_real) +
(1.0 - out) * log(1.0 - n.out + min_real);
}
// hidden layers
while (++current_layer != rend())
{
// Layers walked in reverse: `forward_layer` is the layer this one
// feeds into, `backward_layer` the one feeding it.
reverse_iterator forward_layer = current_layer - 1;
// NOTE(review): duplicated line below is diff residue.
reverse_iterator backward_layer = current_layer + 1;
reverse_iterator backward_layer = current_layer + 1;
for (unsigned j = 0; j < current_layer->size(); ++j)
{
neuron& n = (*current_layer)[j];
// NOTE(review): duplicated line below is diff residue.
real sum = 0;
real sum = 0;
// Sum downstream deltas; the weighting term sits in the elided
// hunk below (between L80 and L82 of this view).
for (unsigned k = 0; k < forward_layer->size(); ++k)
{
neuron& nf = (*forward_layer)[k];
@ -114,7 +114,7 @@ namespace l2
}
// Hidden-layer delta: sigmoid derivative times downstream sum.
n.delta = n.out * (1.0 - n.out) * sum;
n.ndelta += n.delta;
if (backward_layer == rend()) // first hidden layer
n.dxo += n.delta * input;
else // rest of hidden layers
@ -122,19 +122,19 @@ namespace l2
n.dxo[k] += n.delta * (*backward_layer)[k].out;
}
}
return error_;
}
};
//---------------------------------------------------------------------------
} // namespace l2
//-----------------------------------------------------------------------------
#endif // l2_h
// Local Variables:
// mode:C++
// Local Variables:
// mode:C++
// End: