<p>Hey,</p>

<p>I'm working with the ANN module and have started with the feed_forward test. I'm trying to return the built network back to the caller so that I can use it there (I was running into a lot of problems because of the template programming involved — more details <a href="http://stackoverflow.com/questions/35703764/c-return-from-template">here</a> — which I was able to solve by compiling with -std=c++1y and returning auto). However, when I tried to train the returned network, it threw a matrix multiplication exception, which I'm assuming means that something got corrupted while returning. So I thought I could instead train the network and save it, so that I could reload it later for prediction. The code below does that and saves the network parameters in the "test" file. The problem is that while loading I now have to specify the type of the network (which was determined by decltype(modules) and decltype(classOutputLayer)). Furthermore, the XML file that was saved contains just the parameters, which I'm guessing are the values of the weight matrices, and does not indicate the type of the FFN. Is there a way to fix this problem?</p>

<pre><code>#include &lt;iostream&gt;
#include &lt;fstream&gt;

#include &lt;mlpack/core.hpp&gt;

#include &lt;mlpack/methods/ann/activation_functions/logistic_function.hpp&gt;
#include &lt;mlpack/methods/ann/activation_functions/tanh_function.hpp&gt;

#include &lt;mlpack/methods/ann/init_rules/random_init.hpp&gt;

#include &lt;mlpack/methods/ann/layer/bias_layer.hpp&gt;
#include &lt;mlpack/methods/ann/layer/linear_layer.hpp&gt;
#include &lt;mlpack/methods/ann/layer/base_layer.hpp&gt;
#include &lt;mlpack/methods/ann/layer/dropout_layer.hpp&gt;
#include &lt;mlpack/methods/ann/layer/binary_classification_layer.hpp&gt;

#include &lt;mlpack/methods/ann/ffn.hpp&gt;
#include &lt;mlpack/methods/ann/performance_functions/mse_function.hpp&gt;

using namespace mlpack;

// Builds a small two-stage feed-forward network (linear + bias + activation,
// twice), trains it on trainData/trainLabels, and serializes the trained
// parameters to an XML archive in a file named "test".
//
// NOTE(review): the archive stores only the learned parameters; the network's
// structure is encoded in its compile-time template type, so deserializing
// later requires re-declaring an FFN of the exact same type.
template &lt;typename PerformanceFunction,
         typename OutputLayerType,
         typename PerformanceFunctionType,
         typename MatType = arma::mat
         &gt;
void BuildFFN(MatType&amp; trainData, MatType&amp; trainLabels, MatType&amp; testData, MatType&amp; testLabels, const size_t hiddenLayerSize)
{
    // Input stage: linear map from the input dimensionality (rows of
    // trainData) to hiddenLayerSize units, plus bias and activation.
    ann::LinearLayer&lt;&gt; inputLayer(trainData.n_rows, hiddenLayerSize);
    ann::BiasLayer&lt;&gt; inputBiasLayer(hiddenLayerSize);
    ann::BaseLayer&lt;PerformanceFunction&gt; inputBaseLayer;

    // Hidden stage: linear map from the hidden units to the label
    // dimensionality (rows of trainLabels), plus bias and activation.
    ann::LinearLayer&lt;&gt; hiddenLayer1(hiddenLayerSize, trainLabels.n_rows);
    ann::BiasLayer&lt;&gt; hiddenBiasLayer1(trainLabels.n_rows);
    ann::BaseLayer&lt;PerformanceFunction&gt; outputLayer;

    // Output rule that turns final activations into class decisions.
    OutputLayerType classOutputLayer;

    // std::tie produces a tuple of REFERENCES to the local layer objects
    // above, and the FFN's type depends on decltype(modules). This is why
    // returning the network from this function is problematic: the returned
    // object would refer to locals that are destroyed on return.
    auto modules = std::tie(inputLayer, inputBiasLayer, inputBaseLayer, hiddenLayer1, hiddenBiasLayer1, outputLayer);
    ann::FFN&lt;decltype(modules), decltype(classOutputLayer), ann::RandomInitialization, PerformanceFunctionType&gt; net(modules, classOutputLayer);

    net.Train(trainData, trainLabels);
    //arma::mat prediction;
    //net.Predict(testData, prediction);

    // Persist the trained parameters (weights only, not the network type)
    // to "test" as a boost XML archive.
    std::ofstream ofs("test", std::ios::binary);
    boost::archive::xml_oarchive o(ofs);
    net.Serialize(o, 1);
    //o &lt;&lt; data::CreateNVP(net, "N");
    //ofs.close();

    //return net;
}

int main(int argc, char** argv)
{
    arma::mat dataset;
    // Training set: all but the last three rows are features; the last three
    // rows are the labels. NOTE(review): assumes the CSV has this layout --
    // verify against the data files.
    data::Load("../data/thyroid_train.csv", dataset, true);
    arma::mat trainData = dataset.submat(0, 0, dataset.n_rows - 4, dataset.n_cols - 1);
    arma::mat trainLabels = dataset.submat(dataset.n_rows - 3, 0, dataset.n_rows - 1, dataset.n_cols - 1);

    // Test set, split the same way; `dataset` is reused (overwritten) here.
    data::Load("../data/thyroid_test.csv", dataset, true);
    arma::mat testData = dataset.submat(0, 0, dataset.n_rows - 4, dataset.n_cols - 1);
    arma::mat testLabels = dataset.submat(dataset.n_rows - 3, 0, dataset.n_rows - 1, dataset.n_cols - 1);

    std::cout &lt;&lt; "Loaded the training and testing datasets" &lt;&lt; std::endl;

    // Number of units in the hidden layer of the network built below.
    const size_t hiddenLayerSize = 8;

    // Commented-out attempt to deserialize a previously saved network;
    // it stalls on needing the FFN's full template type up front.
    //std::ifstream ifs("test2", std::ios::binary);
    //boost::archive::xml_iarchive i(ifs);

    // Commented-out attempt to return the built network to the caller
    // (abandoned -- see the issue text above for why).
    //auto net = BuildFFN&lt;ann::LogisticFunction, ann::BinaryClassificationLayer, ann::MeanSquaredErrorFunction&gt;
        //(trainData, trainLabels, testData, testLabels, hiddenLayerSize);
    BuildFFN&lt;ann::LogisticFunction, ann::BinaryClassificationLayer, ann::MeanSquaredErrorFunction&gt;
        (trainData, trainLabels, testData, testLabels, hiddenLayerSize);

    // Commented-out evaluation: counts columns where the prediction differs
    // from the test labels. NOTE(review): classificationError is used
    // uninitialized here if re-enabled -- it should start at 0.
    //double classificationError;
    //for (size_t i = 0; i &lt; testData.n_cols; i++)
    //{
        //if (arma::sum(arma::sum(arma::abs(prediction.col(i) - testLabels.col(i)))) != 0)
        //{
            //classificationError++;
        //}
    //}

    //std::cout &lt;&lt; "Classification Error = " &lt;&lt; (double(classificationError) / testData.n_cols) * 100 &lt;&lt; "%" &lt;&lt; std::endl;

    return 0;
}

</code></pre>

<p>Thanks.</p>

<p style="font-size:small;-webkit-text-size-adjust:none;color:#666;">&mdash;<br>Reply to this email directly or <a href="https://github.com/mlpack/mlpack/issues/531">view it on GitHub</a>.<img alt="" height="1" src="https://github.com/notifications/beacon/AJ4bFJCWIAtBcyBoW1TQgYLl2s1UdOrDks5ppDRkgaJpZM4HmbCC.gif" width="1" /></p>
<div itemscope itemtype="http://schema.org/EmailMessage">
<div itemprop="action" itemscope itemtype="http://schema.org/ViewAction">
  <link itemprop="url" href="https://github.com/mlpack/mlpack/issues/531"></link>
  <meta itemprop="name" content="View Issue"></meta>
</div>
<meta itemprop="description" content="View this Issue on GitHub"></meta>
</div>