/**
 * \file test_WLSProblem.cpp
 *
 * \brief Tests for the WLSProblem class
 *
 * \author Matthias Wieczorek - initial code
 * \author Maximilian Hornung - modularization
 * \author David Frank - rewrite
 * \author Tobias Lasser - rewrite, modernization
 * \author Nikola Dinev - added tests for conversion constructor
 */

#include <catch2/catch.hpp>
#include "WLSProblem.h"
#include "Identity.h"
#include "Scaling.h"
#include "L2NormPow2.h"
#include "WeightedL2NormPow2.h"
#include "Quadric.h"
#include "TikhonovProblem.h"
#include "BlockLinearOperator.h"
#include "RandomBlocksDescriptor.h"

using namespace elsa;

TEMPLATE_TEST_CASE("Scenario: Testing WLSProblem", "", float, double)
{
    GIVEN("the operator and data")
    {
        IndexVector_t numCoeff(2);
        numCoeff << 7, 13;
        DataDescriptor dd(numCoeff);

        Eigen::Matrix<TestType, Eigen::Dynamic, 1> bVec(dd.getNumberOfCoefficients());
        bVec.setRandom();
        DataContainer<TestType> dcB(dd, bVec);

        Identity<TestType> idOp(dd);

        WHEN("setting up a ls problem without x0")
        {
            WLSProblem<TestType> prob(idOp, dcB);

            THEN("the clone works correctly")
            {
                auto probClone = prob.clone();

                REQUIRE(probClone.get() != &prob);
                REQUIRE(*probClone == prob);
            }

            THEN("the problem behaves as expected")
            {
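                // with A = Identity and the solution initialized to zero, the objective is
                // f(x) = 0.5 * ||x - b||^2, so f(0) = 0.5 * ||b||^2, the gradient at zero is -b,
                // and the Hessian is the identity operator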
                DataContainer<TestType> dcZero(dd);
                REQUIRE(prob.getCurrentSolution() == dcZero);

                REQUIRE(prob.evaluate() == Approx(0.5 * bVec.squaredNorm()));
                REQUIRE(prob.getGradient() == static_cast<TestType>(-1.0f) * dcB);

                auto hessian = prob.getHessian();
                REQUIRE(hessian.apply(dcB) == dcB);
            }
        }

        WHEN("setting up a ls problem with x0")
        {
            Eigen::Matrix<TestType, Eigen::Dynamic, 1> x0Vec(dd.getNumberOfCoefficients());
            x0Vec.setRandom();
            DataContainer<TestType> dcX0(dd, x0Vec);

            WLSProblem<TestType> prob(idOp, dcB, dcX0);

            THEN("the clone works correctly")
            {
                auto probClone = prob.clone();

                REQUIRE(probClone.get() != &prob);
                REQUIRE(*probClone == prob);
            }

            THEN("the problem behaves as expected")
            {
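                // evaluated at the initial guess x0, the objective is f(x0) = 0.5 * ||x0 - b||^2,
                // the gradient is x0 - b, and the Hessian is again the identity operator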
                REQUIRE(prob.getCurrentSolution() == dcX0);

                REQUIRE(prob.evaluate() == Approx(0.5 * (x0Vec - bVec).squaredNorm()));
                REQUIRE(prob.getGradient() == (dcX0 - dcB));

                auto hessian = prob.getHessian();
                REQUIRE(hessian.apply(dcB) == dcB);
            }
        }
    }

    GIVEN("weights, operator and data")
    {
        IndexVector_t numCoeff(3);
        numCoeff << 7, 13, 17;
        DataDescriptor dd(numCoeff);

        Eigen::Matrix<TestType, Eigen::Dynamic, 1> bVec(dd.getNumberOfCoefficients());
        bVec.setRandom();
        DataContainer<TestType> dcB(dd, bVec);

        Identity<TestType> idOp(dd);

        Eigen::Matrix<TestType, Eigen::Dynamic, 1> weightsVec(dd.getNumberOfCoefficients());
        weightsVec.setRandom();
        DataContainer dcWeights(dd, weightsVec);
        Scaling scaleOp(dd, dcWeights);

        WHEN("setting up a wls problem without x0")
        {
            WLSProblem<TestType> prob(scaleOp, idOp, dcB);

            THEN("the clone works correctly")
            {
                auto probClone = prob.clone();

                REQUIRE(probClone.get() != &prob);
                REQUIRE(*probClone == prob);
            }

            THEN("the problem behaves as expected")
            {
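                // with W = diag(weightsVec) and A = Identity, the objective is
                // f(x) = 0.5 * (x - b)^T W (x - b), so f(0) = 0.5 * b^T W b, the gradient at
                // zero is -W * b, and the Hessian is the scaling operator W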
                DataContainer<TestType> dcZero(dd);
                REQUIRE(prob.getCurrentSolution() == dcZero);

                REQUIRE(prob.evaluate()
                        == Approx(0.5 * bVec.dot((weightsVec.array() * bVec.array()).matrix())));
                REQUIRE(prob.getGradient() == static_cast<TestType>(-1.0f) * dcWeights * dcB);

                auto hessian = prob.getHessian();
                REQUIRE(hessian.apply(dcB) == dcWeights * dcB);
            }
        }

        WHEN("setting up a wls problem with x0")
        {
            Eigen::Matrix<TestType, Eigen::Dynamic, 1> x0Vec(dd.getNumberOfCoefficients());
            x0Vec.setRandom();
            DataContainer<TestType> dcX0(dd, x0Vec);

            WLSProblem<TestType> prob(scaleOp, idOp, dcB, dcX0);

            THEN("the clone works correctly")
            {
                auto probClone = prob.clone();

                REQUIRE(probClone.get() != &prob);
                REQUIRE(*probClone == prob);
            }

            THEN("the problem behaves as expected")
            {
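                // evaluated at the initial guess x0, the objective is
                // f(x0) = 0.5 * (x0 - b)^T W (x0 - b), the gradient is W * (x0 - b), and the
                // Hessian is again the scaling operator W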
                DataContainer<TestType> dcZero(dd);
                REQUIRE(prob.getCurrentSolution() == dcX0);

                REQUIRE(
                    prob.evaluate()
                    == Approx(0.5
                              * (x0Vec - bVec)
                                    .dot((weightsVec.array() * (x0Vec - bVec).array()).matrix())));
                REQUIRE(prob.getGradient() == dcWeights * (dcX0 - dcB));

                auto hessian = prob.getHessian();
                REQUIRE(hessian.apply(dcB) == dcWeights * dcB);
            }
        }
    }

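    // the scenarios below test the conversion constructor, which takes a generic Problem built
    // solely from (weighted) least squares terms and converts it into an equivalent WLSProblem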
    GIVEN("an optimization problem with only a (w)ls data term")
    {
        DataDescriptor desc{IndexVector_t::Constant(1, 343)};

        Scaling<TestType> W{desc, static_cast<TestType>(3.0)};

        LinearResidual<TestType> residual{desc};
        L2NormPow2<TestType> func{residual};
        WeightedL2NormPow2<TestType> weightedFunc{residual, W};
        Problem<TestType> prob{func};
        Problem<TestType> weightedProb{weightedFunc};

        WHEN("converting to a wls problem")
        {
            WLSProblem<TestType> lsProb{prob};
            WLSProblem<TestType> wlsProb{weightedProb};

            THEN("only the type of the problem changes")
            {
                REQUIRE(lsProb.getDataTerm() == prob.getDataTerm());
                REQUIRE(lsProb.getRegularizationTerms() == prob.getRegularizationTerms());
                REQUIRE(wlsProb.getDataTerm() == weightedProb.getDataTerm());
                REQUIRE(wlsProb.getRegularizationTerms() == weightedProb.getRegularizationTerms());
            }
        }
    }

    GIVEN("an optimization problem with a non-(w)ls data term")
    {
        DataDescriptor desc{IndexVector_t::Constant(1, 343)};

        Quadric<TestType> quadric{desc};
        Problem prob{quadric};

        WHEN("converting to a WLSProblem")
        {
            THEN("an exception is thrown") { REQUIRE_THROWS(WLSProblem<TestType>{prob}); }
        }
    }

    GIVEN("an optimization problem with a non-(w)ls regularization term")
    {
        DataDescriptor desc{IndexVector_t::Constant(1, 343)};

        Quadric<TestType> quadric{desc};
        RegularizationTerm regTerm{static_cast<TestType>(5), quadric};
        Problem prob{L2NormPow2<TestType>{desc}, regTerm};

        WHEN("converting to a WLSProblem")
        {
            THEN("an exception is thrown") { REQUIRE_THROWS(WLSProblem<TestType>{prob}); }
        }
    }

    GIVEN("an optimization problem with a wls data term that has negative weighting factors")
    {
        DataDescriptor desc{IndexVector_t::Constant(1, 343)};

        Scaling<TestType> W1{desc, static_cast<TestType>(-3.0)};

        Eigen::Matrix<TestType, Eigen::Dynamic, 1> anisotropicW =
            Eigen::Matrix<TestType, Eigen::Dynamic, 1>::Constant(343, 1);
        anisotropicW[256] = -3.0;

        Scaling<TestType> W2{desc, DataContainer(desc, anisotropicW)};

        LinearResidual<TestType> residual{desc};
        WeightedL2NormPow2<TestType> weightedFunc1{residual, W1};
        WeightedL2NormPow2<TestType> weightedFunc2{residual, W2};

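        // with negative weighting factors the conversion is still expected to succeed as long as
        // no terms have to be merged; once regularization terms are present (which presumably
        // requires taking square roots of the weights), the conversion should throw instead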
        WHEN("converting to a WLSProblem and no regularization terms are present")
        {
            Problem prob1{weightedFunc1};
            Problem prob2{weightedFunc2};

            WLSProblem converted1{prob1};
            WLSProblem converted2{prob2};

            THEN("only the type of the problem changes")
            {
                REQUIRE(prob1.getDataTerm() == converted1.getDataTerm());
                REQUIRE(prob1.getRegularizationTerms() == converted1.getRegularizationTerms());
                REQUIRE(prob2.getDataTerm() == converted2.getDataTerm());
                REQUIRE(prob2.getRegularizationTerms() == converted2.getRegularizationTerms());
            }
        }

        WHEN("converting to a WLSProblem and regularization terms are present")
        {
            RegularizationTerm regTerm{static_cast<TestType>(1.0), L2NormPow2<TestType>{desc}};
            Problem prob1{weightedFunc1, regTerm};
            Problem prob2{weightedFunc2, regTerm};

            THEN("an exception is thrown")
            {
                REQUIRE_THROWS(WLSProblem{prob1});
                REQUIRE_THROWS(WLSProblem{prob2});
            }
        }
    }

    GIVEN("an optimization problem with a (w)ls regularization term that has negative weighting "
          "factors")
    {
        DataDescriptor desc{IndexVector_t::Constant(1, 343)};

        Scaling<TestType> W1{desc, static_cast<TestType>(-3.0)};

        Eigen::Matrix<TestType, Eigen::Dynamic, 1> anisotropicW =
            Eigen::Matrix<TestType, Eigen::Dynamic, 1>::Constant(343, 1);
        anisotropicW[256] = -3.0;

        Scaling<TestType> W2{desc, DataContainer(desc, anisotropicW)};

        LinearResidual<TestType> residual{desc};
        WeightedL2NormPow2<TestType> weightedFunc1{residual, W1};
        WeightedL2NormPow2<TestType> weightedFunc2{residual, W2};
        L2NormPow2<TestType> nonWeightedFunc{desc};

        RegularizationTerm negWeights{static_cast<TestType>(1.0), weightedFunc1};
        RegularizationTerm mixedWeights{static_cast<TestType>(1.0), weightedFunc2};
        RegularizationTerm noWeightsNegLambda{static_cast<TestType>(-1.0), nonWeightedFunc};
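        // a regularization term with negative weights (or a negative regularization parameter)
        // cannot be expressed as a sum of squares, so the conversion should throw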
        WHEN("converting to a WLSProblem")
        {
            Problem prob1{L2NormPow2<TestType>{desc}, negWeights};
            Problem prob2{L2NormPow2<TestType>{desc}, mixedWeights};
            Problem prob3{L2NormPow2<TestType>{desc}, noWeightsNegLambda};

            THEN("an exception is thrown")
            {
                REQUIRE_THROWS(WLSProblem{prob1});
                REQUIRE_THROWS(WLSProblem{prob2});
                REQUIRE_THROWS(WLSProblem{prob3});
            }
        }
    }

    GIVEN("an OptimizationProblem with only (w)ls terms")
    {
        DataDescriptor desc{IndexVector_t::Constant(1, 343)};
        Eigen::Matrix<TestType, Eigen::Dynamic, 1> vec =
            Eigen::Matrix<TestType, Eigen::Dynamic, 1>::Random(343);
        DataContainer<TestType> b{desc, vec};

        Scaling<TestType> A{desc, static_cast<TestType>(2.0)};

        Scaling<TestType> isoW{desc, static_cast<TestType>(3.0)};
        Eigen::Matrix<TestType, Eigen::Dynamic, 1> vecW =
            Eigen::abs(Eigen::Matrix<TestType, Eigen::Dynamic, 1>::Random(343).array());
        DataContainer<TestType> dcW{desc, vecW};
        Scaling<TestType> nonIsoW{desc, dcW};

        std::vector<std::unique_ptr<Functional<TestType>>> dataTerms;

        dataTerms.push_back(std::make_unique<L2NormPow2<TestType>>(desc));
        dataTerms.push_back(std::make_unique<L2NormPow2<TestType>>(LinearResidual{b}));
        dataTerms.push_back(std::make_unique<L2NormPow2<TestType>>(LinearResidual{A}));
        dataTerms.push_back(std::make_unique<L2NormPow2<TestType>>(LinearResidual{A, b}));
        dataTerms.push_back(
            std::make_unique<WeightedL2NormPow2<TestType>>(LinearResidual{A, b}, isoW));
        dataTerms.push_back(
            std::make_unique<WeightedL2NormPow2<TestType>>(LinearResidual{A, b}, nonIsoW));

        Eigen::Matrix<TestType, Eigen::Dynamic, 1> regVec =
            Eigen::Matrix<TestType, Eigen::Dynamic, 1>::Random(343);
        DataContainer<TestType> bReg{desc, regVec};

        Scaling<TestType> AReg{desc, static_cast<TestType>(0.25)};

        Scaling<TestType> isoWReg{desc, static_cast<TestType>(1.5)};
        Eigen::Matrix<TestType, Eigen::Dynamic, 1> vecWReg =
            Eigen::abs(Eigen::Matrix<TestType, Eigen::Dynamic, 1>::Random(343).array());
        DataContainer<TestType> dcWReg{desc, vecWReg};
        Scaling<TestType> nonIsoWReg{desc, dcWReg};

        std::vector<std::unique_ptr<RegularizationTerm<TestType>>> regTerms;
        auto weight = static_cast<TestType>(0.5);
        regTerms.push_back(
            std::make_unique<RegularizationTerm<TestType>>(weight, L2NormPow2<TestType>{desc}));
        regTerms.push_back(std::make_unique<RegularizationTerm<TestType>>(
            weight, L2NormPow2<TestType>{LinearResidual{bReg}}));
        regTerms.push_back(std::make_unique<RegularizationTerm<TestType>>(
            weight, L2NormPow2<TestType>{LinearResidual{AReg}}));
        regTerms.push_back(std::make_unique<RegularizationTerm<TestType>>(
            weight, L2NormPow2<TestType>{LinearResidual{AReg, bReg}}));
        regTerms.push_back(std::make_unique<RegularizationTerm<TestType>>(
            weight, WeightedL2NormPow2{LinearResidual{AReg, bReg}, isoWReg}));
        regTerms.push_back(std::make_unique<RegularizationTerm<TestType>>(
            weight, WeightedL2NormPow2{LinearResidual{AReg, bReg}, nonIsoWReg}));

        std::array descriptions = {"has no operator and no vector",
                                   "has no operator, but has a vector",
                                   "has an operator, but no vector",
                                   "has an operator and a vector",
                                   "has an operator and a vector, and is weighted (isotropic)",
                                   "has an operator and a vector, and is weighted (nonisotropic)"};

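        // exercise every combination of the data terms and regularization terms defined above;
        // the converted problem should yield the same function value and gradient as the original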
        for (std::size_t i = 0; i < descriptions.size(); i++) {
            for (std::size_t j = 0; j < descriptions.size(); j++) {
                WHEN(std::string("The data term ") + descriptions[i] + ". The regularization term "
                     + descriptions[j])
                {
                    Eigen::Matrix<TestType, Eigen::Dynamic, 1> xVec =
                        Eigen::Matrix<TestType, Eigen::Dynamic, 1>::Random(343);
                    DataContainer<TestType> x{desc, xVec};
                    Problem prob{*dataTerms[i], *regTerms[j], x};

                    THEN("the problem can be converted and all operations yield the same result as "
                         "for the initial problem")
                    {
                        WLSProblem<TestType> converted{prob};
                        REQUIRE(prob.evaluate() == Approx(converted.evaluate()));

                        auto gradDiff = prob.getGradient();
                        gradDiff -= converted.getGradient();
                        REQUIRE(gradDiff.squaredL2Norm()
                                == Approx(0).margin(std::numeric_limits<TestType>::epsilon()));
                    }
                }
            }
        }
    }

    GIVEN("a TikhonovProblem with L2 regularization")
    {
        DataDescriptor desc{IndexVector_t::Constant(1, 343)};
        Eigen::Matrix<TestType, Eigen::Dynamic, 1> vec =
            Eigen::Matrix<TestType, Eigen::Dynamic, 1>::Random(343);
        DataContainer<TestType> b{desc, vec};

        Scaling<TestType> A{desc, static_cast<TestType>(2.0)};
        WLSProblem<TestType> prob{A, b};

        TestType regWeight = 4.0;
        TikhonovProblem<TestType> l2Reg{prob,
                                        RegularizationTerm{regWeight, L2NormPow2<TestType>{desc}}};

        THEN("the problem can be converted into a block form WLSProblem")
        {
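            // minimizing 0.5 * ||Ax - b||^2 + 0.5 * lambda * ||x||^2 is equivalent to an ordinary
            // least squares problem with the row-block operator [A; sqrt(lambda) * I] and the
            // stacked data vector [b; 0], which is built explicitly here for comparison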
            WLSProblem<TestType> conv{l2Reg};

            Scaling<TestType> lambdaScaling(desc, std::sqrt(regWeight));
            std::vector<std::unique_ptr<LinearOperator<TestType>>> opList(0);
            opList.push_back(A.clone());
            opList.push_back(lambdaScaling.clone());
            BlockLinearOperator<TestType> blockOp{opList,
                                                  BlockLinearOperator<TestType>::BlockType::ROW};

            std::vector<std::unique_ptr<DataDescriptor>> descList(0);
            descList.push_back(desc.clone());
            descList.push_back(desc.clone());
            RandomBlocksDescriptor vecDesc{descList};
            DataContainer<TestType> blockVec{vecDesc};
            blockVec.getBlock(0) = b;
            blockVec.getBlock(1) = 0;

            L2NormPow2<TestType> blockWls{LinearResidual<TestType>{blockOp, blockVec}};
            REQUIRE(conv.getDataTerm() == blockWls);
            REQUIRE(conv.getRegularizationTerms().empty());
        }
    }
}