CoDiPack 2.3.0
A Code Differentiation Package
SciComp TU Kaiserslautern
algorithms.hpp
/*
 * CoDiPack, a Code Differentiation Package
 *
 * Copyright (C) 2015-2024 Chair for Scientific Computing (SciComp), University of Kaiserslautern-Landau
 * Homepage: http://scicomp.rptu.de
 * Contact: Prof. Nicolas R. Gauger (codi@scicomp.uni-kl.de)
 *
 * Lead developers: Max Sagebaum, Johannes Blühdorn (SciComp, University of Kaiserslautern-Landau)
 *
 * This file is part of CoDiPack (http://scicomp.rptu.de/software/codi).
 *
 * CoDiPack is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * CoDiPack is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 *
 * See the GNU General Public License for more details.
 * You should have received a copy of the GNU
 * General Public License along with CoDiPack.
 * If not, see <http://www.gnu.org/licenses/>.
 *
 * For other licensing options please contact us.
 *
 * Authors:
 *  - SciComp, University of Kaiserslautern-Landau:
 *    - Max Sagebaum
 *    - Johannes Blühdorn
 *  - Former members:
 *    - Tim Albring
 */
#pragma once

#include "../config.h"
#include "../expressions/lhsExpressionInterface.hpp"
#include "../misc/exceptions.hpp"
#include "../tapes/misc/tapeParameters.hpp"
#include "../traits/adjointVectorTraits.hpp"
#include "../traits/gradientTraits.hpp"
#include "data/dummy.hpp"
#include "data/jacobian.hpp"
#include "data/staticDummy.hpp"
namespace codi {

  /// Basic algorithms for tape evaluation in CoDiPack.
  template<typename T_Type, bool T_ActiveChecks = true>
  struct Algorithms {
    public:

      using Type = CODI_DD(T_Type, CODI_DEFAULT_LHS_EXPRESSION);

      static bool constexpr ActiveChecks = T_ActiveChecks;

      using Tape = typename Type::Tape;
      using Position = typename Tape::Position;
      using Real = typename Type::Real;
      using Identifier = typename Type::Identifier;
      using Gradient = typename Type::Gradient;

      using GT = GradientTraits::TraitsImplementation<Gradient>;  ///< Shortcut for traits of gradient.
      /// Evaluation modes for the derivative computation.
      enum class EvaluationType {
        Forward,
        Reverse
      };

      /// Choose the evaluation type: forward if there are at most as many inputs as outputs,
      /// reverse otherwise.
      static CODI_INLINE EvaluationType getEvaluationChoice(size_t const inputs, size_t const outputs) {
        if (inputs <= outputs) {
          return EvaluationType::Forward;
        } else {
          return EvaluationType::Reverse;
        }
      }

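      // Example: for f: R^2 -> R^5 (two inputs, five outputs), the forward mode needs one
      // sweep per input direction while the reverse mode needs one sweep per output, so
      // getEvaluationChoice(2, 5) selects EvaluationType::Forward.
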
      /// Compute the Jacobian with multiple tape sweeps.
      template<typename Jac, bool keepState = true>
      static CODI_INLINE void computeJacobian(Tape& tape, Position const& start, Position const& end,
                                              Identifier const* input, size_t const inputSize, Identifier const* output,
                                              size_t const outputSize, Jac& jac,
                                              AdjointsManagement adjointsManagement = AdjointsManagement::Automatic) {
        // Internally, automatic management is implemented in an optimized way that uses manual management.
        if (AdjointsManagement::Automatic == adjointsManagement) {
          tape.resizeAdjointVector();
          tape.beginUseAdjointVector();
        }

        computeJacobianCustomAdjoints(tape, start, end, input, inputSize, output, outputSize, jac,
                                      tape.getInternalAdjoints());

        if (AdjointsManagement::Automatic == adjointsManagement) {
          tape.endUseAdjointVector();
        }
      }

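      // A minimal usage sketch, assuming a tape recorded for y = f(x) with identifier arrays
      // inputIds/outputIds collected from the registered variables via getIdentifier();
      // codi::Jacobian is the dense storage type from the included data/jacobian.hpp.
      //
      //   codi::Jacobian<double> jac(outputSize, inputSize);
      //   codi::Algorithms<codi::RealReverse>::computeJacobian(
      //       tape, tape.getZeroPosition(), tape.getPosition(),
      //       inputIds, inputSize, outputIds, outputSize, jac);
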
      /// Compute the Jacobian with multiple tape sweeps using a custom adjoint vector.
      template<typename Jac, typename AdjointVector, bool keepState = true>
      static CODI_INLINE void computeJacobianCustomAdjoints(Tape& tape, Position const& start, Position const& end,
                                                            Identifier const* input, size_t const inputSize,
                                                            Identifier const* output, size_t const outputSize, Jac& jac,
                                                            AdjointVector&& adjoints) {
        using CustomGT = GradientTraits::TraitsImplementation<AdjointVectorTraits::Gradient<AdjointVector>>;

        size_t constexpr gradDim = CustomGT::dim;

        EvaluationType evalType = getEvaluationChoice(inputSize, outputSize);
        if (EvaluationType::Forward == evalType) {
          for (size_t j = 0; j < inputSize; j += gradDim) {
            setGradientOnIdentifierCustomAdjoints(tape, j, input, inputSize, typename CustomGT::Real(1.0), adjoints);

            if (keepState) {
              tape.evaluateForwardKeepState(start, end, std::forward<AdjointVector>(adjoints));
            } else {
              tape.evaluateForward(start, end, std::forward<AdjointVector>(adjoints));
            }

            for (size_t i = 0; i < outputSize; i += 1) {
              for (size_t curDim = 0; curDim < gradDim && j + curDim < inputSize; curDim += 1) {
                jac(i, j + curDim) = CustomGT::at(adjoints[output[i]], curDim);
                if (tape.isIdentifierActive(output[i])) {
                  CustomGT::at(adjoints[output[i]], curDim) = typename CustomGT::Real();
                }
              }
            }

            setGradientOnIdentifierCustomAdjoints(tape, j, input, inputSize, typename CustomGT::Real(), adjoints);
          }

          tape.clearCustomAdjoints(end, start, std::forward<AdjointVector>(adjoints));

        } else if (EvaluationType::Reverse == evalType) {
          for (size_t i = 0; i < outputSize; i += gradDim) {
            setGradientOnIdentifierCustomAdjoints(tape, i, output, outputSize, typename CustomGT::Real(1.0), adjoints);

            if (keepState) {
              tape.evaluateKeepState(end, start, std::forward<AdjointVector>(adjoints));
            } else {
              tape.evaluate(end, start, std::forward<AdjointVector>(adjoints));
            }

            for (size_t j = 0; j < inputSize; j += 1) {
              for (size_t curDim = 0; curDim < gradDim && i + curDim < outputSize; curDim += 1) {
                jac(i + curDim, j) = CustomGT::at(adjoints[input[j]], curDim);
                CustomGT::at(adjoints[input[j]], curDim) = typename CustomGT::Real();
              }
            }

            setGradientOnIdentifierCustomAdjoints(tape, i, output, outputSize, typename CustomGT::Real(), adjoints);

            if (!Config::ReversalZeroesAdjoints) {
              tape.clearCustomAdjoints(end, start, std::forward<AdjointVector>(adjoints));
            }
          }
        } else {
          CODI_EXCEPTION("Evaluation mode not implemented. Mode is: %d.", (int)evalType);
        }
      }

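      // A minimal usage sketch for the custom adjoint variant, assuming the same recorded tape
      // and identifier arrays as above. The adjoint vector only has to be indexable by
      // identifiers, e.g. a std::vector sized by the largest identifier the tape has assigned:
      //
      //   std::vector<double> adjoints(tape.getParameter(codi::TapeParameters::LargestIdentifier) + 1);
      //   codi::Algorithms<codi::RealReverse>::computeJacobianCustomAdjoints(
      //       tape, tape.getZeroPosition(), tape.getPosition(),
      //       inputIds, inputSize, outputIds, outputSize, jac, adjoints);
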
      /// Compute the Jacobian with multiple tape sweeps. This method uses the global tape.
      template<typename Jac>
      static CODI_INLINE void computeJacobian(Position const& start, Position const& end, Identifier const* input,
                                              size_t const inputSize, Identifier const* output, size_t const outputSize,
                                              Jac& jac,
                                              AdjointsManagement adjointsManagement = AdjointsManagement::Automatic) {
        computeJacobian(Type::getTape(), start, end, input, inputSize, output, outputSize, jac, adjointsManagement);
      }

      /// Compute the Jacobian with multiple tape sweeps using a custom adjoint vector. This method uses the
      /// global tape.
      template<typename Jac, typename AdjointVector>
      static CODI_INLINE void computeJacobianCustomAdjoints(Position const& start, Position const& end,
                                                            Identifier const* input, size_t const inputSize,
                                                            Identifier const* output, size_t const outputSize, Jac& jac,
                                                            AdjointVector&& adjoints) {
        computeJacobianCustomAdjoints<Jac, AdjointVector>(Type::getTape(), start, end, input, inputSize, output,
                                                          outputSize, jac, std::forward<AdjointVector>(adjoints));
      }

      /// Compute the Hessian with multiple tape sweeps.
      template<typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianPrimalValueTape(Tape& tape, Position const& start, Position const& end,
                                                            Identifier const* input, size_t const inputSize,
                                                            Identifier const* output, size_t const outputSize, Hes& hes,
                                                            Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        EvaluationType evalType = getEvaluationChoice(inputSize, outputSize);
        if (EvaluationType::Forward == evalType) {
          computeHessianPrimalValueTapeForward(tape, start, end, input, inputSize, output, outputSize, hes, jac);
        } else if (EvaluationType::Reverse == evalType) {
          computeHessianPrimalValueTapeReverse(tape, start, end, input, inputSize, output, outputSize, hes, jac);
        } else {
          CODI_EXCEPTION("Evaluation mode not implemented. Mode is: %d.", (int)evalType);
        }
      }

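      // Note: the computeHessianPrimalValueTape variants require a primal value tape, since
      // they revert and re-seed primal values directly (revertPrimals, primal). The CoDiPack
      // type is assumed to be second order, i.e. a vector forward mode nested inside the
      // primal value reverse mode, e.g. codi::HessianComputationType from codi.hpp.
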
      /// Forward version of the Hessian computation.
      template<typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianPrimalValueTapeForward(Tape& tape, Position const& start,
                                                                   Position const& end, Identifier const* input,
                                                                   size_t const inputSize, Identifier const* output,
                                                                   size_t const outputSize, Hes& hes,
                                                                   Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        using GT1st = GT;
        size_t constexpr gradDim1st = GT1st::dim;
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        // Assume that the tape was just recorded.
        tape.revertPrimals(start);

        for (size_t j = 0; j < inputSize; j += gradDim2nd) {
          setGradient2ndOnIdentifier(tape, j, input, inputSize, typename GT2nd::Real(1.0));

          // Starting k at j is no problem, it merely evaluates a few more elements around the diagonal.
          for (size_t k = j; k < inputSize; k += gradDim1st) {
            setGradientOnIdentifier(tape, k, input, inputSize, typename GT1st::Real(1.0));

            tape.evaluateForward(start, end);

            for (size_t i = 0; i < outputSize; i += 1) {
              for (size_t vecPos1st = 0; vecPos1st < gradDim1st && k + vecPos1st < inputSize; vecPos1st += 1) {
                for (size_t vecPos2nd = 0; vecPos2nd < gradDim2nd && j + vecPos2nd < inputSize; vecPos2nd += 1) {
                  hes(i, j + vecPos2nd, k + vecPos1st) =
                      GT2nd::at(GT1st::at(tape.getGradient(output[i]), vecPos1st).gradient(), vecPos2nd);
                  hes(i, k + vecPos1st, j + vecPos2nd) = hes(i, j + vecPos2nd, k + vecPos1st);  // Symmetry
                }
              }

              if (j == 0) {
                for (size_t vecPos = 0; vecPos < gradDim1st && k + vecPos < inputSize; vecPos += 1) {
                  jac(i, k + vecPos) = GT1st::at(tape.getGradient(output[i]), vecPos).value();
                }
              }
            }

            setGradientOnIdentifier(tape, k, input, inputSize, typename GT1st::Real());
          }

          setGradient2ndOnIdentifier(tape, j, input, inputSize, typename GT2nd::Real());
        }
      }

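      // In computeHessianPrimalValueTapeForward, the j-loop seeds the inner (second-order)
      // forward direction and the k-loop seeds the tape's forward direction, so each forward
      // sweep yields hes(i, j, k) = d^2 y_i / (dx_j dx_k) for all outputs i at once. The
      // symmetry assignment fills hes(i, k, j), which is why k can start at j.
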
      /// Reverse version of the Hessian computation.
      template<typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianPrimalValueTapeReverse(Tape& tape, Position const& start,
                                                                   Position const& end, Identifier const* input,
                                                                   size_t const inputSize, Identifier const* output,
                                                                   size_t const outputSize, Hes& hes,
                                                                   Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        using GT1st = GT;
        size_t constexpr gradDim1st = GT1st::dim;
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        // Assume that the tape was just recorded.
        tape.revertPrimals(start);

        for (size_t j = 0; j < inputSize; j += gradDim2nd) {
          setGradient2ndOnIdentifier(tape, j, input, inputSize, typename GT2nd::Real(1.0));

          // Propagate the new derivative information.
          tape.evaluatePrimal(start, end);

          for (size_t i = 0; i < outputSize; i += gradDim1st) {
            setGradientOnIdentifier(tape, i, output, outputSize, typename GT1st::Real(1.0));

            // Propagate the derivatives backward for second order derivatives.
            tape.evaluateKeepState(end, start);

            for (size_t k = 0; k < inputSize; k += 1) {
              for (size_t vecPos1st = 0; vecPos1st < gradDim1st && i + vecPos1st < outputSize; vecPos1st += 1) {
                for (size_t vecPos2nd = 0; vecPos2nd < gradDim2nd && j + vecPos2nd < inputSize; vecPos2nd += 1) {
                  hes(i + vecPos1st, j + vecPos2nd, k) =
                      GT2nd::at(GT1st::at(tape.gradient(input[k]), vecPos1st).gradient(), vecPos2nd);
                }
              }

              if (j == 0) {
                for (size_t vecPos1st = 0; vecPos1st < gradDim1st && i + vecPos1st < outputSize; vecPos1st += 1) {
                  jac(i + vecPos1st, k) = GT1st::at(tape.getGradient(input[k]), vecPos1st).value();
                }
              }

              tape.gradient(input[k]) = Gradient();
            }

            setGradientOnIdentifier(tape, i, output, outputSize, typename GT1st::Real());

            if (!Config::ReversalZeroesAdjoints) {
              tape.clearAdjoints(end, start);
            }
          }

          setGradient2ndOnIdentifier(tape, j, input, inputSize, typename GT2nd::Real());

          if (j + gradDim2nd < inputSize) {
            tape.revertPrimals(start);
          }
        }
      }

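      // In computeHessianPrimalValueTapeReverse, the j-loop seeds the inner forward direction
      // and the i-loop seeds the outputs for the reverse sweep, so each reverse sweep yields
      // hes(i, j, k) = d^2 y_i / (dx_j dx_k) for all inputs k at once: the classical
      // forward-over-reverse scheme.
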
      /// Compute the Hessian with multiple tape recordings and sweeps.
      template<typename Func, typename VecIn, typename VecOut, typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessian(Func func, VecIn& input, VecOut& output, Hes& hes,
                                             Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        EvaluationType evalType = getEvaluationChoice(input.size(), output.size());
        if (EvaluationType::Forward == evalType) {
          computeHessianForward(func, input, output, hes, jac);
        } else if (EvaluationType::Reverse == evalType) {
          computeHessianReverse(func, input, output, hes, jac);
        } else {
          CODI_EXCEPTION("Evaluation mode not implemented. Mode is: %d.", (int)evalType);
        }
      }

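      // A minimal usage sketch, assuming T2s is a second-order CoDiPack type and codi::Hessian
      // (the dense storage type from the CoDiPack data tools) is used for the result; func
      // records f itself on every sweep, so no prior tape recording is needed. All names are
      // placeholders.
      //
      //   auto func = [](std::vector<T2s>& x, std::vector<T2s>& y) { y[0] = x[0] * x[1]; };
      //   std::vector<T2s> x(2);
      //   std::vector<T2s> y(1);
      //   codi::Hessian<double> hes(1, 2);
      //   codi::Algorithms<T2s>::computeHessian(func, x, y, hes);
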
      /// Forward version of the Hessian computation with a function object.
      template<typename Func, typename VecIn, typename VecOut, typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianForward(Func func, VecIn& input, VecOut& output, Hes& hes,
                                                    Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        using GT1st = GT;
        size_t constexpr gradDim1st = GT1st::dim;
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        Tape& tape = Type::getTape();

        for (size_t j = 0; j < input.size(); j += gradDim2nd) {
          setGradient2ndOnCoDiValue(j, input.data(), input.size(), typename GT2nd::Real(1.0));

          // Propagate the new derivative information.
          recordTape(func, input, output);

          // Starting k at j is no problem, it merely evaluates a few more elements around the diagonal.
          for (size_t k = j; k < input.size(); k += gradDim1st) {
            setGradientOnCoDiValue(tape, k, input.data(), input.size(), typename GT1st::Real(1.0));

            // Propagate the derivatives forward for second order derivatives.
            tape.evaluateForwardKeepState(tape.getZeroPosition(), tape.getPosition());

            for (size_t i = 0; i < output.size(); i += 1) {
              for (size_t vecPos1st = 0; vecPos1st < gradDim1st && k + vecPos1st < input.size(); vecPos1st += 1) {
                for (size_t vecPos2nd = 0; vecPos2nd < gradDim2nd && j + vecPos2nd < input.size(); vecPos2nd += 1) {
                  hes(i, j + vecPos2nd, k + vecPos1st) = GT2nd::at(
                      GT1st::at(tape.getGradient(output[i].getIdentifier()), vecPos1st).gradient(), vecPos2nd);
                  hes(i, k + vecPos1st, j + vecPos2nd) = hes(i, j + vecPos2nd, k + vecPos1st);  // Symmetry
                }
              }

              if (j == 0) {
                for (size_t vecPos = 0; vecPos < gradDim1st && k + vecPos < input.size(); vecPos += 1) {
                  jac(i, k + vecPos) = GT1st::at(tape.getGradient(output[i].getIdentifier()), vecPos).value();
                }
              }
            }

            setGradientOnCoDiValue(tape, k, input.data(), input.size(), typename GT1st::Real());
          }

          setGradient2ndOnCoDiValue(j, input.data(), input.size(), typename GT2nd::Real());

          tape.reset();
        }
      }

      /// Reverse version of the Hessian computation with a function object.
      template<typename Func, typename VecIn, typename VecOut, typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianReverse(Func func, VecIn& input, VecOut& output, Hes& hes,
                                                    Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        using GT1st = GT;
        size_t constexpr gradDim1st = GT1st::dim;
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        Tape& tape = Type::getTape();

        for (size_t j = 0; j < input.size(); j += gradDim2nd) {
          setGradient2ndOnCoDiValue(j, input.data(), input.size(), typename GT2nd::Real(1.0));

          // Propagate the new derivative information.
          recordTape(func, input, output);

          for (size_t i = 0; i < output.size(); i += gradDim1st) {
            setGradientOnCoDiValue(tape, i, output.data(), output.size(), typename GT1st::Real(1.0));

            // Propagate the derivatives backward for second order derivatives.
            tape.evaluateKeepState(tape.getPosition(), tape.getZeroPosition());

            for (size_t k = 0; k < input.size(); k += 1) {
              for (size_t vecPos1st = 0; vecPos1st < gradDim1st && i + vecPos1st < output.size(); vecPos1st += 1) {
                for (size_t vecPos2nd = 0; vecPos2nd < gradDim2nd && j + vecPos2nd < input.size(); vecPos2nd += 1) {
                  hes(i + vecPos1st, j + vecPos2nd, k) =
                      GT2nd::at(GT1st::at(tape.gradient(input[k].getIdentifier()), vecPos1st).gradient(), vecPos2nd);
                }
              }

              if (j == 0) {
                for (size_t vecPos1st = 0; vecPos1st < gradDim1st && i + vecPos1st < output.size(); vecPos1st += 1) {
                  jac(i + vecPos1st, k) = GT1st::at(tape.getGradient(input[k].getIdentifier()), vecPos1st).value();
                }
              }

              tape.gradient(input[k].getIdentifier()) = Gradient();
            }

            setGradientOnCoDiValue(tape, i, output.data(), output.size(), typename GT1st::Real());

            if (!Config::ReversalZeroesAdjoints) {
              tape.clearAdjoints(tape.getPosition(), tape.getZeroPosition());
            }
          }

          setGradient2ndOnCoDiValue(j, input.data(), input.size(), typename GT2nd::Real());

          tape.reset();
        }
      }

    private:

      /// Seed or clear the gradients of up to GT::dim identifiers, starting at pos.
      template<typename T>
      static CODI_INLINE void setGradientOnIdentifier(
          Tape& tape, size_t const pos, Identifier const* identifiers, size_t const size, T value,
          AdjointsManagement adjointsManagement = AdjointsManagement::Automatic) {
        size_t constexpr gradDim = GT::dim;

        for (size_t curDim = 0; curDim < gradDim && pos + curDim < size; curDim += 1) {
          if (CODI_ENABLE_CHECK(ActiveChecks, tape.isIdentifierActive(identifiers[pos + curDim]))) {
            GT::at(tape.gradient(identifiers[pos + curDim], adjointsManagement), curDim) = value;
          }
        }
      }

      /// Seed or clear the entries of a custom adjoint vector for up to GT::dim identifiers, starting at pos.
      template<typename T, typename Adjoints>
      static CODI_INLINE void setGradientOnIdentifierCustomAdjoints(Tape& tape, size_t const pos,
                                                                    Identifier const* identifiers, size_t const size,
                                                                    T value, Adjoints& adjoints) {
        size_t constexpr gradDim = GT::dim;

        for (size_t curDim = 0; curDim < gradDim && pos + curDim < size; curDim += 1) {
          if (CODI_ENABLE_CHECK(ActiveChecks, tape.isIdentifierActive(identifiers[pos + curDim]))) {
            GT::at(adjoints[identifiers[pos + curDim]], curDim) = value;
          }
        }
      }

      /// Seed or clear the second-order gradients on the primal values of the identifiers.
      template<typename T>
      static CODI_INLINE void setGradient2ndOnIdentifier(Tape& tape, size_t const pos, Identifier const* identifiers,
                                                         size_t const size, T value) {
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        for (size_t curDim = 0; curDim < gradDim2nd && pos + curDim < size; curDim += 1) {
          // No activity check on the identifier required since forward types are used.
          GT2nd::at(tape.primal(identifiers[pos + curDim]).gradient(), curDim) = value;
        }
      }

      /// Seed or clear the gradients of up to GT::dim CoDiPack values, starting at pos.
      template<typename T>
      static CODI_INLINE void setGradientOnCoDiValue(Tape& tape, size_t const pos, Type* identifiers, size_t const size,
                                                     T value) {
        size_t constexpr gradDim = GT::dim;

        for (size_t curDim = 0; curDim < gradDim && pos + curDim < size; curDim += 1) {
          if (CODI_ENABLE_CHECK(ActiveChecks, tape.isIdentifierActive(identifiers[pos + curDim].getIdentifier()))) {
            GT::at(tape.gradient(identifiers[pos + curDim].getIdentifier()), curDim) = value;
          }
        }
      }

      /// Seed or clear the second-order gradients on the primal parts of the CoDiPack values.
      template<typename T>
      static CODI_INLINE void setGradient2ndOnCoDiValue(size_t const pos, Type* identifiers, size_t const size,
                                                        T value) {
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        for (size_t curDim = 0; curDim < gradDim2nd && pos + curDim < size; curDim += 1) {
          // No activity check on the identifier required since forward types are used.
          GT2nd::at(identifiers[pos + curDim].value().gradient(), curDim) = value;
        }
      }

      /// Record the tape for func: registers the inputs, evaluates func, registers the outputs.
      template<typename Func, typename VecIn, typename VecOut>
      static CODI_INLINE void recordTape(Func func, VecIn& input, VecOut& output) {
        Tape& tape = Type::getTape();
        tape.setActive();
        for (size_t curIn = 0; curIn < input.size(); curIn += 1) {
          tape.registerInput(input[curIn]);
        }

        func(input, output);

        for (size_t curOut = 0; curOut < output.size(); curOut += 1) {
          tape.registerOutput(output[curOut]);
        }
        tape.setPassive();
      }
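
      // recordTape drives the function-object based Hessian drivers above. Any callable with a
      // matching signature can be passed, e.g. (names are placeholders):
      //
      //   auto func = [](std::vector<T2s>& x, std::vector<T2s>& y) {
      //     y[0] = sin(x[0]) * x[1];
      //   };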
  };

}