potrf.hpp
//===========================================================================
/*!
 *
 * \brief Bindings of the Cholesky factorization (POTRF) to the ATLAS CLAPACK routines
 *
 * \author O. Krause
 * \date 2011
 *
 *
 * \par Copyright 1995-2015 Shark Development Team
 *
 * <BR><HR>
 * This file is part of Shark.
 * <http://image.diku.dk/shark/>
 *
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark. If not, see <http://www.gnu.org/licenses/>.
 *
 */
//===========================================================================

#ifndef REMORA_KERNELS_ATLAS_POTRF_H
#define REMORA_KERNELS_ATLAS_POTRF_H

#include "../cblas/cblas_inc.hpp"
#include <complex> // std::complex<float>/std::complex<double> overloads below
extern "C"{
	#include <clapack.h>
}

namespace remora {
namespace bindings {

// Overloads forwarding directly to the ATLAS CLAPACK xpotrf routines,
// one per supported value type; each returns the info value of the
// underlying LAPACK call.
inline int potrf(
	CBLAS_ORDER const Order, CBLAS_UPLO const Uplo,
	int const N, float *A, int const lda
) {
	return clapack_spotrf(Order, Uplo, N, A, lda);
}

inline int potrf(
	CBLAS_ORDER const Order, CBLAS_UPLO const Uplo,
	int const N, double *A, int const lda
) {
	return clapack_dpotrf(Order, Uplo, N, A, lda);
}

inline int potrf(
	CBLAS_ORDER const Order, CBLAS_UPLO const Uplo,
	int const N, std::complex<float>* A, int const lda
) {
	return clapack_cpotrf(Order, Uplo, N, static_cast<void *>(A), lda);
}

inline int potrf(
	CBLAS_ORDER const Order, CBLAS_UPLO const Uplo,
	int const N, std::complex<double>* A, int const lda
) {
	return clapack_zpotrf(Order, Uplo, N, static_cast<void *>(A), lda);
}
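
// Illustrative call of the raw binding (a sketch, not part of the original
// header): factor a small symmetric positive definite matrix stored
// column-major. Under the LAPACK info convention the return value is 0 on
// success and k > 0 if the leading k-by-k minor is not positive definite.
//
//   double A[4] = {4.0, 2.0, 2.0, 3.0}; // 2x2 SPD matrix, column-major
//   int info = remora::bindings::potrf(CblasColMajor, CblasLower, 2, A, 2);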

// Optimized dispatcher: computes the Cholesky factorization of a dense,
// symmetric positive definite matrix in place, writing the factor into the
// triangle selected by Triangular.
template <typename Triangular, typename SymmA>
inline int potrf(
	matrix_container<SymmA, cpu_tag>& A,
	std::true_type // dispatch tag: optimized binding available
) {
	CBLAS_UPLO const uplo = Triangular::is_upper ? CblasUpper : CblasLower;
	CBLAS_ORDER const stor_ord =
		(CBLAS_ORDER)storage_order<typename SymmA::orientation>::value;

	std::size_t n = A().size1();
	REMORA_SIZE_CHECK(n == A().size2());

	auto storageA = A().raw_storage();
	return potrf(
		stor_ord, uplo, (int)n,
		storageA.values,
		storageA.leading_dimension
	);
}
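
// Usage sketch (assumptions: a dense remora::matrix<double> and the
// remora::lower structure tag, neither of which is defined in this header):
//
//   remora::matrix<double> A = ...;   // symmetric positive definite
//   int info = remora::bindings::potrf<remora::lower>(A, std::true_type());
//   // on success (info == 0) the lower triangle of A holds the factor L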

// Trait mapping a (storage tag, value type) pair to std::true_type if an
// ATLAS binding for potrf exists and to std::false_type otherwise.
template<class Storage, class T>
struct optimized_potrf_detail {
	typedef std::false_type type;
};
template<>
struct optimized_potrf_detail <
	dense_tag,
	double
> {
	typedef std::true_type type;
};
template<>
struct optimized_potrf_detail <
	dense_tag,
	float
> {
	typedef std::true_type type;
};
template<>
struct optimized_potrf_detail <
	dense_tag,
	std::complex<double>
> {
	typedef std::true_type type;
};
template<>
struct optimized_potrf_detail <
	dense_tag,
	std::complex<float>
> {
	typedef std::true_type type;
};

template<class M>
struct has_optimized_potrf
: public optimized_potrf_detail <
	typename M::storage_type::storage_tag,
	typename M::value_type
> {};
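
// Dispatch sketch (an assumption about how a calling kernel uses this trait;
// the generic fallback overload taking std::false_type is defined elsewhere):
//
//   template<class Triangular, class SymmA>
//   int potrf_dispatch(matrix_container<SymmA, cpu_tag>& A){
//       return potrf<Triangular>(A, typename has_optimized_potrf<SymmA>::type());
//   }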
}}
#endif