PaStiX Handbook  6.2.1
cpucblk_smpi_rhs.c
1 /**
2  *
3  * @file cpucblk_smpi_rhs.c
4  *
5  * Precision dependent routines to manage communications for the solve part.
6  *
7  * @copyright 2015-2021 Bordeaux INP, CNRS (LaBRI UMR 5800), Inria,
8  * Univ. Bordeaux. All rights reserved.
9  *
10  * @version 6.2.0
11  * @author Pierre Ramet
12  * @author Mathieu Faverge
13  * @author Tony Delarue
14  * @date 2021-01-03
15  *
16  * @generated from /builds/solverstack/pastix/kernels/cpucblk_zmpi_rhs.c, normal z -> s, Tue Apr 12 09:38:41 2022
17  *
18  **/
19 #include "common/common.h"
20 #include "blend/solver.h"
21 #include "pastix_scores.h"
22 #include "pastix_slrcores.h"
23 
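The four routines in this file implement one point-to-point exchange per column block: the sender pushes colnbr contiguous entries of the right-hand side starting at cblk->lcolidx, and each message is tagged with the global block number cblk->gcblknum so the matching receive can identify it. The following standalone sketch illustrates that pattern with plain MPI; the two-rank setup, the sizes and the indices are made up for the example, and it uses MPI_FLOAT directly rather than the PASTIX_MPI_FLOAT alias used below.

#include <stdio.h>
#include <mpi.h>

/* Illustrative two-rank sketch of the tag-by-global-block-number pattern
 * used in this file. Run with at least two MPI processes. */
int main( int argc, char **argv )
{
    int   rank;
    int   colnbr   = 4;  /* number of entries in the block (made up)     */
    int   lcolidx  = 0;  /* local start index of the block (made up)     */
    int   gcblknum = 7;  /* global block number, used as the message tag */
    float b[4]     = { 0.f, 0.f, 0.f, 0.f };

    MPI_Init( &argc, &argv );
    MPI_Comm_rank( MPI_COMM_WORLD, &rank );

    if ( rank == 0 ) {
        for ( int i = 0; i < colnbr; i++ ) {
            b[lcolidx + i] = (float)(i + 1);
        }
        MPI_Send( b + lcolidx, colnbr, MPI_FLOAT, 1, gcblknum, MPI_COMM_WORLD );
    }
    else if ( rank == 1 ) {
        MPI_Recv( b + lcolidx, colnbr, MPI_FLOAT, 0, gcblknum,
                  MPI_COMM_WORLD, MPI_STATUS_IGNORE );
        printf( "rank 1 received %.0f %.0f %.0f %.0f\n", b[0], b[1], b[2], b[3] );
    }

    MPI_Finalize();
    return 0;
}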
24 /**
25  *******************************************************************************
26  *
27  * @brief Send the rhs associated with a cblk->lcolidx to the remote node.
28  *
29  *******************************************************************************
30  *
31  * @param[in] solvmtx
32  * The solver matrix holding the communicator.
33  *
34  * @param[in] cblk
35  * The cblk which defines the part to be sent.
36  *
37  * @param[in] b
38  * The rhs that will be sent to the cblk->ownerid.
39  *
40  *******************************************************************************/
41 void
42 cpucblk_ssend_rhs_forward( const SolverMatrix *solvmtx,
43                            SolverCblk         *cblk,
44                            float              *b )
45 {
46 #if defined(PASTIX_WITH_MPI)
47  pastix_int_t colnbr = cblk_colnbr(cblk);
48  int rc;
49 
50  assert( colnbr <= solvmtx->colmax );
51  assert( cblk->cblktype & CBLK_FANIN );
52 
53 #if defined (PASTIX_DEBUG_MPI)
54  fprintf( stderr, "[%2d] RHS Fwd: Send cblk %ld to %2d at index %ld of size %ld\n",
55  solvmtx->clustnum, (long)cblk->gcblknum, cblk->ownerid,
56  (long)cblk->lcolidx, (long)colnbr );
57 #endif
58 
59  rc = MPI_Send( b + cblk->lcolidx, colnbr, PASTIX_MPI_FLOAT,
60  cblk->ownerid, cblk->gcblknum, solvmtx->solv_comm );
61  assert( rc == MPI_SUCCESS );
62 
63  (void)rc;
64 #else
65  (void)solvmtx;
66  (void)cblk;
67  (void)b;
68 #endif
69 }
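A caller would typically invoke this routine once per local fan-in block during the forward solve. The helper below is a hypothetical sketch, not taken from the PaStiX sources; it assumes the cblknbr / cblktab fields of SolverMatrix and simply filters on the CBLK_FANIN flag that the routine asserts.

/* Hypothetical caller sketch: push every local fan-in contribution of the
 * right-hand side to its owner during the forward solve. */
static void
send_all_fanin_rhs( const SolverMatrix *solvmtx, float *b )
{
    SolverCblk  *cblk = solvmtx->cblktab;
    pastix_int_t i;

    for ( i = 0; i < solvmtx->cblknbr; i++, cblk++ ) {
        if ( cblk->cblktype & CBLK_FANIN ) {
            cpucblk_ssend_rhs_forward( solvmtx, cblk, b );
        }
    }
}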
70 
71 /**
72  *******************************************************************************
73  *
74  * @brief Send the rhs associated with a cblk->lcolidx to the remote node.
75  *
76  *******************************************************************************
77  *
78  * @param[in] solvmtx
79  * The solver matrix holding the communicator.
80  *
81  * @param[in] cblk
82  * The cblk which defines the part to be sent.
83  *
84  * @param[in] b
85  * The rhs that will be sent to the cblk->ownerid.
86  *
87  *******************************************************************************/
88 void
89 cpucblk_ssend_rhs_backward( const SolverMatrix *solvmtx,
90                             SolverCblk         *cblk,
91                             float              *b )
92 {
93 #if defined(PASTIX_WITH_MPI)
94  pastix_int_t colnbr = cblk_colnbr(cblk);
95  int rc;
96 
97  assert( colnbr <= solvmtx->colmax );
98  assert( cblk->cblktype & CBLK_RECV );
99 
100 #if defined (PASTIX_DEBUG_MPI)
101  fprintf( stderr, "[%2d] RHS Bwd: Send cblk %ld to %2d at index %ld of size %ld\n",
102  solvmtx->clustnum, (long)cblk->gcblknum, cblk->ownerid,
103  (long)cblk->lcolidx, (long)colnbr );
104 #endif
105 
106  rc = MPI_Send( b + cblk->lcolidx, colnbr, PASTIX_MPI_FLOAT,
107  cblk->ownerid, cblk->gcblknum, solvmtx->solv_comm );
108 
109  assert( rc == MPI_SUCCESS );
110  (void)rc;
111 #else
112  (void)solvmtx;
113  (void)cblk;
114  (void)b;
115 #endif
116 }
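In the backward sweep the direction of the exchange is reversed: as the assertions show, the sending side now holds a CBLK_RECV block while the receiving side (cpucblk_srecv_rhs_backward below) holds the matching CBLK_FANIN block, the two being paired by the cblk->gcblknum tag. The dispatch below is a hypothetical sketch, not part of the PaStiX API; it only mirrors the block types asserted in this file.

/* Hypothetical sketch: choose the backward-solve communication primitive
 * according to the block type, mirroring the assertions in this file. */
static void
exchange_rhs_backward( const SolverMatrix *solvmtx, SolverCblk *cblk, float *b )
{
    if ( cblk->cblktype & CBLK_RECV ) {
        /* This side pushes its copy of the block to the peer. */
        cpucblk_ssend_rhs_backward( solvmtx, cblk, b );
    }
    else if ( cblk->cblktype & CBLK_FANIN ) {
        /* This side waits for the peer's copy (routine defined below). */
        cpucblk_srecv_rhs_backward( solvmtx, cblk, b );
    }
}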
117 
118 /**
119  *******************************************************************************
120  *
121  * @brief Receive the rhs associated with a cblk->lcolidx from the remote node.
122  *
123  *******************************************************************************
124  *
125  * @param[in] solvmtx
126  * The solver matrix holding the communicator.
127  *
128  * @param[in] cblk
129  * The cblk which may define the part to be received.
130  *
131  * @param[inout] b
132  * The rhs that will be received from the cblk->ownerid.
133  *
134  *******************************************************************************/
135 void
136 cpucblk_srecv_rhs_backward( const SolverMatrix *solvmtx,
137                             SolverCblk         *cblk,
138                             float              *b )
139 {
140 #if defined(PASTIX_WITH_MPI)
141  MPI_Status status;
142  pastix_int_t colnbr = cblk_colnbr(cblk);
143  int rc;
144 
145  assert( colnbr <= solvmtx->colmax );
146  assert( cblk->cblktype & CBLK_FANIN );
147 
148 #if defined (PASTIX_DEBUG_MPI)
149  fprintf( stderr, "[%2d] RHS Bwd: Recv cblk %ld from %ld at index %ld of size %ld\n",
150  solvmtx->clustnum, (long)cblk->gcblknum, (long)cblk->ownerid,
151  (long)cblk->lcolidx, (long)colnbr );
152 #endif
153 
154  rc = MPI_Recv( b + cblk->lcolidx, colnbr, PASTIX_MPI_FLOAT,
155  cblk->ownerid, cblk->gcblknum, solvmtx->solv_comm, &status );
156  assert( rc == MPI_SUCCESS );
157 
158 #if defined (PASTIX_DEBUG_MPI)
159  fprintf( stderr, "[%2d] RHS Bwd: Received cblk %ld from %2d\n",
160  solvmtx->clustnum, (long)cblk->gcblknum, status.MPI_SOURCE );
161 #endif
162 
163  (void)rc;
164 #else
165  (void)solvmtx;
166  (void)cblk;
167  (void)b;
168 #endif
169 }
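Unlike the backward receive above, which writes the incoming data directly over b + cblk->lcolidx, the forward receive below goes through a temporary buffer and then accumulates into b, presumably because the local entries may already hold a partial sum. The snippet below is a generic illustration of the two receive styles in plain MPI, unrelated to the PaStiX data structures; the function names and parameters are made up.

#include <mpi.h>

/* Overwriting receive: the incoming block replaces the local values. */
static void recv_overwrite( float *b, int idx, int n, int src, int tag )
{
    MPI_Recv( b + idx, n, MPI_FLOAT, src, tag,
              MPI_COMM_WORLD, MPI_STATUS_IGNORE );
}

/* Accumulating receive: the incoming block is added to the local values,
 * so it must first land in a scratch buffer. */
static void recv_accumulate( float *b, int idx, int n, int src, int tag, float *work )
{
    MPI_Recv( work, n, MPI_FLOAT, src, tag,
              MPI_COMM_WORLD, MPI_STATUS_IGNORE );
    for ( int i = 0; i < n; i++ ) {
        b[idx + i] += work[i];
    }
}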
170 
171 /**
172  *******************************************************************************
173  *
174  * @brief Receive the rhs associated with a cblk->lcolidx from the remote node.
175  *
176  *******************************************************************************
177  *
178  * @param[in] solvmtx
179  * The solver matrix holding the communicator.
180  *
181  * @param[in] cblk
182  * The cblk which may define the part to be received.
183  *
184  * @param[inout] work
185  * The temporary buffer used to receive the remote data.
186  *
187  * @param[inout] b
188  * The rhs that will be updated by the reception.
189  *
190  * @param[in] ldb
191  * The leading dimension of the matrix b.
192  *
193  *******************************************************************************/
194 void
195 cpucblk_srecv_rhs_forward( const SolverMatrix *solvmtx,
196                            SolverCblk         *cblk,
197                            float              *work,
198                            pastix_int_t        nrhs,
199                            float              *b,
200                            pastix_int_t        ldb )
201 {
202 #if defined(PASTIX_WITH_MPI)
203  MPI_Status status;
204  pastix_int_t colnbr = cblk_colnbr(cblk);
205  int rc;
206 
207  assert( colnbr <= solvmtx->colmax );
208  assert( cblk->cblktype & CBLK_RECV );
209 
210 #if defined (PASTIX_DEBUG_MPI)
211  fprintf( stderr, "[%2d] RHS Fwd: Recv cblk %ld from %ld at index %ld of size %ld\n",
212  solvmtx->clustnum, (long)cblk->gcblknum, (long)cblk->ownerid,
213  (long)cblk->lcolidx, (long)colnbr );
214 #endif
215 
216  rc = MPI_Recv( work, colnbr, PASTIX_MPI_FLOAT,
217  cblk->ownerid, cblk->gcblknum, solvmtx->solv_comm, &status );
218  assert( rc == MPI_SUCCESS );
219 
220 #if defined (PASTIX_DEBUG_MPI)
221  fprintf( stderr, "[%2d] RHS Fwd: Received cblk %ld from %2d\n",
222  solvmtx->clustnum, (long)cblk->gcblknum, status.MPI_SOURCE );
223 #endif
224 
225  core_sgeadd( PastixNoTrans, colnbr, nrhs,
226  1., work, ldb,
227  1., b + cblk->lcolidx, ldb );
228 
229  (void)rc;
230 #else
231  (void)solvmtx;
232  (void)cblk;
233  (void)work;
234  (void)nrhs;
235  (void)b;
236  (void)ldb;
237 #endif
238 }
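The accumulation at the end of cpucblk_srecv_rhs_forward relies on core_sgeadd with both scaling factors set to 1, i.e. a plain addition of the colnbr-by-nrhs received block into the matching rows of b (nrhs, which is not documented in the header above, is the number of right-hand-side columns). For illustration only, the call is equivalent to the loop below, written with the same leading dimension ldb for both the scratch buffer and b, exactly as in the call above.

/* Element-wise restatement (illustration only) of
 * core_sgeadd( PastixNoTrans, colnbr, nrhs,
 *              1., work, ldb, 1., b + cblk->lcolidx, ldb ) */
{
    pastix_int_t i, j;
    float       *bptr = b + cblk->lcolidx;

    for ( j = 0; j < nrhs; j++ ) {
        for ( i = 0; i < colnbr; i++ ) {
            bptr[ j * ldb + i ] += work[ j * ldb + i ];
        }
    }
}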