PaStiX Handbook  6.3.2
sequential_ctrsm.c
1 /**
2  *
3  * @file sequential_ctrsm.c
4  *
5  * @copyright 2012-2023 Bordeaux INP, CNRS (LaBRI UMR 5800), Inria,
6  * Univ. Bordeaux. All rights reserved.
7  *
8  * @version 6.3.2
9  * @author Pascal Henon
10  * @author Xavier Lacoste
11  * @author Pierre Ramet
12  * @author Mathieu Faverge
13  * @author Tony Delarue
14  * @author Vincent Bridonneau
15  * @author Alycia Lisito
16  * @date 2023-07-29
17  *
18  * @generated from /builds/solverstack/pastix/sopalin/sequential_ztrsm.c, normal z -> c, Wed Dec 13 12:09:47 2023
19  *
20  **/
21 #include "common.h"
22 #include "blend/solver.h"
23 #include "bcsc/bcsc.h"
24 #include "bcsc/bcsc_c.h"
25 #include "sopalin/sopalin_data.h"
26 #include "pastix_ccores.h"
27 
28 #if defined(PASTIX_WITH_STARPU)
29 #include "starpu/pastix_cstarpu.h"
30 #endif
31 
32 #if defined(PASTIX_WITH_MPI)
33 #include "sopalin/coeftab.h"
34 #endif
35 
36 /**
37  * @brief Arguments for the solve.
38  */
39 struct args_ctrsm_t
40 {
41  pastix_data_t *pastix_data;
42  const args_solve_t *enum_list;
43  sopalin_data_t *sopalin_data;
44  pastix_rhs_t rhsb;
45  volatile int32_t taskcnt;
46 };
47 
48 /**
49  *******************************************************************************
50  *
51  * @brief Applies the Sequential Forward or Backward solve.
52  *
53  *******************************************************************************
54  *
55  * @param[in] pastix_data
56  * The pastix_data structure.
57  *
58  * @param[in] enums
59  * Enums needed for the solve.
60  *
61  * @param[in] sopalin_data
62  * The sopalin_data structure that holds the PaStiX SolverMatrix.
63  *
64  * @param[in] rhsb
65  * The pointer to the rhs data structure that holds the vectors of the
66  * right hand side.
67  *
68  *******************************************************************************/
69 void
70 sequential_ctrsm( pastix_data_t *pastix_data,
71  const args_solve_t *enums,
72  sopalin_data_t *sopalin_data,
73  pastix_rhs_t rhsb )
74 {
75  SolverMatrix *datacode = sopalin_data->solvmtx;
76  SolverCblk *cblk;
77  pastix_int_t i, cblknbr;
78 
79  /* Backward like */
80  if ( enums->solve_step == PastixSolveBackward ) {
81  cblknbr = (enums->mode == PastixSolvModeLocal) ? datacode->cblkschur : datacode->cblknbr;
82 
83  cblk = datacode->cblktab + cblknbr - 1;
84  for (i=0; i<cblknbr; i++, cblk--){
85  if( cblk->cblktype & CBLK_RECV ){
86  cpucblk_csend_rhs_backward( datacode, cblk, rhsb );
87  continue;
88  }
89 
90  if( cblk->cblktype & CBLK_FANIN ){
91  cpucblk_crecv_rhs_backward( datacode, cblk, rhsb );
92  }
93 
94  solve_cblk_ctrsmsp_backward( enums, datacode, cblk, rhsb );
95  }
96  }
97  /* Forward like */
98  else {
99  pastix_complex32_t *work;
100  MALLOC_INTERN( work, datacode->colmax * rhsb->n, pastix_complex32_t );
101 
102  cblknbr = (enums->mode == PastixSolvModeSchur) ? datacode->cblknbr : datacode->cblkschur;
103  cblk = datacode->cblktab;
104  for (i=0; i<cblknbr; i++, cblk++){
105  if( cblk->cblktype & CBLK_FANIN ){
106  cpucblk_csend_rhs_forward( datacode, cblk, rhsb );
107  continue;
108  }
109 
110  if( cblk->cblktype & CBLK_RECV ) {
111  cpucblk_crecv_rhs_forward( datacode, cblk, work, rhsb );
112  continue;
113  }
114 
115  solve_cblk_ctrsmsp_forward( enums, datacode, cblk, rhsb );
116  }
117 
118  memFree_null(work);
119  }
120 
121 #if !defined(NDEBUG)
122  {
123  pastix_int_t nbbuffers = datacode->faninnbr + datacode->recvnbr;
124  int i;
125  for( i=0; i<nbbuffers; i++ ) {
126  assert( rhsb->cblkb[i] == NULL );
127  }
128  }
129 #endif
130  (void)pastix_data;
131 }
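The temporary buffer allocated in the forward branch is sized for the worst case: any received contribution has to fit into it, so it holds the widest local column block (colmax rows, assuming colmax is the maximum column-block width, as the field name suggests) times the number of right-hand sides. A minimal sizing sketch, using plain malloc only for illustration (the file itself uses MALLOC_INTERN):

    /* Sketch of the workspace sizing used by the forward sweep above (illustration only). */
    size_t worksize = (size_t)datacode->colmax * (size_t)rhsb->n;
    pastix_complex32_t *work = malloc( worksize * sizeof(pastix_complex32_t) );
    /* ... receive remote contributions into work, one cblk at a time ... */
    free( work );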
132 
133 /**
134  *******************************************************************************
135  *
136  * @brief Applies the Static Forward or Backward solve.
137  *
138  *******************************************************************************
139  *
140  * @param[in] ctx
141  * Thread structure of the execution context of one instance of the
142  * scheduler.
143  *
144  * @param[in] args
145  * Arguments for the Static solve.
146  *
147  *******************************************************************************/
148 void
149 thread_ctrsm_static( isched_thread_t *ctx,
150  void *args )
151 {
152  struct args_ctrsm_t *arg = (struct args_ctrsm_t*)args;
153  sopalin_data_t *sopalin_data = arg->sopalin_data;
154  SolverMatrix *datacode = sopalin_data->solvmtx;
155  pastix_rhs_t rhsb = arg->rhsb;
156  const args_solve_t *enums = arg->enum_list;
157  pastix_int_t thrd_size = (pastix_int_t)ctx->global_ctx->world_size;
158  pastix_int_t thrd_rank = (pastix_int_t)ctx->rank;
159  SolverCblk *cblk;
160  Task *t;
161  pastix_int_t i, ii;
162  pastix_int_t tasknbr, *tasktab;
163  pastix_int_t cblkfirst, cblklast;
164 
165  /* Computes range to update the ctrbnbr */
166  cblkfirst = (datacode->cblknbr / thrd_size ) * thrd_rank;
167  cblklast = (datacode->cblknbr / thrd_size ) * (thrd_rank + 1);
168  if ( thrd_rank == (thrd_size-1) ) {
169  cblklast = datacode->cblknbr;
170  }
171 
172  tasknbr = datacode->ttsknbr[thrd_rank];
173  tasktab = datacode->ttsktab[thrd_rank];
174 
175  /* Backward like */
176  if ( enums->solve_step == PastixSolveBackward ) {
177  /* Init ctrbcnt in parallel */
178  cblk = datacode->cblktab + cblkfirst;
179  for (ii=cblkfirst; ii<cblklast; ii++, cblk++) {
180  if ( (cblk->cblktype & CBLK_IN_SCHUR) && (enums->mode != PastixSolvModeSchur) ) {
181  cblk->ctrbcnt = 0;
182  }
183  else {
184  cblk->ctrbcnt = cblk[1].fblokptr - cblk[0].fblokptr - 1;
185  }
186  }
187  isched_barrier_wait( &(ctx->global_ctx->barrier) );
188 
189  for (ii=tasknbr-1; ii>=0; ii--) {
190  i = tasktab[ii];
191  t = datacode->tasktab + i;
192  cblk = datacode->cblktab + t->cblknum;
193 
194  /* Wait for incoming dependencies */
195  if ( cpucblk_cincoming_rhs_bwd_deps( thrd_rank, enums, datacode, cblk, rhsb ) ) {
196  continue;
197  }
198 
199  /* Computes */
200  solve_cblk_ctrsmsp_backward( enums, datacode, cblk, rhsb );
201  }
202  }
203  /* Forward like */
204  else {
205  /* Init ctrbcnt in parallel */
206  cblk = datacode->cblktab + cblkfirst;
207  for (ii=cblkfirst; ii<cblklast; ii++, cblk++) {
208  cblk->ctrbcnt = cblk[1].brownum - cblk[0].brownum;
209  }
210  isched_barrier_wait( &(ctx->global_ctx->barrier) );
211 
212  for (ii=0; ii<tasknbr; ii++) {
213  i = tasktab[ii];
214  t = datacode->tasktab + i;
215  cblk = datacode->cblktab + t->cblknum;
216 
217  if ( (cblk->cblktype & CBLK_IN_SCHUR) &&
218  (enums->mode != PastixSolvModeSchur) ) {
219  continue;
220  }
221 
222  /* Wait for incoming dependencies */
223  if ( cpucblk_cincoming_rhs_fwd_deps( thrd_rank, enums,
224  datacode, cblk, rhsb ) ) {
225  continue;
226  }
227  /* Computes */
228  solve_cblk_ctrsmsp_forward( enums, datacode, cblk, rhsb );
229  }
230  }
231 }
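The cblkfirst/cblklast computation at the top of thread_ctrsm_static (and of thread_ctrsm_dynamic below) hands each thread an equal chunk of cblknbr / thrd_size column blocks for the counter-initialization pass, with the last thread absorbing the remainder. A self-contained sketch of the same arithmetic; cblk_range is a hypothetical helper introduced here only to illustrate the split (for cblknbr = 10 and thrd_size = 4 the ranges are [0,2), [2,4), [4,6) and [6,10)):

    /* Hypothetical helper mirroring the range split used above (illustration only). */
    static void
    cblk_range( pastix_int_t cblknbr, pastix_int_t thrd_size, pastix_int_t thrd_rank,
                pastix_int_t *cblkfirst, pastix_int_t *cblklast )
    {
        pastix_int_t chunk = cblknbr / thrd_size;       /* equal share per thread        */
        *cblkfirst = chunk * thrd_rank;
        *cblklast  = ( thrd_rank == thrd_size - 1 )
                   ? cblknbr                            /* last thread takes the rest    */
                   : chunk * ( thrd_rank + 1 );
    }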
232 
233 /**
234  *******************************************************************************
235  *
236  * @brief Applies the Static Forward or Backward solve.
237  *
238  *******************************************************************************
239  *
240  * @param[in] pastix_data
241  * The pastix_data structure.
242  *
243  * @param[in] enums
244  * Enums needed for the solve.
245  *
246  * @param[in] sopalin_data
247  * The sopalin_data structure that holds the PaStiX SolverMatrix.
248  *
249  * @param[in] rhsb
250  * The pointer to the rhs data structure that holds the vectors of the
251  * right hand side.
252  *
253  *******************************************************************************/
254 void
255 static_ctrsm( pastix_data_t *pastix_data,
256  const args_solve_t *enums,
257  sopalin_data_t *sopalin_data,
258  pastix_rhs_t rhsb )
259 {
260  struct args_ctrsm_t args_ctrsm = { pastix_data, enums, sopalin_data, rhsb, 0 };
261  isched_parallel_call( pastix_data->isched, thread_ctrsm_static, &args_ctrsm );
262 }
263 
264 /**
265  *******************************************************************************
266  *
267  * @brief Applies the Dynamic Forward or Backward solve.
268  *
269  *******************************************************************************
270  *
271  * @param[in] ctx
272  * Thread structure of the execution context of one instance of the
273  * scheduler.
274  *
275  * @param[in] args
276  * Arguments for the Dynamic solve.
277  *
278  *******************************************************************************/
279 void
280 thread_ctrsm_dynamic( isched_thread_t *ctx,
281  void *args )
282 {
283  struct args_ctrsm_t *arg = (struct args_ctrsm_t*)args;
284  pastix_data_t *pastix_data = arg->pastix_data;
285  sopalin_data_t *sopalin_data = arg->sopalin_data;
286  SolverMatrix *datacode = sopalin_data->solvmtx;
287  const args_solve_t *enums = arg->enum_list;
288  pastix_rhs_t rhsb = arg->rhsb;
289  pastix_int_t thrd_size = (pastix_int_t)ctx->global_ctx->world_size;
290  pastix_int_t thrd_rank = (pastix_int_t)ctx->rank;
291  int32_t local_taskcnt = 0;
292  SolverCblk *cblk;
293  pastix_queue_t *computeQueue;
294  pastix_int_t ii;
295  pastix_int_t tasknbr;
296  pastix_int_t cblkfirst, cblklast, cblknum;
297 
298  /* Computes range to update the ctrbnbr */
299  cblkfirst = (datacode->cblknbr / thrd_size ) * thrd_rank;
300  cblklast = (datacode->cblknbr / thrd_size ) * (thrd_rank + 1);
301  if ( thrd_rank == (thrd_size-1) ) {
302  cblklast = datacode->cblknbr;
303  }
304 
305  MALLOC_INTERN( datacode->computeQueue[thrd_rank], 1, pastix_queue_t );
306 
307  tasknbr = datacode->ttsknbr[thrd_rank];
308  computeQueue = datacode->computeQueue[thrd_rank];
309  pqueueInit( computeQueue, tasknbr );
310 
311  /* Backward like */
312  if ( enums->solve_step == PastixSolveBackward ) {
313  /* Init ctrbcnt in parallel */
314  cblk = datacode->cblktab + cblkfirst;
315  for (ii=cblkfirst; ii<cblklast; ii++, cblk++) {
316  if ( (cblk->cblktype & CBLK_IN_SCHUR) && (enums->mode != PastixSolvModeSchur) ) {
317  cblk->ctrbcnt = 0;
318  }
319  else {
320  cblk->ctrbcnt = cblk[1].fblokptr - cblk[0].fblokptr - 1;
321  }
322  if ( !(cblk->ctrbcnt) && !(cblk->cblktype & (CBLK_FANIN | CBLK_RECV)) ) {
323  pqueuePush1( computeQueue, ii, - cblk->priority );
324  }
325  }
326  isched_barrier_wait( &(ctx->global_ctx->barrier) );
327 
328  while( arg->taskcnt > 0 ) {
329  cblknum = pqueuePop(computeQueue);
330 
331 #if defined(PASTIX_WITH_MPI)
332  /* Nothing to do, let's make progress on communications */
333  if ( ( pastix_data->inter_node_procnbr > 1 ) && ( cblknum == -1 ) ) {
334  cpucblk_cmpi_rhs_bwd_progress( enums, datacode, rhsb, thrd_rank );
335  cblknum = pqueuePop(computeQueue);
336  }
337 #endif
338 
339  /* No more local job, let's steal our neighbors */
340  if ( cblknum == -1 ) {
341  if ( local_taskcnt ) {
342  pastix_atomic_sub_32b( &(arg->taskcnt), local_taskcnt );
343  local_taskcnt = 0;
344  }
345  cblknum = stealQueue( datacode, thrd_rank, thrd_size );
346  }
347 
348  /* Still no job, let's loop again */
349  if ( cblknum == -1 ) {
350  continue;
351  }
352 
353  cblk = datacode->cblktab + cblknum;
354  cblk->threadid = thrd_rank;
355 
356  /* Computes */
357  solve_cblk_ctrsmsp_backward( enums, datacode, cblk, rhsb );
358  local_taskcnt++;
359  }
360  }
361  /* Forward like */
362  else {
363  /* Init ctrbcnt in parallel */
364  cblk = datacode->cblktab + cblkfirst;
365  for (ii=cblkfirst; ii<cblklast; ii++, cblk++) {
366  cblk->ctrbcnt = cblk[1].brownum - cblk[0].brownum;
367  if ( !(cblk->ctrbcnt) ) {
368  if (!(cblk->cblktype & (CBLK_FANIN|CBLK_RECV)) ) {
369  pqueuePush1( computeQueue, ii, cblk->priority );
370  }
371  }
372  }
373  isched_barrier_wait( &(ctx->global_ctx->barrier) );
374 
375  while( arg->taskcnt > 0 ) {
376  cblknum = pqueuePop(computeQueue);
377 
378 #if defined(PASTIX_WITH_MPI)
379  /* Nothing to do, let's make progress on communications */
380  if ( ( pastix_data->inter_node_procnbr > 1 ) && ( cblknum == -1 ) ) {
381  cpucblk_cmpi_rhs_fwd_progress( enums, datacode, rhsb, thrd_rank );
382  cblknum = pqueuePop(computeQueue);
383  }
384 #endif
385 
386  /* No more local job, let's steal our neighbors */
387  if ( cblknum == -1 ) {
388  if ( local_taskcnt ) {
389  pastix_atomic_sub_32b( &(arg->taskcnt), local_taskcnt );
390  local_taskcnt = 0;
391  }
392  cblknum = stealQueue( datacode, thrd_rank,
393  thrd_size );
394  }
395 
396  /* Still no job, let's loop again */
397  if ( cblknum == -1 ) {
398  continue;
399  }
400 
401  cblk = datacode->cblktab + cblknum;
402  cblk->threadid = thrd_rank;
403 
404  if ( (cblk->cblktype & CBLK_IN_SCHUR) &&
405  (enums->mode != PastixSolvModeSchur) ) {
406  continue;
407  }
408 
409  /* Computes */
410  solve_cblk_ctrsmsp_forward( enums, datacode, cblk, rhsb );
411  local_taskcnt++;
412  }
413  }
414  /* Make sure that everyone is done before freeing */
415  isched_barrier_wait( &(ctx->global_ctx->barrier) );
416  assert( computeQueue->used == 0 );
417  pqueueExit( computeQueue );
418  memFree_null( computeQueue );
419 
420  (void)pastix_data;
421 }
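Both dependency-seeding loops above reduce to a simple count: in the forward sweep a column block waits for one contribution per block listed in its browtab slice, while in the backward sweep it waits for one contribution per off-diagonal block of its own column, the -1 removing the diagonal block. A short illustration of the two counts, assuming the usual PaStiX meaning of brownum and fblokptr:

    /* Illustration only: number of incoming dependencies of one cblk.                     */
    pastix_int_t fwd_deps = cblk[1].brownum  - cblk[0].brownum;      /* updates in browtab */
    pastix_int_t bwd_deps = cblk[1].fblokptr - cblk[0].fblokptr - 1; /* off-diagonal blocks,
                                                                        diagonal excluded  */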
422 
423 /**
424  *******************************************************************************
425  *
426  * @brief Applies the Dynamic Forward or Backward solve.
427  *
428  *******************************************************************************
429  *
430  * @param[in] pastix_data
431  * The pastix_data structure.
432  *
433  * @param[in] enums
434  * Enums needed for the solve.
435  *
436  * @param[in] sopalin_data
437  * The sopalin_data structure that holds the PaStiX SolverMatrix.
438  *
439  * @param[in] rhsb
440  * The pointer to the rhs data structure that holds the vectors of the
441  * right hand side.
442  *
443  *******************************************************************************/
444 void
445 dynamic_ctrsm( pastix_data_t *pastix_data,
446  const args_solve_t *enums,
447  sopalin_data_t *sopalin_data,
448  pastix_rhs_t rhsb )
449 {
450  SolverMatrix *datacode = sopalin_data->solvmtx;
451  int32_t taskcnt = datacode->tasknbr - (datacode->cblknbr - datacode->cblkschur);
452  struct args_ctrsm_t args_ctrsm = { pastix_data, enums, sopalin_data, rhsb, taskcnt };
453 
454  /* Reintroduce Schur tasks in the counter for backward */
455  if ( enums->solve_step == PastixSolveBackward ) {
456  args_ctrsm.taskcnt = datacode->cblknbr - datacode->recvnbr;
457  }
458 
459  /* Allocates the computeQueue */
460  MALLOC_INTERN( datacode->computeQueue,
461  pastix_data->isched->world_size, pastix_queue_t * );
462 
463  isched_parallel_call( pastix_data->isched, thread_ctrsm_dynamic, &args_ctrsm );
464 
465  memFree_null( datacode->computeQueue );
466 }
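The initial value of taskcnt is the number of solve tasks the workers will actually perform: the forward default subtracts the Schur column blocks (indices cblkschur to cblknbr), which the forward sweep skips, while the backward value counts every local column block except the reception ones. A worked example with made-up counts, assuming cblkschur is the index of the first Schur column block and recvnbr the number of CBLK_RECV blocks:

    /* Made-up counts, illustration only:
     *   tasknbr = 100, cblknbr = 80, cblkschur = 75, recvnbr = 10            */
    int32_t taskcnt_fwd = 100 - ( 80 - 75 );  /* = 95: Schur cblks are skipped */
    int32_t taskcnt_bwd =  80 - 10;           /* = 70: every cblk but RECV ones */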
467 
468 #if defined(PASTIX_WITH_MPI)
469 /**
470  *******************************************************************************
471  *
472  * @brief Applies the Runtime Forward or Backward solve.
473  *
474  *******************************************************************************
475  *
476  * @param[in] pastix_data
477  * The pastix_data structure.
478  *
479  * @param[in] enums
480  * Enums needed for the solve.
481  *
482  * @param[in] sopalin_data
483  * The sopalin_data structure that holds the PaStiX SolverMatrix.
484  *
485  * @param[in] rhsb
486  * The pointer to the rhs data structure that holds the vectors of the
487  * right hand side.
488  *
489  *******************************************************************************/
490 void
491 runtime_ctrsm( pastix_data_t *pastix_data,
492  const args_solve_t *enums,
493  sopalin_data_t *sopalin_data,
494  pastix_rhs_t rhsb )
495 {
496  SolverMatrix *datacode = sopalin_data->solvmtx;
497  SolverCblk *cblk;
498  pastix_int_t i, cblknbr;
499 
500  /* Collect the matrix on node 0 */
501  coeftab_gather( datacode, datacode->solv_comm, 0, PastixComplex32 );
502 
503  if ( sopalin_data->solvmtx->clustnum == 0 ) {
504 
505  /* Backward like */
506  if ( enums->solve_step == PastixSolveBackward ) {
507  cblknbr = (enums->mode == PastixSolvModeLocal) ? datacode->cblkschur : datacode->cblknbr;
508 
509  cblk = datacode->cblktab + cblknbr - 1;
510  for ( i=0; i<cblknbr; i++, cblk-- ) {
511  assert( !(cblk->cblktype & (CBLK_FANIN | CBLK_RECV)) );
512  solve_cblk_ctrsmsp_backward( enums, datacode, cblk, rhsb );
513  }
514  }
515  /* Forward like */
516  else {
517  cblknbr = (enums->mode == PastixSolvModeSchur) ? datacode->cblknbr : datacode->cblkschur;
518  cblk = datacode->cblktab;
519  for (i=0; i<cblknbr; i++, cblk++){
520  solve_cblk_ctrsmsp_forward( enums, datacode, cblk, rhsb );
521  }
522  }
523 
524  /* Free the gathered coefficients of the matrix */
525  coeftab_nullify( datacode );
526  }
527  else {
528  memset( rhsb->b, 0, rhsb->ld * rhsb->n * sizeof(pastix_complex32_t) );
529  }
530 
531  bvec_callreduce( pastix_data, rhsb->b );
532 }
533 #endif
534 
535 #ifndef DOXYGEN_SHOULD_SKIP_THIS
536 static void (*ctrsm_table[5])(pastix_data_t *, const args_solve_t *,
537  sopalin_data_t *, pastix_rhs_t) =
538 {
539  sequential_ctrsm,
540  static_ctrsm,
541 #if defined(PASTIX_WITH_PARSEC)
542  NULL, /* parsec_ctrsm not yet implemented */
543 #else
544  NULL,
545 #endif
546 #if defined(PASTIX_WITH_STARPU)
547  starpu_ctrsm,
548 #else
549  NULL,
550 #endif
551  dynamic_ctrsm,
552 };
553 #endif /* DOXYGEN_SHOULD_SKIP_THIS */
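The table is indexed directly by IPARM_SCHEDULER, so each slot must follow the order of the scheduler enum; a NULL slot makes sopalin_ctrsm below fall back to static_ctrsm. The assumed mapping, sketched here for reference only:

    /* Assumed slot <-> scheduler mapping for ctrsm_table (illustration only):
     *   [0] PastixSchedSequential -> sequential_ctrsm
     *   [1] PastixSchedStatic     -> static_ctrsm
     *   [2] PastixSchedParsec     -> NULL (falls back to static_ctrsm)
     *   [3] PastixSchedStarPU     -> starpu_ctrsm, or NULL without StarPU support
     *   [4] PastixSchedDynamic    -> dynamic_ctrsm                            */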
554 
555 /**
556  *******************************************************************************
557  *
558  * @brief Calls the sequential, static, dynamic or runtime solve according to
559  * scheduler.
560  *
561  *******************************************************************************
562  *
563  * @param[in] pastix_data
564  * The pastix_data structure.
565  *
566  * @param[in] side
567  * Specify whether the off-diagonal blocks appear on the left or right
568  * in the equation. It has to be either PastixLeft or PastixRight.
569  *
570  * @param[in] uplo
571  * Specify whether the off-diagonal blocks are upper or lower
572  * triangular. It has to be either PastixUpper or PastixLower.
573  *
574  * @param[in] trans
575  * Specify the transposition used for the off-diagonal blocks. It has
576  * to be either PastixTrans or PastixConjTrans.
577  *
578  * @param[in] diag
579  * Specify if the off-diagonal blocks are unit triangular. It has to be
580  * either PastixUnit or PastixNonUnit.
581  *
582  * @param[in] sopalin_data
583  * The sopalin_data structure that holds the PaStiX SolverMatrix.
584  *
585  * @param[in] rhsb
586  * The pointer to the rhs data structure that holds the vectors of the
587  * right hand side.
588  *
589  *******************************************************************************/
590 void
591 sopalin_ctrsm( pastix_data_t *pastix_data,
592  pastix_side_t side,
593  pastix_uplo_t uplo,
594  pastix_trans_t trans,
595  pastix_diag_t diag,
596  sopalin_data_t *sopalin_data,
597  pastix_rhs_t rhsb )
598 {
599  int sched = pastix_data->iparm[IPARM_SCHEDULER];
600  void (*ctrsm)( pastix_data_t *, const args_solve_t *,
601  sopalin_data_t *, pastix_rhs_t ) = ctrsm_table[ sched ];
602  solve_step_t solve_step = compute_solve_step( side, uplo, trans );
603  args_solve_t *enum_list = malloc( sizeof( args_solve_t ) );
604 
605  enum_list->solve_step = solve_step;
606  enum_list->mode = pastix_data->iparm[IPARM_SCHUR_SOLV_MODE];
607  enum_list->side = side;
608  enum_list->uplo = uplo;
609  enum_list->trans = trans;
610  enum_list->diag = diag;
611 
612  if (ctrsm == NULL) {
613  ctrsm = static_ctrsm;
614  }
615 
616  /* parsec_ctrsm and starpu_ctrsm are not yet implemented for the MPI case; runtime_ctrsm is
617  used instead for the StarPU and PaRSEC schedulers with MPI, in both distributed and replicated cases */
618 #if defined ( PASTIX_WITH_MPI )
619  if( pastix_data->inter_node_procnbr > 1 ) {
620  if( (sched == PastixSchedStarPU) || (sched == PastixSchedParsec) ) {
621  ctrsm = runtime_ctrsm;
622  }
623  }
624 #endif
625 
626  if ( (sched == PastixSchedStatic) ||
627  (sched == PastixSchedDynamic) )
628  {
629  solverRequestInit( solve_step, sopalin_data->solvmtx );
630  solverRhsRecvInit( solve_step, sopalin_data->solvmtx, PastixComplex32, rhsb );
631  }
632 
633  ctrsm( pastix_data, enum_list, sopalin_data, rhsb );
634 
635  if ( (sched == PastixSchedStatic) ||
636  (sched == PastixSchedDynamic) )
637  {
638  if ( solve_step == PastixSolveForward ) {
639  cpucblk_crequest_rhs_fwd_cleanup( enum_list, sched, sopalin_data->solvmtx, rhsb );
640  }
641  else {
642  cpucblk_crequest_rhs_bwd_cleanup( enum_list, sched, sopalin_data->solvmtx, rhsb );
643  }
644  solverRequestExit( sopalin_data->solvmtx );
645  solverRhsRecvExit( sopalin_data->solvmtx );
646  }
647 
648 #if defined(PASTIX_WITH_MPI)
649  MPI_Barrier( pastix_data->inter_node_comm );
650 #endif
651  free(enum_list);
652 }
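For reference, a hypothetical call sketch: the sweep direction is not passed explicitly, it is derived from (side, uplo, trans) by compute_solve_step, so a solve applied to the lower off-diagonal blocks, conjugate-transposed, could look as follows (parameter values chosen only for illustration; the actual combination depends on the factorization used):

    /* Hypothetical call, sketch only; see the parameter description above. */
    sopalin_ctrsm( pastix_data, PastixLeft, PastixLower, PastixConjTrans,
                   PastixNonUnit, sopalin_data, rhsb );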
typedef int pastix_int_t
Definition: datatypes.h:51
float _Complex pastix_complex32_t
Definition: datatypes.h:76
void bvec_callreduce(const pastix_data_t *pastix_data, pastix_complex32_t *y)
Apply an all reduce of the vector on all nodes.
volatile pastix_int_t used
Definition: queue.h:40
static void pqueuePush1(pastix_queue_t *q, pastix_int_t elt, double key1)
Push an element with a single key.
Definition: queue.h:64
void pqueueExit(pastix_queue_t *)
Free the structure associated to the queue.
Definition: queue.c:110
static pastix_int_t pqueuePop(pastix_queue_t *q)
Pop the head of the queue without returning the keys.
Definition: queue.h:75
int pqueueInit(pastix_queue_t *, pastix_int_t)
Initialize the queue structure with an initial space to store the elements.
Definition: queue.c:81
Queue structure.
Definition: queue.h:38
void solverRequestExit(SolverMatrix *solvmtx)
Free the arrays related to the requests.
Definition: solver.c:481
void solverRhsRecvExit(SolverMatrix *solvmtx)
Frees the array linked to pending reception.
Definition: solver.c:676
void solverRhsRecvInit(solve_step_t solve_step, SolverMatrix *solvmtx, pastix_coeftype_t flttype, pastix_rhs_t rhsb)
Allocates the reception buffer, and initiates the first persistent reception.
Definition: solver.c:633
void solverRequestInit(solve_step_t solve_step, SolverMatrix *solvmtx)
Instantiate the arrays for the requests according to the scheduler.
Definition: solver.c:433
void cpucblk_crequest_rhs_fwd_cleanup(const args_solve_t *enums, pastix_int_t sched, SolverMatrix *solvmtx, pastix_rhs_t rhsb)
Waitall routine for current cblk request.
int cpucblk_cincoming_rhs_fwd_deps(int rank, const args_solve_t *enums, SolverMatrix *solvmtx, SolverCblk *cblk, pastix_rhs_t rhsb)
Wait for incoming dependencies, and return when cblk->ctrbcnt has reached 0.
int cpucblk_cincoming_rhs_bwd_deps(int rank, const args_solve_t *enums, SolverMatrix *solvmtx, SolverCblk *cblk, pastix_rhs_t rhsb)
Wait for incoming dependencies, and return when cblk->ctrbcnt has reached 0.
void cpucblk_crecv_rhs_forward(const SolverMatrix *solvmtx, SolverCblk *cblk, pastix_complex32_t *work, pastix_rhs_t rhsb)
Receive the rhs associated with a cblk->lcolidx from the remote node.
void cpucblk_crequest_rhs_bwd_cleanup(const args_solve_t *enums, pastix_int_t sched, SolverMatrix *solvmtx, pastix_rhs_t rhsb)
Waitall routine for current cblk request.
void cpucblk_csend_rhs_backward(const SolverMatrix *solvmtx, SolverCblk *cblk, pastix_rhs_t rhsb)
Send the rhs associated with a cblk->lcolidx to the remote node.
void cpucblk_csend_rhs_forward(const SolverMatrix *solvmtx, SolverCblk *cblk, pastix_rhs_t rhsb)
Send the rhs associated with a cblk->lcolidx to the remote node.
void cpucblk_crecv_rhs_backward(const SolverMatrix *solvmtx, SolverCblk *cblk, pastix_rhs_t rhsb)
Receive the rhs associated with a cblk->lcolidx from the remote node.
void solve_cblk_ctrsmsp_backward(const args_solve_t *enums, SolverMatrix *datacode, SolverCblk *cblk, pastix_rhs_t rhsb)
Apply a backward solve related to one cblk to all the right hand sides.
void solve_cblk_ctrsmsp_forward(const args_solve_t *enums, SolverMatrix *datacode, const SolverCblk *cblk, pastix_rhs_t rhsb)
Apply a forward solve related to one cblk to all the right hand sides.
enum pastix_diag_e pastix_diag_t
Diagonal.
enum pastix_uplo_e pastix_uplo_t
Upper/Lower part.
enum pastix_side_e pastix_side_t
Side of the operation.
enum pastix_trans_e pastix_trans_t
Transposition.
@ IPARM_SCHEDULER
Definition: api.h:117
@ IPARM_SCHUR_SOLV_MODE
Definition: api.h:107
@ PastixSchedStatic
Definition: api.h:335
@ PastixSchedDynamic
Definition: api.h:338
@ PastixSchedStarPU
Definition: api.h:337
@ PastixSchedParsec
Definition: api.h:336
void ** cblkb
Definition: pastixdata.h:157
int inter_node_procnbr
Definition: pastixdata.h:82
pastix_int_t * iparm
Definition: pastixdata.h:69
pastix_int_t ld
Definition: pastixdata.h:155
isched_t * isched
Definition: pastixdata.h:85
PASTIX_Comm inter_node_comm
Definition: pastixdata.h:77
pastix_int_t n
Definition: pastixdata.h:154
Main PaStiX data structure.
Definition: pastixdata.h:67
Main PaStiX RHS structure.
Definition: pastixdata.h:150
void starpu_ctrsm(pastix_data_t *pastix_data, const args_solve_t *enums, sopalin_data_t *sopalin_data, pastix_rhs_t b)
Apply the TRSM solve (StarPU version).
Definition: starpu_ctrsm.c:296
void thread_ctrsm_dynamic(isched_thread_t *ctx, void *args)
Applies the Dynamic Forward or Backward solve.
void sequential_ctrsm(pastix_data_t *pastix_data, const args_solve_t *enums, sopalin_data_t *sopalin_data, pastix_rhs_t rhsb)
Applies the Sequential Forward or Backward solve.
void sopalin_ctrsm(pastix_data_t *pastix_data, pastix_side_t side, pastix_uplo_t uplo, pastix_trans_t trans, pastix_diag_t diag, sopalin_data_t *sopalin_data, pastix_rhs_t rhsb)
Calls the sequential, static, dynamic or runtime solve according to scheduler.
void dynamic_ctrsm(pastix_data_t *pastix_data, const args_solve_t *enums, sopalin_data_t *sopalin_data, pastix_rhs_t rhsb)
Applies the Dynamic Forward or Backward solve.
void static_ctrsm(pastix_data_t *pastix_data, const args_solve_t *enums, sopalin_data_t *sopalin_data, pastix_rhs_t rhsb)
Applies the Static Forward or Backward solve.
void thread_ctrsm_static(isched_thread_t *ctx, void *args)
Applies the Static Forward or Backward solve.
pastix_int_t cblknum
Definition: solver.h:121
pastix_int_t brownum
Definition: solver.h:166
pastix_int_t priority
Definition: solver.h:177
static pastix_int_t stealQueue(SolverMatrix *solvmtx, int rank, int nbthreads)
Task stealing method.
Definition: solver.h:466
static solve_step_t compute_solve_step(pastix_side_t side, pastix_uplo_t uplo, pastix_trans_t trans)
Computes the current solve step.
Definition: solver.h:301
pastix_int_t cblknbr
Definition: solver.h:208
pastix_int_t faninnbr
Definition: solver.h:209
volatile int32_t ctrbcnt
Definition: solver.h:158
SolverBlok * fblokptr
Definition: solver.h:163
pastix_int_t recvnbr
Definition: solver.h:212
int threadid
Definition: solver.h:176
SolverCblk *restrict cblktab
Definition: solver.h:222
int8_t cblktype
Definition: solver.h:159
enum solve_step_e solve_step_t
Steps of the solve (forward or backward).
pastix_int_t cblkschur
Definition: solver.h:217
Arguments for the solve.
Definition: solver.h:85
Solver column block structure.
Definition: solver.h:156
Solver matrix structure.
Definition: solver.h:200
The task structure for the numerical factorization.
Definition: solver.h:118