XGC1
field_decomposition.hpp
Go to the documentation of this file.
1 #ifndef FIELD_DECOMPOSITION_HPP
2 #define FIELD_DECOMPOSITION_HPP
3 #include "space_settings.hpp"
4 #include "NamelistReader.hpp"
5 #include "globals.hpp"
6 #ifdef USE_MPI
7 # include "my_mpi.hpp"
8 #endif
9 
10 // Field decomposition class
11 template<class Device>
12 class FieldDecomposition{
13  public:
14 
15 #ifdef USE_MPI
16  // MPI communicators etc
17  MyMPI mpi;
18 #endif
19 
20  // Constants
21  int n_ranks;
22  int n_phi_domains;
23  int n_pol_domains;
24  int n_ghost_planes;
25  int n_ghost_vertices;
26 
27  int first_owned_node;
28  int nnodes_owned;
29  int first_owned_plane;
30  int nplanes_owned;
31 
32  int first_node;
33  int last_node;
34  int n_nodes;
35  int first_plane;
36  int last_plane;
37  int n_planes;
38 
39  // Views
40  View<int*,CLayout,HostType> map_from_global_intpl;
41  View<int*,CLayout,HostType> all_first_node;
42  View<int*,CLayout,HostType> all_last_node;
43  View<int*,CLayout,HostType> all_first_plane;
44  View<int*,CLayout,HostType> all_last_plane;
45 
46  FieldDecomposition(){}
47 
48  // Constructor
49  FieldDecomposition(NLReader::NamelistReader& nlr, int nplanes, int nnodes){
50 #ifdef USE_MPI
51  nlr.use_namelist("field_decomp_param");
52  n_ranks = nlr.get<int>("n_ranks", 6);
53  n_phi_domains = nlr.get<int>("n_phi_domains",n_ranks); // Default: no poloidal decomposition
54  n_ghost_planes = nlr.get<int>("n_ghost_planes", 1);
55  n_ghost_vertices = nlr.get<int>("n_ghost_vertices", 3000);
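 // Hypothetical example of the input block read above (Fortran-namelist style;
 // the values here are illustrative, not defaults from the code):
 //   &field_decomp_param
 //     n_ranks = 8
 //     n_phi_domains = 4
 //     n_ghost_planes = 1
 //     n_ghost_vertices = 100
 //   /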
56 
57  // If there is no phi decomposition, don't use ghost planes
58  if(n_phi_domains==1) n_ghost_planes = 0;
59 
60  n_pol_domains = n_ranks/n_phi_domains; // Number of poloidal domains is inferred
61 
62  // Temporarily create a new comm which is SML_COMM_WORLD split into contiguous sets of "n_ranks" processes
63  MPI_Comm comm;
64  MPI_Comm_split(SML_COMM_WORLD, SML_COMM_RANK/n_ranks, SML_COMM_RANK, &comm);
65  // Split up comm using same class as standard decomposition (mpi.comm, mpi.plane_comm and mpi.intpl_comm)
66  mpi = MyMPI(comm, n_phi_domains);
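 // Illustrative example (hypothetical sizes): with 16 ranks in SML_COMM_WORLD and
 // n_ranks = 8, the split above produces two sub-communicators of 8 consecutive
 // ranks each; MyMPI then carves each of them into the plane/intpl communicators
 // mentioned in the comment above.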
67 printf("\nrank %d mpi.my_rank=%d", SML_COMM_RANK, mpi.my_rank);
68 
69  all_first_node = View<int*,CLayout,HostType>("all_first_node",n_ranks);
70  all_last_node = View<int*,CLayout,HostType>("all_last_node",n_ranks);
71  all_first_plane = View<int*,CLayout,HostType>("all_first_plane",n_ranks);
72  all_last_plane = View<int*,CLayout,HostType>("all_last_plane",n_ranks);
73 
74  // Some checks
75  if(n_pol_domains*n_phi_domains != n_ranks)
76  exit_XGC("\nError in field_decomposition: n_ranks must be divisible by n_phi_domains");
77 
78  // This restriction could be relaxed
79  int nplanes_per_phi_domain = nplanes/n_phi_domains;
80  if(nplanes_per_phi_domain*n_phi_domains != nplanes)
81  exit_XGC("\nError in field_decomposition: n_phi_domains must divide evenly into nplanes");
82 
83  if(mpi.nranks != n_ranks)
84  exit_XGC("\nError in field_decomposition: total XGC ranks must be divisible by field_decomp_param n_ranks");
85 
86  // Go through each domain and determine its size/offset
87  // Planes are contiguous in the communicator so they are the inner loop
88  // Everything here is ZERO-INDEXED
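 // Illustrative example (hypothetical sizes): nplanes = 8, nnodes = 1000,
 // n_ranks = 8, n_phi_domains = 4, so n_pol_domains = 2. Then
 // nodes_per_pol_domain = 500 and each phi domain owns 8/4 = 2 planes;
 // rank i = i_pol*4 + i_phi owns planes [2*i_phi, 2*i_phi+1] and vertices
 // [500*i_pol, 500*i_pol+499] before the ghost regions are added below.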
89  int nodes_per_pol_domain = nnodes/n_pol_domains; // FLOOR
90  int nodes_per_phi_domain = nplanes/n_phi_domains; // FLOOR
91  int i = 0;
92  for(int i_pol=0; i_pol<n_pol_domains; i_pol++){
93  int first_owned_node_r = nodes_per_pol_domain*i_pol;
94  int n_owned_nodes_r = (i_pol==n_pol_domains-1 ? (nnodes - first_owned_node_r) : nodes_per_pol_domain);
95  for(int i_phi=0; i_phi<n_phi_domains; i_phi++){
96  int first_owned_plane_r = nodes_per_phi_domain*i_phi;
97  int n_owned_planes_r = (i_phi==n_phi_domains-1 ? (nplanes - first_owned_plane_r) : nodes_per_phi_domain);
98 
99  // Add modulo'd ghost cells for phi, cut off ghost cells for pol
100  all_first_node(i) = std::max(first_owned_node_r - n_ghost_vertices, 0);
101  all_last_node(i) = std::min(first_owned_node_r + n_owned_nodes_r - 1 + n_ghost_vertices, nnodes-1);
102 
103  // If the owned planes plus their ghost planes cover the entire set of planes
104  if((n_owned_planes_r + 2*n_ghost_planes) >= nplanes){
105  all_first_plane(i) = 0;
106  all_last_plane(i) = nplanes-1;
107  }else{
108  all_first_plane(i) = positive_modulo(first_owned_plane_r - n_ghost_planes,nplanes);
109  all_last_plane(i) = (first_owned_plane_r + n_owned_planes_r - 1 + n_ghost_planes)%nplanes;
110  }
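 // Illustrative example (hypothetical sizes): with nplanes = 8 and n_ghost_planes = 1,
 // a domain owning planes [0,1] gets all_first_plane = positive_modulo(-1,8) = 7 and
 // all_last_plane = 2, i.e. planes {7,0,1,2} including the wrap-around ghost planes.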
111 printf("\nall_first_plane(%d) = (first_owned_plane_r - n_ghost_planes)modnplanes = (%d - %d)mod %d = %d \n", i, first_owned_plane_r, n_ghost_planes, nplanes, all_first_plane(i));
112 
113 
114  // Finally, set for this rank
115  if(i==mpi.my_rank){
116  first_owned_node = first_owned_node_r;
117  nnodes_owned = n_owned_nodes_r;
118  first_owned_plane = first_owned_plane_r;
119  nplanes_owned = n_owned_planes_r;
120 
121  first_node = all_first_node(i);
122  last_node = all_last_node(i);
123  n_nodes = last_node - first_node + 1;
124  first_plane = all_first_plane(i);
125  last_plane = all_last_plane(i);
126  n_planes = positive_modulo(last_plane - first_plane, nplanes) + 1;
127  }
128 
129  // Advance to next rank
130  i++;
131  }
132  }
133 #endif
134  }
135 
136  KOKKOS_INLINE_FUNCTION int find_domain_owner(int global_plane_index, int nplanes_total, int global_node_index, int nnodes_total) const{
137  // These are enforced to be divisible
138  int planes_per_phi_domain = nplanes_total/n_phi_domains;
139  int intpl_pid = global_plane_index/planes_per_phi_domain;
140 
141  // Poloidal decomposition should work out despite not necessarily being divisible
142  int n_vertices_per_pol_domain = nnodes_total/n_pol_domains;
143  int plane_pid = global_node_index/n_vertices_per_pol_domain;
144 
145  // calculate global pid from plane and intpl coordinates
146  return (intpl_pid + n_phi_domains*plane_pid);
147  }
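 // Illustrative example (hypothetical sizes): nplanes_total = 8, n_phi_domains = 4,
 // nnodes_total = 1000, n_pol_domains = 2. A field value at plane 5, vertex 900 maps
 // to intpl_pid = 5/2 = 2 and plane_pid = 900/500 = 1, so the owner is rank
 // 2 + 4*1 = 6 in this decomposition's communicator.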
148 
149  int all_n_nodes(int local_pid) const{
150  return all_last_node(local_pid) - all_first_node(local_pid) + 1;
151  }
152 
153  int all_n_planes(int local_pid, int nplanes) const{
154  return positive_modulo(all_last_plane(local_pid) - all_first_plane(local_pid), nplanes) + 1;
155  }
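 // Illustrative example: a rank whose plane range wraps around, e.g.
 // all_first_plane = 7 and all_last_plane = 2 with nplanes = 8, yields
 // positive_modulo(2-7, 8) + 1 = 3 + 1 = 4 planes.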
156 };
157 
158 #endif
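A minimal usage sketch (illustrative only): it assumes MPI and SML_COMM_WORLD are already initialized, that an NLReader::NamelistReader named nlr has been constructed from an input file whose field_decomp_param namelist is consistent with the sizes used here, and that HostType is a valid Device template argument in this build; the plane and vertex counts are hypothetical.

    int nplanes = 8;    // hypothetical number of toroidal planes
    int nnodes = 1000;  // hypothetical number of mesh vertices
    FieldDecomposition<HostType> decomp(nlr, nplanes, nnodes);

    // Which rank of decomp.mpi owns plane 5, vertex 900?
    int owner = decomp.find_domain_owner(5, nplanes, 900, nnodes);

    // Extent of that rank's patch, including ghost regions
    int patch_nodes  = decomp.all_n_nodes(owner);
    int patch_planes = decomp.all_n_planes(owner, nplanes);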
int n_ranks
Number of ranks the field will be divided between.
Definition: field_decomposition.hpp:21
int n_phi_domains
Number of domains in the phi direction.
Definition: field_decomposition.hpp:22
int first_plane
First plane belonging to this rank, including ghost planes.
Definition: field_decomposition.hpp:35
MPI_Comm SML_COMM_WORLD
Definition: my_mpi.cpp:4
View< int *, CLayout, HostType > all_last_node
Last node of each rank.
Definition: field_decomposition.hpp:42
int first_owned_plane
First plane belonging to this rank, NOT including ghost planes.
Definition: field_decomposition.hpp:29
int n_ghost_planes
Number of ghost planes on each side of domain.
Definition: field_decomposition.hpp:24
int nplanes_owned
Number of planes belonging to this rank, NOT including ghost planes.
Definition: field_decomposition.hpp:30
T get(const string &param, const T default_val, int val_ind=0)
Definition: NamelistReader.hpp:373
FieldDecomposition()
Definition: field_decomposition.hpp:46
KOKKOS_INLINE_FUNCTION int find_domain_owner(int global_plane_index, int nplanes_total, int global_node_index, int nnodes_total) const
Definition: field_decomposition.hpp:136
NLReader::NamelistReader
Definition: NamelistReader.hpp:193
View< int *, CLayout, HostType > map_from_global_intpl
Rank in this communicator for each global intpl rank.
Definition: field_decomposition.hpp:40
int n_ghost_vertices
Number of ghost vertices on each side of domain.
Definition: field_decomposition.hpp:25
FieldDecomposition(NLReader::NamelistReader &nlr, int nplanes, int nnodes)
Definition: field_decomposition.hpp:49
KOKKOS_INLINE_FUNCTION unsigned positive_modulo(int value, unsigned m)
Definition: globals.hpp:208
MyMPI
Definition: my_mpi.hpp:19
void use_namelist(const string &namelist)
Definition: NamelistReader.hpp:355
int all_n_nodes(int local_pid) const
Definition: field_decomposition.hpp:149
int n_pol_domains
Number of domains in the poloidal plane.
Definition: field_decomposition.hpp:23
int last_plane
Last plane belonging to this rank, including ghost planes.
Definition: field_decomposition.hpp:36
int SML_COMM_RANK
Definition: my_mpi.cpp:5
FieldDecomposition
Definition: field_decomposition.hpp:12
int n_planes
Number of planes belonging to this rank, including ghost planes.
Definition: field_decomposition.hpp:37
View< int *, CLayout, HostType > all_first_plane
First plane of each rank.
Definition: field_decomposition.hpp:43
int all_n_planes(int local_pid, int nplanes) const
Definition: field_decomposition.hpp:153
void exit_XGC(std::string msg)
Definition: globals.hpp:37
View< int *, CLayout, HostType > all_last_plane
Last plane of each rank.
Definition: field_decomposition.hpp:44
int n_nodes
Number of nodes belonging to this rank, including ghost nodes.
Definition: field_decomposition.hpp:34
int first_owned_node
First mesh node belonging to this rank, NOT including ghost nodes.
Definition: field_decomposition.hpp:27
View< int *, CLayout, HostType > all_first_node
First node of each rank.
Definition: field_decomposition.hpp:41
int first_node
First mesh node belonging to this rank, including ghost nodes.
Definition: field_decomposition.hpp:32
int last_node
Last node belonging to this rank, including ghost nodes.
Definition: field_decomposition.hpp:33
int nnodes_owned
Number of nodes belonging to this rank, NOT including ghost nodes.
Definition: field_decomposition.hpp:28