Chi-Tech
fv_02_ordering.cc
#include "FiniteVolume.h"

#include "chi_runtime.h"
#include "chi_log.h"
#include "chi_mpi.h"

// Assumed header path for chi_mpi_utils::MapAllToAll, used below.
#include "mpi/chi_mpi_utils_map_all2all.h"
#define MappingError \
  "chi_math::SpatialDiscretization_FV::OrderNodes: " \
  "Error mapping neighbor cells"

namespace chi_math::spatial_discretization
{

// ###################################################################
/**Develops the node ordering per location.*/
void FiniteVolume::OrderNodes()
{
  //============================================= Communicate node counts
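  // In the FV discretization each local cell carries exactly one node, so
  // the local node count equals the local cell count. Gather every rank's
  // count so all ranks know every block size.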
  const uint64_t local_num_nodes = ref_grid_.local_cells.size();
  locJ_block_size_.assign(Chi::mpi.process_count, 0);
  MPI_Allgather(&local_num_nodes,        // sendbuf
                1,                       // sendcount
                MPI_UINT64_T,            // sendtype
                locJ_block_size_.data(), // recvbuf
                1,                       // recvcount
                MPI_UINT64_T,            // recvtype
                Chi::mpi.comm);          // comm

  //============================================= Build block addresses
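  // Exclusive prefix sum over the block sizes: rank j's nodes occupy the
  // global-id range [locJ_block_address_[j],
  //                  locJ_block_address_[j] + locJ_block_size_[j]).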
  locJ_block_address_.assign(Chi::mpi.process_count, 0);
  uint64_t global_num_nodes = 0;
  for (int j = 0; j < Chi::mpi.process_count; ++j)
  {
    locJ_block_address_[j] = global_num_nodes;
    global_num_nodes += locJ_block_size_[j];
  }
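
  // This rank's own nodes therefore occupy the contiguous global-id range
  // [local_block_address_, local_block_address_ + local_base_block_size_).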
  local_block_address_ = locJ_block_address_[Chi::mpi.location_id];

  local_base_block_size_ = local_num_nodes;
  globl_base_block_size_ = global_num_nodes;

  //============================================= Sort neighbor ids
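  // Ghost (neighbor) cells are owned by other ranks. Bin their global ids by
  // owning partition so each owner can be queried with a single message.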
  const auto neighbor_gids = ref_grid_.cells.GetGhostGlobalIDs();
  std::map<uint64_t, std::vector<uint64_t>> sorted_nb_gids;
  for (uint64_t gid : neighbor_gids)
  {
    const auto& cell = ref_grid_.cells[gid];
    sorted_nb_gids[cell.partition_id_].push_back(gid);
  }

  //============================================= Communicate neighbor ids
  //                                              requiring mapping
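  // MapAllToAll is a sparse all-to-all exchange: each rank sends its queries
  // keyed by destination pid and receives back, keyed by source pid, the
  // global ids it owns and must translate to local ids.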
  const auto query_nb_gids =
    chi_mpi_utils::MapAllToAll(sorted_nb_gids, // map
                               MPI_UINT64_T,   // datatype
                               Chi::mpi.comm); // comm

  //============================================= Map the ids
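  // Translate each queried global id to this rank's local id. A queried id
  // that is not local here indicates a corrupt partitioning, hence the error.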
  std::map<uint64_t, std::vector<uint64_t>> mapped_query_nb_gids;
  for (const auto& pid_list_pair : query_nb_gids)
  {
    const uint64_t pid = pid_list_pair.first;
    const auto& gids = pid_list_pair.second;

    auto& local_ids = mapped_query_nb_gids[pid];
    local_ids.reserve(gids.size());
    for (uint64_t gid : gids)
    {
      if (not ref_grid_.IsCellLocal(gid)) throw std::logic_error(MappingError);

      const auto& local_cell = ref_grid_.cells[gid];
      local_ids.push_back(local_cell.local_id_);
    } // for gid
  }   // for pid_list_pair

  //============================================= Communicate back the mapped
  //                                              ids
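  // Second sparse exchange: the owners return local ids in the same order as
  // the global ids they were asked about, so index i in the reply matches
  // index i in the original query.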
  const auto mapped_nb_gids =
    chi_mpi_utils::MapAllToAll(mapped_query_nb_gids, // map
                               MPI_UINT64_T,         // datatype
                               Chi::mpi.comm);       // comm

  //============================================= Create the neighbor cell
  //                                              mapping
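  // Zip each queried global id with the returned local id. The .at() lookup
  // throws std::out_of_range if a partition never replied; that is remapped
  // to the MappingError below.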
  neighbor_cell_local_ids_.clear();
  for (const auto& pid_list_pair : sorted_nb_gids)
  {
    try
    {
      const auto& pid = pid_list_pair.first;
      const auto& gid_list = pid_list_pair.second;
      const auto& lid_list = mapped_nb_gids.at(pid);

      if (gid_list.size() != lid_list.size())
        throw std::logic_error(MappingError + std::string(" Size-mismatch."));

      for (size_t i = 0; i < gid_list.size(); ++i)
        neighbor_cell_local_ids_.insert(
          std::make_pair(gid_list[i], lid_list[i]));
    }
    catch (const std::out_of_range& oor)
    {
      throw std::logic_error(MappingError + std::string(" OOR."));
    }
  } // for pid_list_pair
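
  // Re-record the base block sizes straight from the grid; these equal the
  // values computed from the Allgather above.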
  local_base_block_size_ = ref_grid_.local_cells.size();
  globl_base_block_size_ = ref_grid_.GetGlobalNumberOfCells();
}

} // namespace chi_math::spatial_discretization