Coverage for  / dolfinx-env / lib / python3.12 / site-packages / io4dolfinx / readers.py: 94%

190 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-02-26 18:16 +0000

1# Copyright (C) 2023 Jørgen Schartum Dokken 

2# 

3# This file is part of io4dolfinx 

4# 

5# SPDX-License-Identifier: MIT 

6 

7from __future__ import annotations 

8 

9import pathlib 

10import typing 

11from pathlib import Path 

12from typing import Any 

13 

14from mpi4py import MPI 

15 

16import basix 

17import dolfinx 

18import numpy as np 

19import numpy.typing as npt 

20import ufl 

21 

22from .backends import ReadMode, get_backend 

23from .comm_helpers import send_dofs_and_recv_values 

24from .utils import ( 

25 check_file_exists, 

26 compute_dofmap_pos, 

27 compute_insert_position, 

28 compute_local_range, 

29 index_owner, 

30) 

31 

# Public API: read_cell_data added for consistency — it is a documented
# public reader just like read_point_data.
__all__ = [
    "read_mesh_from_legacy_h5",
    "read_function_from_legacy_h5",
    "read_point_data",
    "read_cell_data",
]

33 

34 

def map_dofmap(dofmap: dolfinx.graph.AdjacencyList, bs: int) -> npt.NDArray[np.int64]:
    """
    Map xxxyyyzzz to xyzxyz.

    Legacy DOLFIN stores the dofs of a blocked (vector) space component by
    component within each cell, while DOLFINx interleaves the components.
    This permutes each cell's dof block accordingly.

    Args:
        dofmap: Adjacency list whose ``array`` holds the flattened dofs and
            whose ``offsets`` delimit each cell's dofs (the offsets need not
            start at zero; they are shifted by ``offsets[0]``).
        bs: Block size (number of components) of the function space. Each
            cell's dof count must be divisible by ``bs``.

    Returns:
        The permuted dofmap as a flat ``int64`` array with the same length
        as ``dofmap.array``.
    """
    in_dofmap = dofmap.array
    in_offsets = dofmap.offsets

    mapped_dofmap = np.empty_like(in_dofmap)
    for i in range(len(in_offsets) - 1):
        # Shift offsets so the first cell starts at position 0 of the array.
        pos_begin = int(in_offsets[i] - in_offsets[0])
        pos_end = int(in_offsets[i + 1] - in_offsets[0])
        num_dofs = pos_end - pos_begin
        assert num_dofs % bs == 0
        # xxxyyyzzz -> xyzxyz within a cell is exactly a transpose of the
        # (bs, num_dofs // bs) view of the cell's dof block; vectorized
        # instead of the equivalent double Python loop.
        cell_dofs = np.asarray(in_dofmap[pos_begin:pos_end])
        mapped_dofmap[pos_begin:pos_end] = cell_dofs.reshape(bs, -1).T.reshape(-1)
    return mapped_dofmap.astype(np.int64)

56 

57 

def send_cells_and_receive_dofmap_index(
    filename: pathlib.Path,
    comm: MPI.Intracomm,
    source_ranks: npt.NDArray[np.int32],
    dest_ranks: npt.NDArray[np.int32],
    dest_size: npt.NDArray[np.int32],
    output_owners: npt.NDArray[np.int32],
    input_cells: npt.NDArray[np.int64],
    dofmap_pos: npt.NDArray[np.int32],
    num_cells_global: np.int64,
    dofmap_path: str,
    xdofmap_path: str,
    bs: int,
    backend: str,
) -> npt.NDArray[np.int64]:
    """
    Given a set of positions in input dofmap, give the global input index of this dofmap entry
    in input file.

    Args:
        filename: File containing the dofmap of the original (input) mesh.
        comm: Communicator over which the input data is distributed.
        source_ranks: Ranks that send requests to this process.
        dest_ranks: Ranks this process sends its requests to (owners of the input cells).
        dest_size: Number of entries sent to each rank in ``dest_ranks``.
        output_owners: Owning rank of each ``(input_cells, dofmap_pos)`` entry.
        input_cells: Original (input) cell index for each requested dof.
        dofmap_pos: Position within the cell's dofmap for each requested dof.
        num_cells_global: Global number of cells.
        dofmap_path: Dataset path of the dofmap in the file.
        xdofmap_path: Dataset path of the dofmap offsets in the file.
        bs: Block size of the function space.
        backend: Name of the IO backend used to read the dofmap.

    Returns:
        For each ``(input_cells[i], dofmap_pos[i])`` pair, the global index of
        that dofmap entry in the input file, in the same order as the inputs.
    """
    check_file_exists(filename)

    recv_size = np.zeros(len(source_ranks), dtype=np.int32)
    mesh_to_data_comm = comm.Create_dist_graph_adjacent(
        source_ranks.tolist(), dest_ranks.tolist(), reorder=False
    )
    # Send sizes to create data structures for receiving from NeighAlltoAllv
    mesh_to_data_comm.Neighbor_alltoall(dest_size, recv_size)

    # Sort output for sending and fill send data
    out_cells = np.zeros(len(output_owners), dtype=np.int64)
    out_pos = np.zeros(len(output_owners), dtype=np.int32)
    # proc_to_dof records where each input entry lands in the send buffer so
    # the replies can be scattered back into input order at the end.
    proc_to_dof = np.zeros_like(input_cells, dtype=np.int32)
    insertion_array = compute_insert_position(output_owners, dest_ranks, dest_size)
    out_cells[insertion_array] = input_cells
    out_pos[insertion_array] = dofmap_pos
    proc_to_dof[insertion_array] = np.arange(len(input_cells), dtype=np.int32)
    del insertion_array

    # Prepare data-structures for receiving
    total_incoming = sum(recv_size)
    inc_cells = np.zeros(total_incoming, dtype=np.int64)
    inc_pos = np.zeros(total_incoming, dtype=np.intc)

    # Send data
    s_msg = [out_cells, dest_size, MPI.INT64_T]
    r_msg = [inc_cells, recv_size, MPI.INT64_T]
    mesh_to_data_comm.Neighbor_alltoallv(s_msg, r_msg)

    s_msg = [out_pos, dest_size, MPI.INT32_T]
    r_msg = [inc_pos, recv_size, MPI.INT32_T]
    mesh_to_data_comm.Neighbor_alltoallv(s_msg, r_msg)
    mesh_to_data_comm.Free()

    backend_cls = get_backend(backend)
    # Read dofmap from file
    backend_args = {"dofmap": dofmap_path, "offsets": xdofmap_path}
    if backend == "adios2":
        # Legacy DOLFIN files are plain HDF5; force the HDF5 engine.
        backend_args.update({"engine": "HDF5"})
    input_dofs = backend_cls.read_dofmap(filename, comm, name="", backend_args=backend_args)
    # Map to xyz
    mapped_dofmap = map_dofmap(input_dofs, bs).astype(np.int64)

    # Extract dofmap data
    local_cell_range = compute_local_range(comm, num_cells_global)
    # Incoming cell indices are global; shift them into this rank's local range.
    input_cell_positions = inc_cells - local_cell_range[0]
    in_offsets = input_dofs.offsets
    read_pos = (in_offsets[input_cell_positions] + inc_pos - in_offsets[0]).astype(np.int32)
    input_dofs = mapped_dofmap[read_pos]
    del input_cell_positions, read_pos

    # Send input dofs back to owning process (reversed neighborhood graph)
    data_to_mesh_comm = comm.Create_dist_graph_adjacent(
        dest_ranks.tolist(), source_ranks.tolist(), reorder=False
    )

    incoming_global_dofs = np.zeros(sum(dest_size), dtype=np.int64)
    s_msg = [input_dofs, recv_size, MPI.INT64_T]
    r_msg = [incoming_global_dofs, dest_size, MPI.INT64_T]
    data_to_mesh_comm.Neighbor_alltoallv(s_msg, r_msg)

    # Sort incoming global dofs as they were inputted
    sorted_global_dofs = np.zeros_like(incoming_global_dofs, dtype=np.int64)
    assert len(incoming_global_dofs) == len(input_cells)
    sorted_global_dofs[proc_to_dof] = incoming_global_dofs
    data_to_mesh_comm.Free()
    return sorted_global_dofs

144 

145 

def read_mesh_from_legacy_h5(
    filename: pathlib.Path,
    comm: MPI.Intracomm,
    group: str,
    cell_type: str = "tetrahedron",
    backend: str = "adios2",
    max_facet_to_cell_links: int = 2,
) -> dolfinx.mesh.Mesh:
    """
    Read mesh from `h5`-file generated by legacy DOLFIN `HDF5File.write` or `XDMF.write_checkpoint`.

    Args:
        comm: MPI communicator to distribute mesh over
        filename: Path to `h5` or `xdmf` file
        group: Name of mesh in `h5`-file
        cell_type: What type of cell type, by default tetrahedron. Ignored if
            the file itself stores the cell type.
        backend: The IO backend to use when reading the mesh (must
            support legacy mesh reading, e.g., "adios2").
        max_facet_to_cell_links: Maximum number of cells a facet
            can be connected to.

    Returns:
        The distributed DOLFINx mesh.
    """
    # Make sure we use the HDF5File and check that the file is present
    check_file_exists(filename)

    backend_cls = get_backend(backend)
    mesh_topology, mesh_geometry, ct = backend_cls.read_legacy_mesh(filename, comm, group)
    if ct is not None:
        # Cell type stored in the file takes precedence over the argument.
        cell_type = ct
    # Create DOLFINx mesh: legacy meshes are always first-order Lagrange.
    element = basix.ufl.element(
        basix.ElementFamily.P,
        cell_type,
        1,
        basix.LagrangeVariant.equispaced,
        shape=(mesh_geometry.shape[1],),
    )
    domain = ufl.Mesh(element)

    # BUG FIX: distribute over the user-supplied communicator, not
    # MPI.COMM_WORLD — the two differ when the caller reads on a sub-communicator.
    # The try/except handles DOLFINx versions whose create_mesh does not accept
    # max_facet_to_cell_links.
    try:
        return dolfinx.mesh.create_mesh(
            comm=comm,
            cells=mesh_topology,
            x=mesh_geometry,
            e=domain,
            partitioner=None,
            max_facet_to_cell_links=max_facet_to_cell_links,
        )
    except TypeError:
        return dolfinx.mesh.create_mesh(
            comm=comm,
            cells=mesh_topology,
            x=mesh_geometry,
            e=domain,
            partitioner=None,
        )

    # Should change to the commented code below when we require python
    # minimum version to be >=3.12 see https://github.com/python/cpython/pull/116198
    # import inspect
    # sig = inspect.signature(dolfinx.mesh.create_mesh)
    # kwargs: dict[str, int] = {}
    # if "max_facet_to_cell_links" in list(sig.parameters.keys()):
    #     kwargs["max_facet_to_cell_links"] = max_facet_to_cell_links

    # return dolfinx.mesh.create_mesh(
    #     comm=comm,
    #     cells=mesh_topology,
    #     x=mesh_geometry,
    #     e=domain,
    #     partitioner=None,
    #     **kwargs,
    # )

218 

219 

def read_function_from_legacy_h5(
    filename: pathlib.Path,
    comm: MPI.Intracomm,
    u: dolfinx.fem.Function,
    group: str = "mesh",
    step: typing.Optional[int] = None,
    backend: str = "adios2",
):
    """
    Read function from a `h5`-file generated by legacy DOLFIN `HDF5File.write`
    or `XDMF.write_checkpoint`.

    Args:
        comm: MPI communicator to distribute mesh over
        filename: Path to `h5` or `xdmf` file
        u: The function used to store the read values
        group: Group within the `h5` file where the function is stored, by default "mesh"
        step: The time step used when saving the checkpoint. If not provided it will assume that
            the function is saved as a regular function (i.e with `HDF5File.write`)
        backend: The IO backend

    Raises:
        FileNotFoundError: If the `h5` file does not exist.
        RuntimeError: If the function space requires dof transformations,
            which legacy data cannot represent.
    """
    # Legacy DOLFIN always writes the data to an `.h5` file, even when the
    # caller points at the accompanying `.xdmf`; probe for the `.h5` sibling.
    filename = pathlib.Path(filename).with_suffix(".h5")
    if not filename.is_file():
        # BUG FIX: include the resolved path in the message (the f-string
        # previously contained no placeholder).
        raise FileNotFoundError(f"File {filename} does not exist")

    V = u.function_space
    mesh = V.mesh
    if V.element.needs_dof_transformations:
        raise RuntimeError(
            "Function-spaces requiring dof permutations are not compatible with legacy data"
        )
    # ----------------------Step 1---------------------------------
    # Compute index of input cells, and position in input dofmap
    local_cells, dof_pos = compute_dofmap_pos(V)
    input_cells = mesh.topology.original_cell_index[local_cells]

    # 1.1 Compute mesh->input communicator
    num_cells_global = mesh.topology.index_map(mesh.topology.dim).size_global
    backend_cls = get_backend(backend)
    owners: npt.NDArray[np.int32]
    if backend_cls.read_mode == ReadMode.serial:
        # Serial backends hold all data on rank 0.
        owners = np.zeros(len(input_cells), dtype=np.int32)
    elif backend_cls.read_mode == ReadMode.parallel:
        owners = index_owner(mesh.comm, input_cells, num_cells_global)
    else:
        raise NotImplementedError(f"{backend_cls.read_mode} not implemented")

    unique_owners, owner_count = np.unique(owners, return_counts=True)
    # FIXME: In C++ use NBX to find neighbourhood
    _tmp_comm = mesh.comm.Create_dist_graph(
        [mesh.comm.rank], [len(unique_owners)], unique_owners, reorder=False
    )
    source, dest, _ = _tmp_comm.Get_dist_neighbors()
    _tmp_comm.Free()
    # Strip out any /
    group = group.strip("/")
    if step is not None:
        # Checkpoints nest the data one level deeper and store a single vector.
        group = f"{group}/{group}_{step}"
        vector_group = "vector"
    else:
        vector_group = "vector_0"

    # ----------------------Step 2--------------------------------
    # Get global dofmap indices from input process
    bs = V.dofmap.bs
    dofmap_indices = send_cells_and_receive_dofmap_index(
        filename,
        comm,
        np.asarray(source, dtype=np.int32),
        np.asarray(dest, dtype=np.int32),
        owner_count.astype(np.int32),
        owners,
        input_cells,
        dof_pos,
        num_cells_global,
        f"/{group}/cell_dofs",
        f"/{group}/x_cell_dofs",
        bs,
        backend=backend,
    )

    # ----------------------Step 3---------------------------------
    dof_owner: npt.NDArray[np.int32]
    if backend_cls.read_mode == ReadMode.serial:
        dof_owner = np.zeros(len(dofmap_indices), dtype=np.int32)
    elif backend_cls.read_mode == ReadMode.parallel:
        # Compute owner of global dof on distributed input data
        num_dof_global = V.dofmap.index_map_bs * V.dofmap.index_map.size_global
        dof_owner = index_owner(comm=mesh.comm, indices=dofmap_indices, N=num_dof_global)
    else:
        raise NotImplementedError(f"{backend_cls.read_mode} not implemented")

    # Create MPI neigh comm to owner.
    # NOTE: USE NBX in C++

    # Read input data
    local_array, starting_pos = backend_cls.read_hdf5_array(
        comm, filename, f"/{group}/{vector_group}", backend_args=None
    )

    # Send global dof indices to correct input process, and receive value of given dof
    local_values = send_dofs_and_recv_values(
        dofmap_indices, dof_owner, comm, local_array, starting_pos
    )

    # ----------------------Step 4---------------------------------
    # Populate local part of array and scatter forward
    u.x.array[: len(local_values)] = local_values
    u.x.scatter_forward()

333 

334 

def create_geometry_function_space(mesh: dolfinx.mesh.Mesh, N: int) -> dolfinx.fem.FunctionSpace:
    """Reconstruct a vector space with the N components using the geometry dofmap to ensure
    a 1-1 mapping between mesh nodes and DOFs.

    Args:
        mesh: Mesh whose coordinate element and geometry dofmap are reused.
        N: Number of components per node (``N == 1`` yields a scalar space).

    Returns:
        A function space whose dofmap is the mesh geometry dofmap, so dof ``i``
        corresponds to geometry node ``i``.

    Raises:
        RuntimeError: If the coordinate element scalar type is neither
            float32 nor float64.
    """
    geom_imap = mesh.geometry.index_map()
    geom_dofmap = mesh.geometry.dofmap
    ufl_domain = mesh.ufl_domain()
    assert ufl_domain is not None
    # Scalar sub-element of the coordinate element; blocked below when N > 1.
    sub_el = ufl_domain.ufl_coordinate_element().sub_elements[0]
    adj_list = dolfinx.cpp.graph.AdjacencyList_int32(geom_dofmap)

    value_shape: tuple[int, ...]
    if N == 1:
        ufl_el = sub_el
        value_shape = ()
    else:
        ufl_el = basix.ufl.blocked_element(sub_el, shape=(N,))
        value_shape = (N,)

    # Select the C++ constructors matching the element's scalar type.
    if ufl_el.dtype == np.float32:
        _fe_constructor = dolfinx.cpp.fem.FiniteElement_float32
        _fem_constructor = dolfinx.cpp.fem.FunctionSpace_float32
    elif ufl_el.dtype == np.float64:
        _fe_constructor = dolfinx.cpp.fem.FiniteElement_float64
        _fem_constructor = dolfinx.cpp.fem.FunctionSpace_float64
    else:
        raise RuntimeError(f"Unsupported type {ufl_el.dtype}")
    # The try/except pairs below accommodate differing C++ constructor
    # signatures across DOLFINx versions.
    try:
        cpp_el = _fe_constructor(ufl_el.basix_element._e, block_shape=value_shape, symmetric=False)
    except TypeError:
        cpp_el = _fe_constructor(ufl_el.basix_element._e, block_size=N, symmetric=False)
    dof_layout = dolfinx.cpp.fem.create_element_dof_layout(cpp_el, [])
    # Reuse the geometry index map and dofmap directly, with block size N.
    cpp_dofmap = dolfinx.cpp.fem.DofMap(dof_layout, geom_imap, N, adj_list, N)

    # Create function space
    try:
        cpp_space = _fem_constructor(mesh._cpp_object, cpp_el, cpp_dofmap)
    except TypeError:
        cpp_space = _fem_constructor(mesh._cpp_object, cpp_el, cpp_dofmap, value_shape=value_shape)

    return dolfinx.fem.FunctionSpace(mesh, ufl_el, cpp_space)

375 

376 

def read_point_data(
    filename: Path | str,
    name: str,
    mesh: dolfinx.mesh.Mesh,
    time: float | None = None,
    backend_args: dict[str, Any] | None = None,
    backend: str = "xdmf",
) -> dolfinx.fem.Function:
    """Read data from the nodes of a mesh.

    Note:
        Backend has to implement {py:class}`io4dolfinx.backends.read_point_data`.

    Args:
        filename: Path to file
        name: Name of point data
        mesh: The corresponding :py:class:`dolfinx.mesh.Mesh`.
        time: Time-step to read from.
        backend_args: Backend-specific arguments forwarded to the reader.
        backend: The IO backend to use.

    Returns:
        A function in the space equivalent to the mesh
        coordinate element (up to shape).
    """

    backend_cls = get_backend(backend)
    dataset, local_range_start = backend_cls.read_point_data(
        filename=filename, name=name, comm=mesh.comm, time=time, backend_args=backend_args
    )

    # One column per component of the point data.
    num_components = dataset.shape[1]

    # Create appropriate function space (based on coordinate map)
    V = create_geometry_function_space(mesh, num_components)
    uh = dolfinx.fem.Function(V, name=name, dtype=dataset.dtype)
    # Assume that mesh is first order for now
    x_dofmap = mesh.geometry.dofmap
    igi = np.array(mesh.geometry.input_global_indices, dtype=np.int64)

    # This is dependent on how the data is read in. If distributed equally this is correct
    global_geom_input = igi[x_dofmap]

    if backend_cls.read_mode == ReadMode.parallel:
        num_nodes_global = mesh.geometry.index_map().size_global
        global_geom_owner = index_owner(mesh.comm, global_geom_input.reshape(-1), num_nodes_global)
    elif backend_cls.read_mode == ReadMode.serial:
        # This is correct if everything is read in on rank 0
        global_geom_owner = np.zeros(len(global_geom_input.flatten()), dtype=np.int32)
    else:
        raise NotImplementedError(f"{backend_cls.read_mode} not implemented")

    # Fetch the values of the referenced input nodes, one component at a time.
    for i in range(num_components):
        arr_i = send_dofs_and_recv_values(
            global_geom_input.reshape(-1),
            global_geom_owner,
            mesh.comm,
            dataset[:, i],
            local_range_start,
        )
        # Components are interleaved in the dof array: (node, component).
        dof_pos = x_dofmap.reshape(-1) * num_components + i
        uh.x.array[dof_pos] = arr_i
    uh.x.scatter_forward()
    return uh

439 

440 

def read_cell_data(
    filename: Path | str,
    name: str,
    mesh: dolfinx.mesh.Mesh,
    time: float | None = None,
    backend_args: dict[str, Any] | None = None,
    backend: str = "xdmf",
) -> dolfinx.fem.Function:
    """Read data from the cells of a mesh.

    Note:
        Backend has to implement {py:class}`io4dolfinx.backends.read_cell_data`.

    Args:
        filename: Path to file
        name: Name of cell data
        mesh: The corresponding :py:class:`dolfinx.mesh.Mesh`.
        time: Time-step to read from.
        backend_args: Backend-specific arguments forwarded to the reader.
        backend: The IO backend to use.

    Returns:
        A function in a DG-0 space on the mesh. The cells not found in input is set to zero.
    """

    backend_cls = get_backend(backend)

    topology, dofs = backend_cls.read_cell_data(
        filename=filename, name=name, comm=mesh.comm, time=time, backend_args=backend_args
    )
    # One column per component of the cell data.
    num_components = dofs.shape[1]
    shape: tuple[int, ...]
    if num_components == 1:
        shape = ()
    else:
        shape = (num_components,)
    V = dolfinx.fem.functionspace(mesh, ("DG", 0, shape))
    # Carry the dataset name on the Function, consistent with read_point_data.
    u = dolfinx.fem.Function(V, name=name, dtype=dofs.dtype)
    data_array = u.x.array.reshape(-1, num_components)
    for i in range(num_components):
        # Map globally-indexed input cells to the cells owned by this process.
        local_entities, local_values = dolfinx.io.distribute_entity_data(
            mesh, mesh.topology.dim, topology, dofs[:, i].copy()
        )
        adj = dolfinx.graph.adjacencylist(local_entities)
        # Tag each local entity with its position in local_values so the
        # meshtags give us the value permutation for owned cells.
        order = np.arange(len(local_values), dtype=np.int32)
        mt = dolfinx.mesh.meshtags_from_entities(mesh, mesh.topology.dim, adj, order)
        data_array[mt.indices, i] = local_values[mt.values]
    u.x.scatter_forward()
    return u