Coverage for /dolfinx-env/lib/python3.12/site-packages/io4dolfinx/readers.py: 94% of 192 statements


# Copyright (C) 2023 Jørgen Schartum Dokken
#
# This file is part of io4dolfinx
#
# SPDX-License-Identifier: MIT

from __future__ import annotations

import pathlib
import typing
from pathlib import Path
from typing import Any

from mpi4py import MPI

import basix
import dolfinx
import numpy as np
import numpy.typing as npt
import ufl

from .backends import ReadMode, get_backend
from .comm_helpers import send_dofs_and_recv_values
from .utils import (
    check_file_exists,
    compute_dofmap_pos,
    compute_insert_position,
    compute_local_range,
    index_owner,
)

__all__ = ["read_mesh_from_legacy_h5", "read_function_from_legacy_h5", "read_point_data"]

def map_dofmap(dofmap: dolfinx.graph.AdjacencyList, bs: int) -> npt.NDArray[np.int64]:
    """
    Map a blocked dofmap from component-major ordering (xxx...yyy...zzz) to
    interleaved ordering (xyzxyz...), cell by cell.
    """

    in_dofmap = dofmap.array
    in_offsets = dofmap.offsets

    mapped_dofmap = np.empty_like(in_dofmap)
    for i in range(len(in_offsets) - 1):
        pos_begin, pos_end = (
            in_offsets[i] - in_offsets[0],
            in_offsets[i + 1] - in_offsets[0],
        )
        dofs_i = in_dofmap[pos_begin:pos_end]
        assert (pos_end - pos_begin) % bs == 0
        num_dofs_local = int((pos_end - pos_begin) // bs)
        for k in range(bs):
            for j in range(num_dofs_local):
                mapped_dofmap[int(pos_begin + j * bs + k)] = dofs_i[int(num_dofs_local * k + j)]
    return mapped_dofmap.astype(np.int64)
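# Illustration of the reordering above (comment only, not executed): for one cell with
# block size bs=2 and input dofs stored component-major as [x0, x1, x2, y0, y1, y2],
# map_dofmap returns the interleaved ordering [x0, y0, x1, y1, x2, y2].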

def send_cells_and_receive_dofmap_index(
    filename: pathlib.Path,
    comm: MPI.Intracomm,
    source_ranks: npt.NDArray[np.int32],
    dest_ranks: npt.NDArray[np.int32],
    dest_size: npt.NDArray[np.int32],
    output_owners: npt.NDArray[np.int32],
    input_cells: npt.NDArray[np.int64],
    dofmap_pos: npt.NDArray[np.int32],
    num_cells_global: np.int64,
    dofmap_path: str,
    xdofmap_path: str,
    bs: int,
    backend: str,
) -> npt.NDArray[np.int64]:
    """
    Given a set of positions in the input dofmap, return the global input index of each
    dofmap entry in the input file.
    """
    check_file_exists(filename)
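    # Outline of the steps below:
    #  1. Exchange message sizes with the ranks holding the requested rows of the
    #     on-disk dofmap (Neighbor_alltoall).
    #  2. Send (input cell index, position within the cell dofmap) pairs to those ranks.
    #  3. Each rank reads its slice of the dofmap, reorders it to interleaved (xyz) form
    #     and looks up the requested global dof indices.
    #  4. The indices are sent back and restored to the original order of `input_cells`.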

    recv_size = np.zeros(len(source_ranks), dtype=np.int32)
    mesh_to_data_comm = comm.Create_dist_graph_adjacent(
        source_ranks.tolist(), dest_ranks.tolist(), reorder=False
    )
    # Send sizes to create data structures for receiving from NeighAlltoAllv
    mesh_to_data_comm.Neighbor_alltoall(dest_size, recv_size)

    # Sort output for sending and fill send data
    out_cells = np.zeros(len(output_owners), dtype=np.int64)
    out_pos = np.zeros(len(output_owners), dtype=np.int32)
    proc_to_dof = np.zeros_like(input_cells, dtype=np.int32)
    insertion_array = compute_insert_position(output_owners, dest_ranks, dest_size)
    out_cells[insertion_array] = input_cells
    out_pos[insertion_array] = dofmap_pos
    proc_to_dof[insertion_array] = np.arange(len(input_cells), dtype=np.int32)
    del insertion_array

    # Prepare data-structures for receiving
    total_incoming = sum(recv_size)
    inc_cells = np.zeros(total_incoming, dtype=np.int64)
    inc_pos = np.zeros(total_incoming, dtype=np.intc)

    # Send data
    s_msg = [out_cells, dest_size, MPI.INT64_T]
    r_msg = [inc_cells, recv_size, MPI.INT64_T]
    mesh_to_data_comm.Neighbor_alltoallv(s_msg, r_msg)

    s_msg = [out_pos, dest_size, MPI.INT32_T]
    r_msg = [inc_pos, recv_size, MPI.INT32_T]
    mesh_to_data_comm.Neighbor_alltoallv(s_msg, r_msg)
    mesh_to_data_comm.Free()

    backend_cls = get_backend(backend)
    # Read dofmap from file
    backend_args = {"dofmap": dofmap_path, "offsets": xdofmap_path}
    if backend == "adios2":
        backend_args.update({"engine": "HDF5"})
    input_dofs = backend_cls.read_dofmap(filename, comm, name="", backend_args=backend_args)
    # Map to xyz
    mapped_dofmap = map_dofmap(input_dofs, bs).astype(np.int64)

    # Extract dofmap data
    local_cell_range = compute_local_range(comm, num_cells_global)
    input_cell_positions = inc_cells - local_cell_range[0]
    in_offsets = input_dofs.offsets
    read_pos = (in_offsets[input_cell_positions] + inc_pos - in_offsets[0]).astype(np.int32)
    input_dofs = mapped_dofmap[read_pos]
    del input_cell_positions, read_pos

    # Send input dofs back to owning process
    data_to_mesh_comm = comm.Create_dist_graph_adjacent(
        dest_ranks.tolist(), source_ranks.tolist(), reorder=False
    )

    incoming_global_dofs = np.zeros(sum(dest_size), dtype=np.int64)
    s_msg = [input_dofs, recv_size, MPI.INT64_T]
    r_msg = [incoming_global_dofs, dest_size, MPI.INT64_T]
    data_to_mesh_comm.Neighbor_alltoallv(s_msg, r_msg)

    # Sort incoming global dofs as they were inputted
    sorted_global_dofs = np.zeros_like(incoming_global_dofs, dtype=np.int64)
    assert len(incoming_global_dofs) == len(input_cells)
    sorted_global_dofs[proc_to_dof] = incoming_global_dofs
    data_to_mesh_comm.Free()
    return sorted_global_dofs

def read_mesh_from_legacy_h5(
    filename: pathlib.Path,
    comm: MPI.Intracomm,
    group: str,
    cell_type: str = "tetrahedron",
    backend: str = "adios2",
    max_facet_to_cell_links: int = 2,
) -> dolfinx.mesh.Mesh:
    """
    Read a mesh from an `h5`-file generated by legacy DOLFIN `HDF5File.write` or
    `XDMF.write_checkpoint`.

    Args:
        filename: Path to `h5` or `xdmf` file
        comm: MPI communicator to distribute the mesh over
        group: Name of the mesh group in the `h5`-file
        cell_type: The cell type of the mesh, by default "tetrahedron".
        backend: The IO backend to use when reading the mesh (must
            support legacy mesh reading, e.g., "adios2").
        max_facet_to_cell_links: Maximum number of cells a facet
            can be connected to.
    """
    # Make sure we use the HDF5File and check that the file is present
    check_file_exists(filename)

    backend_cls = get_backend(backend)
    mesh_topology, mesh_geometry, ct = backend_cls.read_legacy_mesh(filename, comm, group)
    if ct is not None:
        cell_type = ct
    # Create DOLFINx mesh
    element = basix.ufl.element(
        basix.ElementFamily.P,
        cell_type,
        1,
        basix.LagrangeVariant.equispaced,
        shape=(mesh_geometry.shape[1],),
    )
    domain = ufl.Mesh(element)

    try:
        return dolfinx.mesh.create_mesh(
            comm=MPI.COMM_WORLD,
            cells=mesh_topology,
            x=mesh_geometry,
            e=domain,
            partitioner=None,
            max_facet_to_cell_links=max_facet_to_cell_links,
        )
    except TypeError:
        return dolfinx.mesh.create_mesh(
            comm=MPI.COMM_WORLD,
            cells=mesh_topology,
            x=mesh_geometry,
            e=domain,
            partitioner=None,
        )

    # Should change to the commented code below when we require the minimum Python
    # version to be >=3.12, see https://github.com/python/cpython/pull/116198
    # import inspect
    # sig = inspect.signature(dolfinx.mesh.create_mesh)
    # kwargs: dict[str, int] = {}
    # if "max_facet_to_cell_links" in list(sig.parameters.keys()):
    #     kwargs["max_facet_to_cell_links"] = max_facet_to_cell_links

    # return dolfinx.mesh.create_mesh(
    #     comm=MPI.COMM_WORLD,
    #     cells=mesh_topology,
    #     x=mesh_geometry,
    #     e=domain,
    #     partitioner=None,
    #     **kwargs,
    # )
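# A minimal usage sketch (comment only; "legacy_mesh.h5" and "/mesh" are hypothetical
# names for a file written with legacy DOLFIN's HDF5File.write):
#
#   from io4dolfinx.readers import read_mesh_from_legacy_h5
#
#   mesh = read_mesh_from_legacy_h5(
#       pathlib.Path("legacy_mesh.h5"), MPI.COMM_WORLD, group="/mesh"
#   )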

def read_function_from_legacy_h5(
    filename: pathlib.Path,
    comm: MPI.Intracomm,
    u: dolfinx.fem.Function,
    group: str = "mesh",
    step: typing.Optional[int] = None,
    vector_group: str | None = None,
    backend: str = "adios2",
):
    """
    Read a function from an `h5`-file generated by legacy DOLFIN `HDF5File.write`
    or `XDMF.write_checkpoint`.

    Args:
        filename: Path to `h5` or `xdmf` file
        comm: MPI communicator to distribute the mesh over
        u: The function used to store the read values
        group: Group within the `h5` file where the function is stored, by default "mesh"
        step: The time step used when saving the checkpoint. If not provided it is assumed
            that the function was saved as a regular function (i.e. with `HDF5File.write`)
        vector_group: Name of the dataset holding the function values. Defaults to
            "vector" when `step` is given, otherwise "vector_0".
        backend: The IO backend
    """

    # Make sure we use the HDF5 file and check that the file is present
    filename = pathlib.Path(filename)
    if filename.suffix == ".xdmf":
        filename = filename.with_suffix(".h5")
    if not filename.is_file():
        raise FileNotFoundError(f"File {filename} does not exist")

    V = u.function_space
    mesh = u.function_space.mesh
    if u.function_space.element.needs_dof_transformations:
        raise RuntimeError(
            "Function-spaces requiring dof permutations are not compatible with legacy data"
        )
    # ----------------------Step 1---------------------------------
    # Compute index of input cells, and position in input dofmap
    local_cells, dof_pos = compute_dofmap_pos(u.function_space)
    input_cells = mesh.topology.original_cell_index[local_cells]

    # 1.1 Compute mesh->input communicator
    num_cells_global = mesh.topology.index_map(mesh.topology.dim).size_global
    backend_cls = get_backend(backend)
    owners: npt.NDArray[np.int32]
    if backend_cls.read_mode == ReadMode.serial:
        owners = np.zeros(len(input_cells), dtype=np.int32)
    elif backend_cls.read_mode == ReadMode.parallel:
        owners = index_owner(V.mesh.comm, input_cells, num_cells_global)
    else:
        raise NotImplementedError(f"{backend_cls.read_mode} not implemented")

    unique_owners, owner_count = np.unique(owners, return_counts=True)
    # FIXME: In C++ use NBX to find neighbourhood
    _tmp_comm = mesh.comm.Create_dist_graph(
        [mesh.comm.rank], [len(unique_owners)], unique_owners, reorder=False
    )
    source, dest, _ = _tmp_comm.Get_dist_neighbors()
    _tmp_comm.Free()
    # Strip out any leading/trailing /
    group = group.strip("/")
    if step is not None:
        group = f"{group}/{group}_{step}"
        vector_group = vector_group or "vector"
    else:
        vector_group = vector_group or "vector_0"

    # ----------------------Step 2--------------------------------
    # Get global dofmap indices from input process
    bs = V.dofmap.bs
    num_cells_global = mesh.topology.index_map(mesh.topology.dim).size_global
    dofmap_indices = send_cells_and_receive_dofmap_index(
        filename,
        comm,
        np.asarray(source, dtype=np.int32),
        np.asarray(dest, dtype=np.int32),
        owner_count.astype(np.int32),
        owners,
        input_cells,
        dof_pos,
        num_cells_global,
        f"/{group}/cell_dofs",
        f"/{group}/x_cell_dofs",
        bs,
        backend=backend,
    )

    # ----------------------Step 3---------------------------------
    dof_owner: npt.NDArray[np.int32]
    if backend_cls.read_mode == ReadMode.serial:
        dof_owner = np.zeros(len(dofmap_indices), dtype=np.int32)
    elif backend_cls.read_mode == ReadMode.parallel:
        # Compute owner of global dof on distributed input data
        num_dof_global = V.dofmap.index_map_bs * V.dofmap.index_map.size_global
        dof_owner = index_owner(comm=mesh.comm, indices=dofmap_indices, N=num_dof_global)
    else:
        raise NotImplementedError(f"{backend_cls.read_mode} not implemented")

    # Create MPI neighbourhood communicator to the owners.
    # NOTE: USE NBX in C++

    # Read input data
    local_array, starting_pos = backend_cls.read_hdf5_array(
        comm, filename, f"/{group}/{vector_group}", backend_args=None
    )

    # Send global dof indices to correct input process, and receive value of given dof
    local_values = send_dofs_and_recv_values(
        dofmap_indices, dof_owner, comm, local_array, starting_pos
    )

    # ----------------------Step 4---------------------------------
    # Populate local part of array and scatter forward
    u.x.array[: len(local_values)] = local_values
    u.x.scatter_forward()
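# A minimal usage sketch (comment only; file name and group are hypothetical, and the
# function space must match the one the data was written with):
#
#   from io4dolfinx.readers import read_function_from_legacy_h5
#
#   V = dolfinx.fem.functionspace(mesh, ("Lagrange", 1))
#   u = dolfinx.fem.Function(V)
#   read_function_from_legacy_h5(pathlib.Path("f.h5"), MPI.COMM_WORLD, u, group="f")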

def create_geometry_function_space(mesh: dolfinx.mesh.Mesh, N: int) -> dolfinx.fem.FunctionSpace:
    """Construct a function space with N components that uses the geometry dofmap, ensuring
    a one-to-one mapping between mesh nodes and DOFs."""
    geom_imap = mesh.geometry.index_map()
    geom_dofmap = mesh.geometry.dofmap
    ufl_domain = mesh.ufl_domain()
    assert ufl_domain is not None
    sub_el = ufl_domain.ufl_coordinate_element().sub_elements[0]
    adj_list = dolfinx.cpp.graph.AdjacencyList_int32(geom_dofmap)

    value_shape: tuple[int, ...]
    if N == 1:
        ufl_el = sub_el
        value_shape = ()
    else:
        ufl_el = basix.ufl.blocked_element(sub_el, shape=(N,))
        value_shape = (N,)

    if ufl_el.dtype == np.float32:
        _fe_constructor = dolfinx.cpp.fem.FiniteElement_float32
        _fem_constructor = dolfinx.cpp.fem.FunctionSpace_float32
    elif ufl_el.dtype == np.float64:
        _fe_constructor = dolfinx.cpp.fem.FiniteElement_float64
        _fem_constructor = dolfinx.cpp.fem.FunctionSpace_float64
    else:
        raise RuntimeError(f"Unsupported type {ufl_el.dtype}")
    try:
        cpp_el = _fe_constructor(ufl_el.basix_element._e, block_shape=value_shape, symmetric=False)
    except TypeError:
        cpp_el = _fe_constructor(ufl_el.basix_element._e, block_size=N, symmetric=False)
    dof_layout = dolfinx.cpp.fem.create_element_dof_layout(cpp_el, [])
    cpp_dofmap = dolfinx.cpp.fem.DofMap(dof_layout, geom_imap, N, adj_list, N)

    # Create function space
    try:
        cpp_space = _fem_constructor(mesh._cpp_object, cpp_el, cpp_dofmap)
    except TypeError:
        cpp_space = _fem_constructor(mesh._cpp_object, cpp_el, cpp_dofmap, value_shape=value_shape)

    return dolfinx.fem.FunctionSpace(mesh, ufl_el, cpp_space)
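# Usage note (comment only): create_geometry_function_space(mesh, 3) builds a space on
# the mesh coordinate (sub-)element whose dofmap is the geometry dofmap, so data with
# three components can be copied node-by-node into a dolfinx.fem.Function on that space.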

def read_point_data(
    filename: Path | str,
    name: str,
    mesh: dolfinx.mesh.Mesh,
    time: float | None = None,
    backend_args: dict[str, Any] | None = None,
    backend: str = "xdmf",
) -> dolfinx.fem.Function:
    """Read data from the nodes of a mesh.

    Note:
        The backend has to implement {py:class}`io4dolfinx.backends.read_point_data`.

    Args:
        filename: Path to file
        name: Name of the point data
        mesh: The corresponding :py:class:`dolfinx.mesh.Mesh`.
        time: Time-step to read from.
        backend_args: Additional arguments passed to the backend reader.
        backend: The IO backend to use.

    Returns:
        A function in the space equivalent to the mesh
        coordinate element (up to shape).
    """

    backend_cls = get_backend(backend)
    dataset, local_range_start = backend_cls.read_point_data(
        filename=filename, name=name, comm=mesh.comm, time=time, backend_args=backend_args
    )

    num_components = dataset.shape[1]

    # Create appropriate function space (based on coordinate map)
    V = create_geometry_function_space(mesh, num_components)
    uh = dolfinx.fem.Function(V, name=name, dtype=dataset.dtype)
    # Assume that mesh is first order for now
    x_dofmap = mesh.geometry.dofmap
    igi = np.array(mesh.geometry.input_global_indices, dtype=np.int64)

    # This is dependent on how the data is read in. If distributed equally this is correct
    global_geom_input = igi[x_dofmap]

    if backend_cls.read_mode == ReadMode.parallel:
        num_nodes_global = mesh.geometry.index_map().size_global
        global_geom_owner = index_owner(mesh.comm, global_geom_input.reshape(-1), num_nodes_global)
    elif backend_cls.read_mode == ReadMode.serial:
        # This is correct if everything is read in on rank 0
        global_geom_owner = np.zeros(len(global_geom_input.flatten()), dtype=np.int32)
    else:
        raise NotImplementedError(f"{backend_cls.read_mode} not implemented")

    for i in range(num_components):
        arr_i = send_dofs_and_recv_values(
            global_geom_input.reshape(-1),
            global_geom_owner,
            mesh.comm,
            dataset[:, i],
            local_range_start,
        )
        dof_pos = x_dofmap.reshape(-1) * num_components + i
        uh.x.array[dof_pos] = arr_i
    uh.x.scatter_forward()
    return uh
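# A minimal usage sketch (comment only; file, dataset name and time are hypothetical):
#
#   from io4dolfinx.readers import read_point_data
#
#   uh = read_point_data("data.xdmf", "pressure", mesh, time=0.0)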

def read_cell_data(
    filename: Path | str,
    name: str,
    mesh: dolfinx.mesh.Mesh,
    time: float | None = None,
    backend_args: dict[str, Any] | None = None,
    backend: str = "xdmf",
) -> dolfinx.fem.Function:
    """Read data from the cells of a mesh.

    Note:
        The backend has to implement {py:class}`io4dolfinx.backends.read_cell_data`.

    Args:
        filename: Path to file
        name: Name of the cell data
        mesh: The corresponding :py:class:`dolfinx.mesh.Mesh`.
        time: Time-step to read from.
        backend_args: Additional arguments passed to the backend reader.
        backend: The IO backend to use.

    Returns:
        A function in a DG-0 space on the mesh. Cells not found in the input are set to zero.
    """

    backend_cls = get_backend(backend)

    topology, dofs = backend_cls.read_cell_data(
        filename=filename, name=name, comm=mesh.comm, time=time, backend_args=backend_args
    )
    num_components = dofs.shape[1]
    shape: tuple[int, ...]
    if num_components == 1:
        shape = ()
    else:
        shape = (num_components,)
    V = dolfinx.fem.functionspace(mesh, ("DG", 0, shape))
    u = dolfinx.fem.Function(V, dtype=dofs.dtype)
    data_array = u.x.array.reshape(-1, num_components)
    for i in range(num_components):
        local_entities, local_values = dolfinx.io.distribute_entity_data(
            mesh, mesh.topology.dim, topology, dofs[:, i].copy()
        )
        adj = dolfinx.graph.adjacencylist(local_entities)
        order = np.arange(len(local_values), dtype=np.int32)
        mt = dolfinx.mesh.meshtags_from_entities(mesh, mesh.topology.dim, adj, order)
        data_array[mt.indices, i] = local_values[mt.values]
    u.x.scatter_forward()
    return u
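# A minimal usage sketch (comment only; file, dataset name and time are hypothetical):
#
#   from io4dolfinx.readers import read_cell_data
#
#   ch = read_cell_data("data.xdmf", "material", mesh, time=0.0)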