Coverage for src / beamme / mesh_creation_functions / beam_parametric_curve.py: 98%

161 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-02-23 08:08 +0000

1# The MIT License (MIT) 

2# 

3# Copyright (c) 2018-2026 BeamMe Authors 

4# 

5# Permission is hereby granted, free of charge, to any person obtaining a copy 

6# of this software and associated documentation files (the "Software"), to deal 

7# in the Software without restriction, including without limitation the rights 

8# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 

9# copies of the Software, and to permit persons to whom the Software is 

10# furnished to do so, subject to the following conditions: 

11# 

12# The above copyright notice and this permission notice shall be included in 

13# all copies or substantial portions of the Software. 

14# 

15# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 

16# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 

17# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 

18# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 

19# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 

20# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 

21# THE SOFTWARE. 

22"""This file has functions to create a beam from a parametric curve.""" 

23 

24from typing import Callable as _Callable 

25from typing import Type as _Type 

26 

27import numpy as _np 

28import scipy.integrate as _integrate 

29from autograd import jacobian as _jacobian 

30from scipy.integrate import quad as _quad 

31from scipy.interpolate import interp1d as _interp1d 

32 

33from beamme.core.element_beam import Beam as _Beam 

34from beamme.core.geometry_set import GeometryName as _GeometryName 

35from beamme.core.material import MaterialBeamBase as _MaterialBeamBase 

36from beamme.core.mesh import Mesh as _Mesh 

37from beamme.core.rotation import Rotation as _Rotation 

38from beamme.core.rotation import smallest_rotation as _smallest_rotation 

39from beamme.mesh_creation_functions.beam_generic import ( 

40 create_beam_mesh_generic as _create_beam_mesh_generic, 

41) 

42 

43 

class _ArcLengthEvaluation:
    """Class to allow evaluation of the arc length S(t) and the inverse mapping
    t(S).

    This class uses precomputed samples to interpolate between arc
    length values. This is much more efficient than root finding
    algorithms and should provide a suitable accuracy.
    """

    def __init__(
        self,
        function_derivative: _Callable,
        interval: tuple[float, float],
        n_precomputed_intervals: int = 100,
        scipy_integrate: bool = True,
        scipy_integrate_points: list[float] | None = None,
        method: str = "arc-length",
    ) -> None:
        """Initialize the arc length evaluator and precomputed samples.

        Args:
            function_derivative:
                Function that returns the tangent vector along the curve for a given
                parameter value t.
            interval:
                Start and end values for the parameter coordinate of the curve,
                must be in ascending order.
            n_precomputed_intervals:
                Number of intervals to use for the pre-computation. More intervals
                lead to higher accuracy.
            scipy_integrate:
                If true, the one single arc length integration is performed with scipy's
                quad function. This is an adaptive method and gives good estimates on
                subdivisions of the total interval.
            scipy_integrate_points:
                If scipy_integrate is true, this can be used to provide additional points
                for the adaptive integration. This can be useful if there are known points
                along the curve where Jacobian has a kink.
            method:
                Method to use for evaluation of nodal positions along the curve:
                - "arc-length": Uniform spacing along the arc-length of the curve. This means
                  that elements along a curve will have equal length in space.
                - "parametric": Uniform spacing along the parameter coordinate of the curve.
                  This means that elements along a curve will have equal length in parameter
                  space, but not necessarily in physical space.
                - "parametric_consistent_middle_nodes": Same as `parametric`, but element
                  middle nodes are adjusted to be consistent with the arc-length mapping.
                  This means that elements along a curve will have equal length in parameter
                  space, but the middle nodes are placed such that the elements themselves are
                  not distorted.

        Raises:
            ValueError: If scipy_integrate_points is given while scipy_integrate
                is False (the points would silently be ignored otherwise).
        """

        if not scipy_integrate and scipy_integrate_points is not None:
            raise ValueError(
                "scipy_integrate_points cannot be provided if scipy_integrate is False!"
            )

        self.function_derivative = function_derivative
        self.interval = interval
        self.n_precomputed_intervals = n_precomputed_intervals
        self.scipy_integrate = scipy_integrate
        self.scipy_integrate_points = scipy_integrate_points
        self.method = method

        # Precompute the (t, S) sample grids and build the S(t) / t(S)
        # interpolants from them. Both helpers set attributes on self:
        # _compute_samples -> self.t_grid, self.S_grid
        # _compute_interpolation_functions -> self.S_from_t, self.t_from_S
        self._compute_samples()
        self._compute_interpolation_functions()

    def _compute_samples(self) -> None:
        """Compute the samples for the arc length mapping.

        This function computes the arc length S(t) at a set of sample
        points along the parameter coordinate t with the accumulative
        Simpson integration.
        """

        if self.scipy_integrate:
            # Arc-length increment ds/dt, i.e., the norm of the tangent vector
            # at a single parameter value t (the derivative function expects a
            # sequence of parameter values, hence the [t] / [0]).
            ds_dt = lambda t: _np.linalg.norm(self.function_derivative([t])[0])
            integral = _quad(
                ds_dt,
                self.interval[0],
                self.interval[1],
                points=self.scipy_integrate_points,
                full_output=True,
            )
            # With full_output=True, quad additionally returns an info dict
            # describing its adaptive subdivision: the first "last" entries of
            # "alist"/"blist" are the left/right ends of each subinterval and
            # "rlist" holds the integral approximation on each of them.
            integral_data = integral[2]
            n_segments = integral_data["last"]

            # Sort the segments such that they are in ascending order along the
            # integration integral.
            interval_sort = _np.argsort(integral_data["alist"][:n_segments])
            intervals_integral = integral_data["rlist"][interval_sort]
            intervals_left_end = integral_data["alist"][interval_sort]
            intervals_right_end = integral_data["blist"][interval_sort]

            self.t_grid = _np.zeros(n_segments * self.n_precomputed_intervals + 1)
            self.S_grid = _np.zeros(n_segments * self.n_precomputed_intervals + 1)
            local_integral_start = 0.0
            for i_segment in range(n_segments):
                segment_a = intervals_left_end[i_segment]
                segment_b = intervals_right_end[i_segment]

                # Each segment contributes n_precomputed_intervals + 1 samples.
                # The end index deliberately overlaps the next segment's start
                # index by one point, so the shared boundary value is simply
                # overwritten with the same value.
                global_start_index = i_segment * self.n_precomputed_intervals
                global_end_index = (i_segment + 1) * self.n_precomputed_intervals + 1

                t_local = _np.linspace(
                    segment_a, segment_b, self.n_precomputed_intervals + 1
                )
                self.t_grid[global_start_index:global_end_index] = t_local

                local_integral = intervals_integral[i_segment]

                tangents = self.function_derivative(t_local)
                ds_at_t_samples = _np.linalg.norm(tangents, axis=1)
                S_local = _integrate.cumulative_simpson(
                    y=ds_at_t_samples, x=t_local, initial=0.0
                )
                # Rescale the Simpson result so that its end value matches the
                # (more accurate) adaptive quad integral over this segment,
                # then shift by the accumulated arc length of all previous
                # segments.
                S_local *= local_integral / S_local[-1]
                S_local += local_integral_start

                self.S_grid[global_start_index:global_end_index] = S_local

                local_integral_start += local_integral

        else:
            # Uniform grid in t for sampling.
            self.t_grid = _np.linspace(
                self.interval[0], self.interval[1], self.n_precomputed_intervals + 1
            )

            # Evaluate ds at all grid points.
            tangents = self.function_derivative(self.t_grid)
            ds_at_t_samples = _np.linalg.norm(tangents, axis=1)

            self.S_grid = _integrate.cumulative_simpson(
                y=ds_at_t_samples, x=self.t_grid, initial=0.0
            )

    def _compute_interpolation_functions(self) -> None:
        """Setup the interpolation functions for S(t) and t(S).

        Both directions use cubic interpolation on the precomputed grids;
        extrapolation is enabled so evaluations slightly outside the sampled
        range (e.g. due to floating point round-off at the interval ends) do
        not raise.
        """
        self.S_from_t = _interp1d(
            self.t_grid,
            self.S_grid,
            kind="cubic",
            fill_value="extrapolate",
            assume_sorted=True,
        )
        self.t_from_S = _interp1d(
            self.S_grid,
            self.t_grid,
            kind="cubic",
            fill_value="extrapolate",
            assume_sorted=True,
        )

    def approximate_total_arc_length(self) -> float:
        """Approximate the total arc length along the curve.

        This value is only needed to choose the number of elements along
        the curve.
        """
        return self.S_grid[-1]

    def get_total_arc_length(self) -> float:
        """Get the total arc length along the curve.

        This function might return a different arc-length than `approximate_total_arc_length`,
        if the integral is adaptively refined in `evaluate_all`.
        """
        return self.S_grid[-1]

    def evaluate_all(
        self, evaluation_points: _np.ndarray, middle_node_flags: _np.ndarray
    ) -> tuple[_np.ndarray, _np.ndarray]:
        """Evaluate the parameter coordinates corresponding to each node.

        Args:
            evaluation_points:
                Evaluation points in the interval [0, 1]. Depending on the integration method,
                this can be in normalized parameter space or arc-length space.
            middle_node_flags:
                Boolean array indicating which of the evaluation points
                are middle nodes (True) and which are nodal points (False).

        Returns:
            t_evaluate:
                Parameter coordinates along the curve for each evaluation point.
            S_evaluate:
                Arc-length coordinates along the curve for each evaluation point.

        Raises:
            ValueError: If self.method is not one of the supported methods.
        """

        # Todo: Check if it makes sense to adaptively refine the arc-length
        # integration here.

        if self.method == "arc-length":
            # Uniform spacing in arc length: map [0, 1] -> [0, S_total], then
            # invert to parameter space.
            S_evaluate = evaluation_points * self.get_total_arc_length()
            t_evaluate = self.t_from_S(S_evaluate)

        elif self.method == "parametric":
            # Uniform spacing in parameter space: affine map [0, 1] -> interval.
            interval_length = self.interval[1] - self.interval[0]
            t_evaluate = self.interval[0] + evaluation_points * interval_length
            S_evaluate = self.S_from_t(t_evaluate)

        elif self.method == "parametric_consistent_middle_nodes":
            interval_length = self.interval[1] - self.interval[0]
            is_nodal_point = ~middle_node_flags
            nodal_evaluation_points = evaluation_points[is_nodal_point]

            # NOTE(review): this branch assumes the points are ordered
            # element-wise with the same number of middle nodes in every
            # element (so the divisions below are exact) — presumably
            # guaranteed by the caller in create_beam_mesh_generic; verify.
            n_el = len(nodal_evaluation_points) - 1
            middle_nodes = int(((len(evaluation_points) - 1) / n_el) - 1)

            t_evaluate = _np.zeros_like(evaluation_points)
            S_evaluate = _np.zeros_like(evaluation_points)

            # Evaluate the nodal points based on direct parametric mapping.
            t_evaluate[is_nodal_point] = (
                self.interval[0] + nodal_evaluation_points * interval_length
            )
            S_evaluate[is_nodal_point] = self.S_from_t(t_evaluate[is_nodal_point])

            # For the middle nodes, do an interpolation in arc-length space.
            for i_interval in range(n_el):
                eval_a = nodal_evaluation_points[i_interval]
                eval_b = nodal_evaluation_points[i_interval + 1]

                # Arc lengths at the two end nodes of this element; the end
                # nodes sit at stride (middle_nodes + 1) in the global arrays.
                S_a = S_evaluate[i_interval * (middle_nodes + 1)]
                S_b = S_evaluate[(i_interval + 1) * (middle_nodes + 1)]

                for i_middle in range(middle_nodes):
                    index_node = i_interval * (middle_nodes + 1) + i_middle + 1
                    eval_middle_node = evaluation_points[index_node]
                    # Linear interpolation of the arc length between the two
                    # element end nodes, then map back to parameter space.
                    factor = (eval_middle_node - eval_a) / (eval_b - eval_a)
                    S = S_a + factor * (S_b - S_a)
                    t_evaluate[index_node] = self.t_from_S(S)
                    S_evaluate[index_node] = S

        else:
            raise ValueError(f"Unknown method {self.method} for arc-length evaluation!")

        return (t_evaluate, S_evaluate)

283 

284 

def create_beam_mesh_parametric_curve(
    mesh: _Mesh,
    beam_class: _Type[_Beam],
    material: _MaterialBeamBase,
    function: _Callable,
    interval: tuple[float, float],
    *,
    output_length: bool = False,
    function_derivative: _Callable | None = None,
    function_rotation: _Callable | None = None,
    vectorized: bool = False,
    arc_length_integrator_kwargs: dict | None = None,
    **kwargs,
) -> _GeometryName | tuple[_GeometryName, float]:
    """Generate a beam from a parametric curve.

    Integration along the beam is performed with scipy, and if the gradient
    is not explicitly provided, it is calculated with the numpy wrapper
    autograd.

    Args:
        mesh:
            Mesh that the curve will be added to.
        beam_class:
            Class of beam that will be used for this line.
        material:
            Material for this line.
        function:
            3D-parametric curve that represents the beam axis. If only a 2D
            point is returned, the triad creation is simplified. If
            mathematical functions are used, they have to come from the
            wrapper autograd.numpy.
        interval:
            Start and end values for the parameter of the curve, must be in
            ascending order.
        output_length:
            If this is true, the function returns a tuple containing the
            created sets and the total arc length along the integrated
            function.
        function_derivative:
            Explicitly provide the Jacobian of the centerline position
            (function of the curve parameter returning a vector in R3).
        function_rotation:
            If this argument is given, the triads are computed with this
            function, on the same interval as the position function. Must
            return a Rotation object.
            If no function_rotation is given, the rotation of the first node
            is calculated automatically and all subsequent nodal rotations
            are calculated based on a smallest rotation mapping onto the
            curve tangent vector.
        vectorized:
            If true, the function and function_derivative are assumed to be
            vectorized, i.e., they can take arrays of parameter values and
            return arrays of positions/tangents.
        arc_length_integrator_kwargs:
            Additional arguments for the arc-length integrator (see
            _ArcLengthEvaluation).
        **kwargs:
            Passed on to create_beam_mesh_generic, most notably:
            n_el: Number of equally spaced beam elements along the line.
                Defaults to 1. Mutually exclusive with l_el.
            l_el: Desired length of beam elements. Mutually exclusive with
                n_el. Be aware, that this length might not be achieved, if
                the elements are warped after they are created.

    Returns:
        A GeometryName set with the 'start' and 'end' node of the curve and
        a 'line' set with all nodes of the curve. If output_length is true,
        a tuple of (sets, total arc length) is returned instead.

    Raises:
        ValueError: If the interval is not a 1D, two-entry, ascending
            sequence, if the function derivative cannot be determined for
            vectorized input, or if the position function does not return a
            2d or 3d point.
    """

    # Set default values for optional arguments.
    if arc_length_integrator_kwargs is None:
        arc_length_integrator_kwargs = {}

    # To avoid issues with automatic differentiation, we need to ensure that
    # the interval values are of type float.
    interval_array = _np.asarray(interval, dtype=float)

    # Validate interval shape, length and order.
    if interval_array.ndim != 1:
        raise ValueError(
            f"Interval must be a 1D sequence of exactly two values, got array with shape {interval_array.shape}."
        )
    if interval_array.size != 2:
        raise ValueError(
            f"Interval must contain exactly two values, got {interval_array.size}."
        )
    if interval_array[0] >= interval_array[1]:
        raise ValueError(f"Interval must be in ascending order, got {interval_array}.")

    # Ensure that the function can be evaluated for multiple values of t at once.
    if not vectorized:
        original_function = function

        def function(t_array):
            """Perform a vectorized evaluation of the function."""
            return _np.array([original_function(t) for t in t_array])

        if function_derivative is not None:
            original_function_derivative = function_derivative
        else:
            # If no function derivative is given, we can use autograd to
            # compute it. We need to make sure that the autograd jacobian can
            # also handle vectorized inputs.
            original_function_derivative = _jacobian(original_function)

        def function_derivative(t_array):
            """Perform a vectorized evaluation of the function derivative."""
            return _np.array([original_function_derivative(t) for t in t_array])

    # For vectorized input we cannot fall back to autograd, so the derivative
    # must have been provided explicitly.
    if function_derivative is None:
        raise ValueError(
            "Function derivative could not be determined! For vectorized inputs, "
            "the function derivative must be explicitly provided."
        )

    # Check size and type of position function
    test_evaluation_of_function = function([interval_array[0]])[0]

    if len(test_evaluation_of_function) == 2:
        is_3d_curve = False
    elif len(test_evaluation_of_function) == 3:
        is_3d_curve = True
    else:
        raise ValueError("Function must return either 2d or 3d curve!")

    # Setup the arc length integration object.
    arc_length_evaluator = _ArcLengthEvaluation(
        function_derivative, interval_array, **arc_length_integrator_kwargs
    )

    class _BeamFunctionGenerator:
        """This class manages the creation of the actual beam nodes and
        rotations."""

        def __init__(
            self,
            interval_array: _np.ndarray,
            function: _Callable,
            function_derivative: _Callable,
            function_rotation: _Callable | None,
            is_3d_curve: bool,
        ):
            """Initialize the object and define a starting triad."""
            self.interval_array = interval_array
            self.function = function
            self.function_derivative = function_derivative
            self.function_rotation = function_rotation
            self.is_3d_curve = is_3d_curve

            if self.is_3d_curve:
                # Pick the global axis (y or z) that is "most orthogonal" to
                # the initial tangent as the second basis direction for the
                # starting triad. The 2d case needs no starting triad.
                r_prime = self.function_derivative([self.interval_array[0]])[0]
                if abs(_np.dot(r_prime, [0, 0, 1])) < abs(_np.dot(r_prime, [0, 1, 0])):
                    t2_temp = [0, 0, 1]
                else:
                    t2_temp = [0, 1, 0]
                self.last_created_triad = _Rotation.from_basis(r_prime, t2_temp)

        def evaluate_positions_and_rotations(
            self, evaluation_positions: _np.ndarray, middle_node_flags: _np.ndarray
        ) -> tuple[_np.ndarray, list[_Rotation], _np.ndarray]:
            """This function evaluates the positions and rotations for given
            node positions within the interval [0,1].

            Args:
                evaluation_positions:
                    Node positions in the interval [0, 1]. Depending on the integration method,
                    this can be in normalized parameter space or arc-length space.
                middle_node_flags:
                    Boolean array indicating which of the node positions
                    are middle nodes (True) and which are inter element nodes (False).

            Returns:
                coordinates:
                    Physical coordinates of all nodes points along the curve.
                rotations:
                    Rotations at all nodes points along the curve.
                S_evaluate:
                    Arc-length coordinates of all nodes points along the curve.
            """

            # Get the nodal parameter coordinates and the nodal arc-lengths.
            t_evaluate, S_evaluate = arc_length_evaluator.evaluate_all(
                evaluation_positions, middle_node_flags
            )

            # Position at S.
            coordinates = _np.zeros((len(evaluation_positions), 3))
            function_evaluation = self.function(t_evaluate)
            if self.is_3d_curve:
                coordinates[:, :] = function_evaluation
            else:
                # 2d curves live in the x-y plane, the z coordinate stays 0.
                coordinates[:, :2] = function_evaluation

            # Rotation at S.
            rotations = []
            if self.function_rotation is not None:
                for t in t_evaluate:
                    rotations.append(self.function_rotation(t))

            else:
                tangents = self.function_derivative(t_evaluate)
                for r_prime in tangents:
                    if self.is_3d_curve:
                        # Create the next triad via the smallest rotation mapping based
                        # on the last triad.
                        rot = _smallest_rotation(self.last_created_triad, r_prime)
                        self.last_created_triad = rot.copy()
                    else:
                        # The rotation simplifies in the 2d case.
                        rot = _Rotation([0, 0, 1], _np.arctan2(r_prime[1], r_prime[0]))
                    rotations.append(rot)

            # Return the needed values for beam creation.
            return (coordinates, rotations, S_evaluate)

    # Create the beam in the mesh
    created_sets = _create_beam_mesh_generic(
        mesh,
        beam_class=beam_class,
        material=material,
        beam_function_evaluate_positions_and_rotations=True,
        beam_function=_BeamFunctionGenerator(
            interval_array,
            function,
            function_derivative,
            function_rotation,
            is_3d_curve,
        ),
        interval=(0.0, 1.0),
        interval_length=arc_length_evaluator.approximate_total_arc_length(),
        **kwargs,
    )

    if output_length:
        return (created_sets, arc_length_evaluator.get_total_arc_length())
    else:
        return created_sets