Coverage for sources/emcdproj/website.py: 68%

210 statements  

coverage.py v7.9.1, created at 2025-06-29 21:50 +0000

1# vim: set filetype=python fileencoding=utf-8: 

2# -*- coding: utf-8 -*- 

3 

4#============================================================================# 

5# # 

6# Licensed under the Apache License, Version 2.0 (the "License"); # 

7# you may not use this file except in compliance with the License. # 

8# You may obtain a copy of the License at # 

9# # 

10# http://www.apache.org/licenses/LICENSE-2.0 # 

11# # 

12# Unless required by applicable law or agreed to in writing, software # 

13# distributed under the License is distributed on an "AS IS" BASIS, # 

14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # 

15# See the License for the specific language governing permissions and # 

16# limitations under the License. # 

17# # 

18#============================================================================# 

19 

20 

21''' Static website maintenance utilities for projects. ''' 

22 

23 

24from __future__ import annotations 

25 

26import jinja2 as _jinja2 

27 

28from . import __ 

29from . import exceptions as _exceptions 

30from . import interfaces as _interfaces 

31 

32 

33class CommandDispatcher( 

34 _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ), 

35): 

36 ''' Dispatches commands for static website maintenance. ''' 

37 

38 command: __.typx.Union[ 

39 __.typx.Annotated[ 

40 SurveyCommand, 

41 __.tyro.conf.subcommand( 'survey', prefix_name = False ), 

42 ], 

43 __.typx.Annotated[ 

44 UpdateCommand, 

45 __.tyro.conf.subcommand( 'update', prefix_name = False ), 

46 ], 

47 ] 

48 

49 async def __call__( 

50 self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay 

51 ) -> None: 

52 ictr( 1 )( self.command ) 

53 await self.command( auxdata = auxdata, display = display ) 

54 

55 

56class SurveyCommand( 

57 _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ), 

58): 

59 ''' Surveys release versions published in static website. ''' 

60 

61 use_extant: __.typx.Annotated[ 

62 bool, 

63 __.typx.Doc( ''' Fetch publication branch and use tarball. ''' ), 

64 ] = False 

65 

66 async def __call__( 

67 self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay 

68 ) -> None: 

69 survey( auxdata, use_extant = self.use_extant ) 

70 

71 

72class UpdateCommand( 

73 _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ), 

74): 

75 ''' Updates static website for particular release version. ''' 

76 

77 version: __.typx.Annotated[ 

78 str, 

79 __.typx.Doc( ''' Release version to update. ''' ), 

80 __.tyro.conf.Positional, 

81 ] 

82 

83 use_extant: __.typx.Annotated[ 

84 bool, 

85 __.typx.Doc( ''' Fetch publication branch and use tarball. ''' ), 

86 ] = False 

87 

88 production: __.typx.Annotated[ 

89 bool, 

90 __.typx.Doc( ''' Update publication branch with new tarball.  

91 Implies --use-extant to prevent data loss. ''' ), 

92 ] = False 

93 

94 async def __call__( 

95 self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay 

96 ) -> None: 

97 update( 

98 auxdata, self.version, 

99 use_extant = self.use_extant, 

100 production = self.production ) 
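# Illustrative only: these command dataclasses are exposed through tyro as the
# 'survey' and 'update' subcommands (prefix_name = False). The hosting CLI is
# therefore expected to accept invocations roughly like the ones below; the
# program name and any enclosing command group are assumptions, not defined here.
#
#     <prog> ... survey --use-extant
#     <prog> ... update 1.2.0 --production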

101 

102 

103class Locations( metaclass = __.ImmutableDataclass ): 

104 ''' Locations associated with website maintenance. ''' 

105 

106 project: __.Path 

107 auxiliary: __.Path 

108 publications: __.Path 

109 archive: __.Path 

110 artifacts: __.Path 

111 website: __.Path 

112 coverage: __.Path 

113 index: __.Path 

114 versions: __.Path 

115 templates: __.Path 

116 

117 @classmethod 

118 def from_project_anchor( 

119 selfclass, 

120 auxdata: __.Globals, 

121 anchor: __.Absential[ __.Path ] = __.absent, 

122 ) -> __.typx.Self: 

123 ''' Produces locations from project anchor, if provided. 

124 

125 If the project anchor is not given, then attempts to discover it. 

126 ''' 

127 if __.is_absent( anchor ):    127 ↛ 130 (line 127 didn't jump to line 130 because the condition on line 127 was never true)

128 # TODO: Discover missing anchor via directory traversal, 

129 # seeking VCS markers. 

130 project = __.Path( ).resolve( strict = True ) 

131 else: project = anchor.resolve( strict = True ) 

132 auxiliary = project / '.auxiliary' 

133 publications = auxiliary / 'publications' 

134 templates = auxdata.distribution.provide_data_location( 'templates' ) 

135 return selfclass( 

136 project = project, 

137 auxiliary = auxiliary, 

138 publications = publications, 

139 archive = publications / 'website.tar.xz', 

140 artifacts = auxiliary / 'artifacts', 

141 website = auxiliary / 'artifacts/website', 

142 coverage = auxiliary / 'artifacts/website/coverage.svg', 

143 index = auxiliary / 'artifacts/website/index.html', 

144 versions = auxiliary / 'artifacts/website/versions.json', 

145 templates = templates ) 
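# For orientation, the layout which from_project_anchor derives from the project
# root (paths follow directly from the assignments above; templates comes from
# the distribution's packaged data rather than the project tree):
#
#     <project>/
#         .auxiliary/
#             publications/website.tar.xz    <- archive
#             artifacts/                     <- artifacts
#                 website/                   <- website
#                     coverage.svg           <- coverage
#                     index.html             <- index
#                     versions.json          <- versions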

146 

147 

148def survey( 

149 auxdata: __.Globals, *, 

150 project_anchor: __.Absential[ __.Path ] = __.absent, 

151 use_extant: bool = False 

152) -> None: 

153 ''' Surveys release versions published in static website. 

154 

155 Lists all versions from the versions manifest, showing their 

156 available documentation types and highlighting the latest version. 

157 ''' 

158 locations = Locations.from_project_anchor( auxdata, project_anchor ) 

159 

160 # Handle --use-extant flag: fetch publication branch and check out the tarball 

161 if use_extant: 

162 _fetch_publication_branch_and_tarball( locations ) 

163 # Extract the fetched tarball to view published versions 

164 if locations.archive.is_file( ): 

165 from tarfile import open as tarfile_open 

166 if locations.website.is_dir( ): 

167 __.shutil.rmtree( locations.website ) 

168 locations.website.mkdir( exist_ok = True, parents = True ) 

169 with tarfile_open( locations.archive, 'r:xz' ) as archive: 

170 archive.extractall( path = locations.website ) # noqa: S202 

171 

172 if not locations.versions.is_file( ): 

173 context = "published" if use_extant else "local" 

174 print( f"No versions manifest found for {context} website. " 

175 f"Run 'website update' first." ) 

176 return 

177 with locations.versions.open( 'r' ) as file: 

178 data = __.json.load( file ) 

179 versions = data.get( 'versions', { } ) 

180 latest = data.get( 'latest_version' ) 

181 if not versions: 

182 context = "published" if use_extant else "local" 

183 print( f"No versions found in {context} manifest." ) 

184 return 

185 context = "Published" if use_extant else "Local" 

186 print( f"{context} versions:" ) 

187 for version, species in versions.items( ): 

188 marker = " (latest)" if version == latest else "" 

189 species_list = ', '.join( species ) if species else "none" 

190 print( f" {version}{marker}: {species_list}" ) 

191 

192 

193def update( 

194 auxdata: __.Globals, 

195 version: str, *, 

196 project_anchor: __.Absential[ __.Path ] = __.absent, 

197 use_extant: bool = False, 

198 production: bool = False 

199) -> None: 

200 ''' Updates project website with latest documentation and coverage. 

201 

202 Processes the specified version, copies documentation artifacts, 

203 updates version information, and generates coverage badges. 

204 ''' 

205 ictr( 2 )( version ) 

206 # TODO: Validate version string format. 

207 from tarfile import open as tarfile_open 

208 locations = Locations.from_project_anchor( auxdata, project_anchor ) 

209 locations.publications.mkdir( exist_ok = True, parents = True ) 

210 # --production implies --use-extant to prevent clobbering existing versions 

211 if use_extant or production:    211 ↛ 212 (line 211 didn't jump to line 212 because the condition on line 211 was never true)

212 _fetch_publication_branch_and_tarball( locations ) 

213 if locations.website.is_dir( ): __.shutil.rmtree( locations.website ) 

214 locations.website.mkdir( exist_ok = True, parents = True ) 

215 if locations.archive.is_file( ): 

216 with tarfile_open( locations.archive, 'r:xz' ) as archive: 

217 archive.extractall( path = locations.website ) # noqa: S202 

218 available_species = _update_available_species( locations, version ) 

219 j2context = _jinja2.Environment( 

220 loader = _jinja2.FileSystemLoader( locations.templates ), 

221 autoescape = True ) 

222 index_data = _update_versions_json( locations, version, available_species ) 

223 _enhance_index_data_with_stable_dev( index_data ) 

224 _create_stable_dev_directories( locations, index_data ) 

225 _update_index_html( locations, j2context, index_data ) 

226 if ( locations.artifacts / 'coverage-pytest' ).is_dir( ): 

227 _update_coverage_badge( locations, j2context ) 

228 _update_version_coverage_badge( locations, j2context, version ) 

229 ( locations.website / '.nojekyll' ).touch( ) 

230 from .filesystem import chdir 

231 with chdir( locations.website ): # noqa: SIM117 

232 with tarfile_open( locations.archive, 'w:xz' ) as archive: 

233 archive.add( '.' ) 

234 if production: _update_publication_branch( locations, version ) 
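# A minimal programmatic sketch. Assumption: 'auxdata' is an initialized
# __.Globals instance supplied by the application's startup code; obtaining one
# is outside the scope of this module, and the version string is hypothetical.
#
#     update( auxdata, '1.2.0', use_extant = True )
#     update( auxdata, '1.2.0', production = True )   # also rewrites and pushes
#                                                     # the 'publication' branch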

235 

236 

237def _create_stable_dev_directories( 

238 locations: Locations, data: dict[ __.typx.Any, __.typx.Any ] 

239) -> None: 

240 ''' Creates stable/ and development/ directories with current releases. 

241 

242 Copies the content from the identified stable and development versions 

243 to stable/ and development/ directories to provide persistent URLs 

244 that don't change when new versions are released. 

245 ''' 

246 stable_version = data.get( 'stable_version' ) 

247 development_version = data.get( 'development_version' ) 

248 if stable_version:    248 ↛ 255 (line 248 didn't jump to line 255 because the condition on line 248 was always true)

249 stable_source = locations.website / stable_version 

250 stable_dest = locations.website / 'stable' 

251 if stable_dest.is_dir( ): 

252 __.shutil.rmtree( stable_dest ) 

253 if stable_source.is_dir( ):    253 ↛ 255 (line 253 didn't jump to line 255 because the condition on line 253 was always true)

254 __.shutil.copytree( stable_source, stable_dest ) 

255 if development_version:    255 ↛ 256 (line 255 didn't jump to line 256 because the condition on line 255 was never true)

256 dev_source = locations.website / development_version 

257 dev_dest = locations.website / 'development' 

258 if dev_dest.is_dir( ): 

259 __.shutil.rmtree( dev_dest ) 

260 if dev_source.is_dir( ): 

261 __.shutil.copytree( dev_source, dev_dest ) 

262 

263 

264def _enhance_index_data_with_stable_dev( 

265 data: dict[ __.typx.Any, __.typx.Any ] 

266) -> None: 

267 ''' Enhances index data with stable/development version information. 

268 

269 Identifies the latest stable release and latest development version 

270 from the versions data and adds them as separate entries for the 

271 stable/development table. 

272 ''' 

273 from packaging.version import Version 

274 versions = data.get( 'versions', { } ) 

275 if not versions:    275 ↛ 276 (line 275 didn't jump to line 276 because the condition on line 275 was never true)

276 data[ 'stable_dev_versions' ] = { } 

277 return 

278 stable_version = None 

279 development_version = None 

280 # Sort versions by packaging.version.Version for proper comparison 

281 sorted_versions = sorted( 

282 versions.items( ), 

283 key = lambda entry: Version( entry[ 0 ] ), 

284 reverse = True ) 

285 # Find latest stable (non-prerelease) and development (prerelease) versions 

286 for version_string, species in sorted_versions: 

287 version_obj = Version( version_string ) 

288 if not version_obj.is_prerelease and stable_version is None:    288 ↛ 290 (line 288 didn't jump to line 290 because the condition on line 288 was always true)

289 stable_version = ( version_string, species ) 

290 if version_obj.is_prerelease and development_version is None:    290 ↛ 291 (line 290 didn't jump to line 291 because the condition on line 290 was never true)

291 development_version = ( version_string, species ) 

292 if stable_version and development_version:    292 ↛ 293 (line 292 didn't jump to line 293 because the condition on line 292 was never true)

293 break 

294 stable_dev_versions: dict[ str, tuple[ str, ... ] ] = { } 

295 if stable_version:    295 ↛ 298 (line 295 didn't jump to line 298 because the condition on line 295 was always true)

296 stable_dev_versions[ 'stable (current)' ] = stable_version[ 1 ] 

297 data[ 'stable_version' ] = stable_version[ 0 ] 

298 if development_version:    298 ↛ 299 (line 298 didn't jump to line 299 because the condition on line 298 was never true)

299 stable_dev_versions[ 'development (current)' ] = ( 

300 development_version[ 1 ] ) 

301 data[ 'development_version' ] = development_version[ 0 ] 

302 data[ 'stable_dev_versions' ] = stable_dev_versions 
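# The stable/development split above leans on packaging's prerelease detection
# and semantic ordering; a quick illustration of that standard behavior:
#
#     >>> from packaging.version import Version
#     >>> Version( '1.2' ).is_prerelease        # eligible for 'stable (current)'
#     False
#     >>> Version( '1.3a1' ).is_prerelease      # eligible for 'development (current)'
#     True
#     >>> Version( '1.3a1' ) > Version( '1.2' ) # ordering is semantic, not lexical
#     True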

303 

304 

305def _extract_coverage( locations: Locations ) -> int: 

306 ''' Extracts coverage percentage from coverage report. 

307 

308 Reads the coverage XML report and calculates the overall line coverage 

309 percentage, rounded down to the nearest integer. 

310 ''' 

311 location = locations.artifacts / 'coverage-pytest/coverage.xml' 

312 if not location.exists( ): raise _exceptions.FileAwol( location ) 

313 from defusedxml import ElementTree 

314 root = ElementTree.parse( location ).getroot( ) # pyright: ignore 

315 if root is None: 

316 raise _exceptions.FileEmpty( location ) # pragma: no cover 

317 line_rate = root.get( 'line-rate' ) 

318 if not line_rate: 

319 raise _exceptions.FileDataAwol( 

320 location, 'line-rate' ) # pragma: no cover 

321 return __.math.floor( float( line_rate ) * 100 ) 
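# The report read above is the Cobertura-style XML which 'coverage xml' emits; its
# root element carries the overall line rate as a fraction (values illustrative):
#
#     <coverage line-rate="0.6842" branch-rate="0.55" ...>
#
# For that input, _extract_coverage returns floor( 0.6842 * 100 ) == 68.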

322 

323 

324def _fetch_publication_branch_and_tarball( locations: Locations ) -> None: 

325 ''' Fetches publication branch and checks out existing tarball. 

326 

327 Attempts to fetch the publication branch from origin and checkout 

328 the website tarball. Ignores failures if branch or tarball don't exist. 

329 ''' 

330 with __.ctxl.suppress( Exception ): 

331 __.subprocess.run( 

332 [ 'git', 'fetch', 'origin', 'publication:publication' ], 

333 cwd = locations.project, 

334 check = False, 

335 capture_output = True ) 

336 with __.ctxl.suppress( Exception ): 

337 __.subprocess.run( 

338 [ 'git', 'checkout', 'publication', '--', 

339 str( locations.archive ) ], 

340 cwd = locations.project, 

341 check = False, 

342 capture_output = True ) 
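# The two suppressed subprocess calls above are equivalent to running, from the
# project root (failures are tolerated because the branch or the tarball may not
# exist yet):
#
#     git fetch origin publication:publication
#     git checkout publication -- <project>/.auxiliary/publications/website.tar.xz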

343 

344 

345def _generate_coverage_badge_svg( 

346 locations: Locations, j2context: _jinja2.Environment 

347) -> str: 

348 ''' Generates coverage badge SVG content. 

349 

350 Returns the rendered SVG content for a coverage badge based on the 

351 current coverage percentage. Colors indicate coverage quality: 

352 - red: < 50% 

353 - yellow: 50-79% 

354 - green: >= 80% 

355 ''' 

356 coverage = _extract_coverage( locations ) 

357 color = ( 

358 'red' if coverage < 50 else ( # noqa: PLR2004 

359 'yellow' if coverage < 80 else 'green' ) ) # noqa: PLR2004 

360 label_text = 'coverage' 

361 value_text = f"{coverage}%" 

362 label_width = len( label_text ) * 6 + 10 

363 value_width = len( value_text ) * 6 + 15 

364 total_width = label_width + value_width 

365 template = j2context.get_template( 'coverage.svg.jinja' ) 

366 # TODO: Add error handling for template rendering failures. 

367 return template.render( 

368 color = color, 

369 total_width = total_width, 

370 label_text = label_text, 

371 value_text = value_text, 

372 label_width = label_width, 

373 value_width = value_width ) 
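# Worked example of the badge geometry above, assuming 68% coverage:
#
#     label_text  = 'coverage'  ->  label_width = 8 * 6 + 10 = 58
#     value_text  = '68%'       ->  value_width = 3 * 6 + 15 = 33
#     total_width = 58 + 33 = 91
#     color       = 'yellow'    (since 50 <= 68 < 80)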

374 

375 

376def _update_available_species( 

377 locations: Locations, version: str 

378) -> tuple[ str, ... ]: 

379 available_species: list[ str ] = [ ] 

380 for species in ( 'coverage-pytest', 'sphinx-html' ): 

381 origin = locations.artifacts / species 

382 if not origin.is_dir( ): continue 

383 destination = locations.website / version / species 

384 if destination.is_dir( ): __.shutil.rmtree( destination ) 

385 __.shutil.copytree( origin, destination ) 

386 available_species.append( species ) 

387 return tuple( available_species ) 

388 

389 

390def _update_coverage_badge( 

391 locations: Locations, j2context: _jinja2.Environment 

392) -> None: 

393 ''' Updates coverage badge SVG. 

394 

395 Generates a color-coded coverage badge based on the current coverage 

396 percentage and writes it to the main coverage.svg location. 

397 ''' 

398 svg_content = _generate_coverage_badge_svg( locations, j2context ) 

399 with locations.coverage.open( 'w' ) as file: 

400 file.write( svg_content ) 

401 

402 

403def _update_publication_branch( locations: Locations, version: str ) -> None: 

404 ''' Updates publication branch with new tarball. 

405 

406 Adds the tarball to git, commits to the publication branch, and pushes 

407 to origin. Uses the same approach as the GitHub workflow. 

408 ''' 

409 __.subprocess.run( 

410 [ 'git', 'add', str( locations.archive ) ], 

411 cwd = locations.project, 

412 check = True ) 

413 # Commit to publication branch without checkout 

414 # Get current tree hash 

415 tree_result = __.subprocess.run( 

416 [ 'git', 'write-tree' ], 

417 cwd = locations.project, 

418 check = True, 

419 capture_output = True, 

420 text = True ) 

421 tree_hash = tree_result.stdout.strip( ) 

422 # Create commit with publication branch as parent 

423 commit_result = __.subprocess.run( 

424 [ 'git', 'commit-tree', tree_hash, '-p', 'publication', 

425 '-m', f"Update documents for publication. ({version})" ], 

426 cwd = locations.project, 

427 check = True, 

428 capture_output = True, 

429 text = True ) 

430 commit_hash = commit_result.stdout.strip( ) 

431 __.subprocess.run( 

432 [ 'git', 'branch', '--force', 'publication', commit_hash ], 

433 cwd = locations.project, 

434 check = True ) 

435 __.subprocess.run( 

436 [ 'git', 'push', 'origin', 'publication:publication' ], 

437 cwd = locations.project, 

438 check = True ) 
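# For reference, the plumbing sequence above corresponds to (run from the project
# root; <tree> and <commit> stand for the captured hashes):
#
#     git add <project>/.auxiliary/publications/website.tar.xz
#     git write-tree                                          -> <tree>
#     git commit-tree <tree> -p publication \
#         -m "Update documents for publication. (<version>)"  -> <commit>
#     git branch --force publication <commit>
#     git push origin publication:publication
#
# The 'publication' branch is thus updated without ever being checked out into
# the working tree.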

439 

440 

441def _update_index_html( 

442 locations: Locations, 

443 j2context: _jinja2.Environment, 

444 data: dict[ __.typx.Any, __.typx.Any ], 

445) -> None: 

446 ''' Updates index.html with version information. 

447 

448 Generates the main index page showing all available versions and their 

449 associated documentation and coverage reports. 

450 ''' 

451 template = j2context.get_template( 'website.html.jinja' ) 

452 # TODO: Add error handling for template rendering failures. 

453 with locations.index.open( 'w' ) as file: 

454 file.write( template.render( **data ) ) 

455 

456 

457def _update_version_coverage_badge( 

458 locations: Locations, j2context: _jinja2.Environment, version: str 

459) -> None: 

460 ''' Updates version-specific coverage badge SVG. 

461 

462 Generates a coverage badge for the specific version and places it 

463 in the version's subtree. This allows each version to have its own 

464 coverage badge accessible at version/coverage.svg. 

465 ''' 

466 svg_content = _generate_coverage_badge_svg( locations, j2context ) 

467 version_coverage_path = locations.website / version / 'coverage.svg' 

468 with version_coverage_path.open( 'w' ) as file: 

469 file.write( svg_content ) 

470 

471 

472def _update_versions_json( 

473 locations: Locations, 

474 version: str, 

475 species: tuple[ str, ... ], 

476) -> dict[ __.typx.Any, __.typx.Any ]: 

477 ''' Updates versions.json with new version information. 

478 

479 Maintains a JSON file tracking all versions and their available 

480 documentation types. Versions are sorted in descending order, with 

481 the latest version marked separately. 

482 ''' 

483 # TODO: Add validation of version string format. 

484 # TODO: Consider file locking for concurrent update protection. 

485 from packaging.version import Version 

486 if not locations.versions.is_file( ): 

487 data: dict[ __.typx.Any, __.typx.Any ] = { 'versions': { } } 

488 with locations.versions.open( 'w' ) as file: 

489 __.json.dump( data, file, indent = 4 ) 

490 with locations.versions.open( 'r+' ) as file: 

491 data = __.json.load( file ) 

492 versions = data[ 'versions' ] 

493 versions[ version ] = species 

494 versions = dict( sorted( 

495 versions.items( ), 

496 key = lambda entry: Version( entry[ 0 ] ), 

497 reverse = True ) ) 

498 data[ 'latest_version' ] = next( iter( versions ) ) 

499 data[ 'versions' ] = versions 

500 file.seek( 0 ) 

501 __.json.dump( data, file, indent = 4 ) 

502 file.truncate( ) 

503 return data
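# Example of the resulting versions.json (version numbers and species are
# illustrative):
#
#     {
#         "versions": {
#             "1.1.0": [ "coverage-pytest", "sphinx-html" ],
#             "1.0.0": [ "sphinx-html" ]
#         },
#         "latest_version": "1.1.0"
#     }
#
# The species tuples serialize as JSON arrays; because the dictionary is sorted
# by packaging.version.Version in descending order, 'latest_version' is simply
# the first key.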