Coverage for sources/emcdproj/website.py: 68%

210 statements  

« prev     ^ index     » next       coverage.py v7.9.2, created at 2025-07-16 21:44 +0000

1# vim: set filetype=python fileencoding=utf-8: 

2# -*- coding: utf-8 -*- 

3 

4#============================================================================# 

5# # 

6# Licensed under the Apache License, Version 2.0 (the "License"); # 

7# you may not use this file except in compliance with the License. # 

8# You may obtain a copy of the License at # 

9# # 

10# http://www.apache.org/licenses/LICENSE-2.0 # 

11# # 

12# Unless required by applicable law or agreed to in writing, software # 

13# distributed under the License is distributed on an "AS IS" BASIS, # 

14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # 

15# See the License for the specific language governing permissions and # 

16# limitations under the License. # 

17# # 

18#============================================================================# 

19 

20 

21''' Static website maintenance utilities for projects. ''' 

22 

23 

24import jinja2 as _jinja2 

25 

26from . import __ 

27from . import exceptions as _exceptions 

28from . import interfaces as _interfaces 

29 

30 

class SurveyCommand(
    _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ),
):
    ''' Surveys release versions published in static website. '''

    # When true, fetch the 'publication' branch tarball and survey what is
    # actually published, rather than the locally-built website artifacts.
    use_extant: __.typx.Annotated[
        bool,
        __.typx.Doc( ''' Fetch publication branch and use tarball. ''' ),
    ] = False

    async def __call__(
        self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay
    ) -> None:
        ''' Delegates to module-level 'survey' with parsed options. '''
        survey( auxdata, use_extant = self.use_extant )

45 

46 

class UpdateCommand(
    _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ),
):
    ''' Updates static website for particular release version. '''

    # Release version to publish; positional on the command line.
    version: __.typx.Annotated[
        str,
        __.typx.Doc( ''' Release version to update. ''' ),
        __.tyro.conf.Positional,
    ]

    # When true, fetch the 'publication' branch tarball first so that
    # previously-published versions are preserved in the rebuilt site.
    use_extant: __.typx.Annotated[
        bool,
        __.typx.Doc( ''' Fetch publication branch and use tarball. ''' ),
    ] = False

    # When true, commit and push the new tarball to the publication branch.
    production: __.typx.Annotated[
        bool,
        __.typx.Doc( ''' Update publication branch with new tarball.
                        Implies --use-extant to prevent data loss. ''' ),
    ] = False

    async def __call__(
        self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay
    ) -> None:
        ''' Delegates to module-level 'update' with parsed options. '''
        update(
            auxdata, self.version,
            use_extant = self.use_extant,
            production = self.production )

76 

77 

class CommandDispatcher(
    _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ),
):
    ''' Dispatches commands for static website maintenance. '''

    # Union of available subcommands; tyro selects by subcommand name.
    command: __.typx.Union[
        __.typx.Annotated[
            SurveyCommand,
            __.tyro.conf.subcommand( 'survey', prefix_name = False ),
        ],
        __.typx.Annotated[
            UpdateCommand,
            __.tyro.conf.subcommand( 'update', prefix_name = False ),
        ],
    ]

    async def __call__(
        self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay
    ) -> None:
        ''' Invokes the selected subcommand. '''
        # NOTE(review): 'ictr' appears to be an injected debug tracer
        # (icecream-style) — confirm where it is registered as a builtin.
        ictr( 1 )( self.command )
        await self.command( auxdata = auxdata, display = display )

99 

100 

class Locations( __.immut.DataclassObject ):
    ''' Locations associated with website maintenance. '''

    project: __.Path
    auxiliary: __.Path
    publications: __.Path
    archive: __.Path
    artifacts: __.Path
    website: __.Path
    coverage: __.Path
    index: __.Path
    versions: __.Path
    templates: __.Path

    @classmethod
    def from_project_anchor(
        selfclass,
        auxdata: __.Globals,
        anchor: __.Absential[ __.Path ] = __.absent,
    ) -> __.typx.Self:
        ''' Produces locations from project anchor, if provided.

            If project anchor is not given, then attempt to discover it.
        '''
        # TODO: Discover missing anchor via directory traversal,
        #       seeking VCS markers.
        if __.is_absent( anchor ):
            project = __.Path( ).resolve( strict = True )
        else: project = anchor.resolve( strict = True )
        auxiliary = project / '.auxiliary'
        publications = auxiliary / 'publications'
        artifacts = auxiliary / 'artifacts'
        website = artifacts / 'website'
        return selfclass(
            project = project,
            auxiliary = auxiliary,
            publications = publications,
            archive = publications / 'website.tar.xz',
            artifacts = artifacts,
            website = website,
            coverage = website / 'coverage.svg',
            index = website / 'index.html',
            versions = website / 'versions.json',
            templates = auxdata.distribution.provide_data_location(
                'templates' ) )

144 

145 

def survey(
    auxdata: __.Globals, *,
    project_anchor: __.Absential[ __.Path ] = __.absent,
    use_extant: bool = False
) -> None:
    ''' Surveys release versions published in static website.

        Lists all versions from the versions manifest, showing their
        available documentation types and highlighting the latest version.
    '''
    locations = Locations.from_project_anchor( auxdata, project_anchor )
    if use_extant:
        # Pull the published tarball so the manifest reflects what is
        # actually live rather than the local build.
        _fetch_publication_branch_and_tarball( locations )
        if locations.archive.is_file( ):
            from tarfile import open as tarfile_open
            if locations.website.is_dir( ):
                __.shutil.rmtree( locations.website )
            locations.website.mkdir( exist_ok = True, parents = True )
            with tarfile_open( locations.archive, 'r:xz' ) as archive:
                archive.extractall( path = locations.website ) # noqa: S202
    context = "published" if use_extant else "local"
    if not locations.versions.is_file( ):
        print( f"No versions manifest found for {context} website. "
               f"Run 'website update' first." )
        return
    with locations.versions.open( 'r' ) as file:
        data = __.json.load( file )
    versions = data.get( 'versions', { } )
    latest = data.get( 'latest_version' )
    if not versions:
        print( f"No versions found in {context} manifest." )
        return
    print( f"{context.capitalize( )} versions:" )
    for version, species in versions.items( ):
        marker = " (latest)" if version == latest else ""
        species_list = ', '.join( species ) if species else "none"
        print( f" {version}{marker}: {species_list}" )

189 

190 

def update(
    auxdata: __.Globals,
    version: str, *,
    project_anchor: __.Absential[ __.Path ] = __.absent,
    use_extant: bool = False,
    production: bool = False
) -> None:
    ''' Updates project website with latest documentation and coverage.

        Processes the specified version, copies documentation artifacts,
        updates version information, and generates coverage badges.
    '''
    ictr( 2 )( version )
    # TODO: Validate version string format.
    from tarfile import open as tarfile_open
    locations = Locations.from_project_anchor( auxdata, project_anchor )
    locations.publications.mkdir( exist_ok = True, parents = True )
    # --production implies --use-extant so that previously-published
    # versions are not clobbered by a fresh tarball.
    if production or use_extant:
        _fetch_publication_branch_and_tarball( locations )
    if locations.website.is_dir( ):
        __.shutil.rmtree( locations.website )
    locations.website.mkdir( exist_ok = True, parents = True )
    if locations.archive.is_file( ):
        with tarfile_open( locations.archive, 'r:xz' ) as archive:
            archive.extractall( path = locations.website ) # noqa: S202
    species_available = _update_available_species( locations, version )
    index_data = _update_versions_json(
        locations, version, species_available )
    _enhance_index_data_with_stable_dev( index_data )
    _create_stable_dev_directories( locations, index_data )
    j2context = _jinja2.Environment(
        loader = _jinja2.FileSystemLoader( locations.templates ),
        autoescape = True )
    _update_index_html( locations, j2context, index_data )
    if ( locations.artifacts / 'coverage-pytest' ).is_dir( ):
        _update_coverage_badge( locations, j2context )
        _update_version_coverage_badge( locations, j2context, version )
    # GitHub Pages: suppress Jekyll processing of the published tree.
    ( locations.website / '.nojekyll' ).touch( )
    from .filesystem import chdir
    with chdir( locations.website ): # noqa: SIM117
        with tarfile_open( locations.archive, 'w:xz' ) as archive:
            archive.add( '.' )
    if production: _update_publication_branch( locations, version )

233 

234 

def _create_stable_dev_directories(
    locations: Locations, data: dict[ __.typx.Any, __.typx.Any ]
) -> None:
    ''' Creates stable/ and development/ directories with current releases.

        Copies the content from the identified stable and development
        versions to stable/ and development/ directories to provide
        persistent URLs that don't change when new versions are released.
    '''
    def refresh_alias( version: str, alias: str ) -> None:
        # Replaces the alias directory with a copy of the version subtree.
        # Remove any stale alias first so the copy never merges old content.
        source = locations.website / version
        destination = locations.website / alias
        if destination.is_dir( ):
            __.shutil.rmtree( destination )
        if source.is_dir( ):
            __.shutil.copytree( source, destination )

    # The two branches were previously duplicated verbatim; a shared helper
    # keeps their behavior identical by construction.
    stable_version = data.get( 'stable_version' )
    if stable_version:
        refresh_alias( stable_version, 'stable' )
    development_version = data.get( 'development_version' )
    if development_version:
        refresh_alias( development_version, 'development' )

260 

261 

def _enhance_index_data_with_stable_dev(
    data: dict[ __.typx.Any, __.typx.Any ]
) -> None:
    ''' Enhances index data with stable/development version information.

        Identifies the latest stable release and latest development version
        from the versions data and adds them as separate entries for the
        stable/development table.
    '''
    from packaging.version import Version
    versions = data.get( 'versions', { } )
    if not versions:
        data[ 'stable_dev_versions' ] = { }
        return
    stable = None
    development = None
    # Descending semantic-version order: the first match of each kind
    # encountered is therefore the latest of that kind.
    ordered = sorted(
        versions.items( ),
        key = lambda entry: Version( entry[ 0 ] ),
        reverse = True )
    for version_string, species in ordered:
        if Version( version_string ).is_prerelease:
            if development is None:
                development = ( version_string, species )
        elif stable is None:
            stable = ( version_string, species )
        if stable and development: break
    aliases: dict[ str, tuple[ str, ... ] ] = { }
    if stable:
        aliases[ 'stable (current)' ] = stable[ 1 ]
        data[ 'stable_version' ] = stable[ 0 ]
    if development:
        aliases[ 'development (current)' ] = development[ 1 ]
        data[ 'development_version' ] = development[ 0 ]
    data[ 'stable_dev_versions' ] = aliases

301 

302 

def _extract_coverage( locations: Locations ) -> int:
    ''' Extracts coverage percentage from coverage report.

        Reads the coverage XML report and calculates the overall line
        coverage percentage, rounded down to the nearest integer.
    '''
    report = locations.artifacts / 'coverage-pytest/coverage.xml'
    if not report.exists( ): raise _exceptions.FileAwol( report )
    from defusedxml import ElementTree
    root = ElementTree.parse( report ).getroot( ) # pyright: ignore
    if root is None:
        raise _exceptions.FileEmpty( report ) # pragma: no cover
    # Cobertura-format root element carries overall 'line-rate' in [0, 1].
    rate = root.get( 'line-rate' )
    if not rate:
        raise _exceptions.FileDataAwol(
            report, 'line-rate' ) # pragma: no cover
    return __.math.floor( 100 * float( rate ) )

320 

321 

def _fetch_publication_branch_and_tarball( locations: Locations ) -> None:
    ''' Fetches publication branch and checks out existing tarball.

        Attempts to fetch the publication branch from origin and checkout
        the website tarball. Ignores failures if branch or tarball don't
        exist.
    '''
    commands = (
        [ 'git', 'fetch', 'origin', 'publication:publication' ],
        [ 'git', 'checkout', 'publication', '--',
          str( locations.archive ) ],
    )
    for command in commands:
        # Best-effort: a missing branch or tarball is not an error here.
        with __.ctxl.suppress( Exception ):
            __.subprocess.run(
                command,
                cwd = locations.project,
                check = False,
                capture_output = True )

341 

342 

def _generate_coverage_badge_svg(
    locations: Locations, j2context: _jinja2.Environment
) -> str:
    ''' Generates coverage badge SVG content.

        Returns the rendered SVG content for a coverage badge based on the
        current coverage percentage. Colors indicate coverage quality:
        - red: < 50%
        - yellow: 50-79%
        - green: >= 80%
    '''
    coverage = _extract_coverage( locations )
    if coverage < 50: color = 'red' # noqa: PLR2004
    elif coverage < 80: color = 'yellow' # noqa: PLR2004
    else: color = 'green'
    label_text = 'coverage'
    value_text = f"{coverage}%"
    # Approximate rendered text width: ~6px per character plus padding.
    label_width = 10 + 6 * len( label_text )
    value_width = 15 + 6 * len( value_text )
    template = j2context.get_template( 'coverage.svg.jinja' )
    # TODO: Add error handling for template rendering failures.
    return template.render(
        color = color,
        total_width = label_width + value_width,
        label_text = label_text,
        value_text = value_text,
        label_width = label_width,
        value_width = value_width )

372 

373 

def _update_available_species(
    locations: Locations, version: str
) -> tuple[ str, ... ]:
    ''' Copies documentation artifacts for a version into website tree.

        For each known artifact species (pytest coverage report, Sphinx
        HTML), copies the artifact directory into the version's subtree of
        the website, replacing any previous copy. Returns the names of the
        species which were actually present.
    '''
    available_species: list[ str ] = [ ]
    for species in ( 'coverage-pytest', 'sphinx-html' ):
        origin = locations.artifacts / species
        # Species which were not built for this release are simply omitted.
        if not origin.is_dir( ): continue
        destination = locations.website / version / species
        if destination.is_dir( ): __.shutil.rmtree( destination )
        __.shutil.copytree( origin, destination )
        available_species.append( species )
    return tuple( available_species )

386 

387 

def _update_coverage_badge(
    locations: Locations, j2context: _jinja2.Environment
) -> None:
    ''' Updates coverage badge SVG.

        Generates a color-coded coverage badge based on the current coverage
        percentage and writes it to the main coverage.svg location.
    '''
    badge = _generate_coverage_badge_svg( locations, j2context )
    with locations.coverage.open( 'w' ) as stream:
        stream.write( badge )

399 

400 

def _update_publication_branch( locations: Locations, version: str ) -> None:
    ''' Updates publication branch with new tarball.

        Adds the tarball to git, commits to the publication branch, and
        pushes to origin. Uses the same approach as the GitHub workflow.
        Raises 'subprocess.CalledProcessError' if any git step fails.
    '''
    __.subprocess.run(
        [ 'git', 'add', str( locations.archive ) ],
        cwd = locations.project,
        check = True )
    # Commit to publication branch without checkout.
    # Get current tree hash.
    # NOTE(review): 'write-tree' snapshots the entire index, so any other
    # staged changes would be captured too — confirm callers stage nothing
    # else before invoking this.
    tree_result = __.subprocess.run(
        [ 'git', 'write-tree' ],
        cwd = locations.project,
        check = True, capture_output = True, text = True )
    tree_hash = tree_result.stdout.strip( )
    # Check if publication branch exists; first publication has no parent.
    publication_exists = __.subprocess.run(
        [ 'git', 'show-ref', '--verify', '--quiet', 'refs/heads/publication' ],
        cwd = locations.project,
        check = False ).returncode == 0
    # Create the commit object directly, parenting on the existing branch
    # tip (if any) to preserve publication history.
    commit_result = __.subprocess.run(
        [ 'git', 'commit-tree', tree_hash,
          *( ( '-p', 'publication' ) if publication_exists else ( ) ),
          '-m', f"Update documents for publication. ({version})" ],
        cwd = locations.project,
        check = True, capture_output = True, text = True )
    commit_hash = commit_result.stdout.strip( )
    # Move the branch pointer without checking the branch out.
    __.subprocess.run(
        [ 'git', 'branch', '--force', 'publication', commit_hash ],
        cwd = locations.project,
        check = True )
    __.subprocess.run(
        [ 'git', 'push', 'origin', 'publication:publication' ],
        cwd = locations.project,
        check = True )

438 

439 

def _update_index_html(
    locations: Locations,
    j2context: _jinja2.Environment,
    data: dict[ __.typx.Any, __.typx.Any ],
) -> None:
    ''' Updates index.html with version information.

        Generates the main index page showing all available versions and
        their associated documentation and coverage reports.
    '''
    # TODO: Add error handling for template rendering failures.
    content = j2context.get_template( 'website.html.jinja' ).render( **data )
    with locations.index.open( 'w' ) as stream:
        stream.write( content )

454 

455 

def _update_version_coverage_badge(
    locations: Locations, j2context: _jinja2.Environment, version: str
) -> None:
    ''' Updates version-specific coverage badge SVG.

        Generates a coverage badge for the specific version and places it
        in the version's subtree. This allows each version to have its own
        coverage badge accessible at version/coverage.svg.
    '''
    badge = _generate_coverage_badge_svg( locations, j2context )
    destination = locations.website / version / 'coverage.svg'
    with destination.open( 'w' ) as stream:
        stream.write( badge )

469 

470 

def _update_versions_json(
    locations: Locations,
    version: str,
    species: tuple[ str, ... ],
) -> dict[ __.typx.Any, __.typx.Any ]:
    ''' Updates versions.json with new version information.

        Maintains a JSON file tracking all versions and their available
        documentation types. Versions are sorted in descending order, with
        the latest version marked separately.
    '''
    # TODO: Add validation of version string format.
    # TODO: Consider file locking for concurrent update protection.
    from packaging.version import Version
    if not locations.versions.is_file( ):
        # Seed an empty manifest so the read-modify-write below succeeds.
        data: dict[ __.typx.Any, __.typx.Any ] = { 'versions': { } }
        with locations.versions.open( 'w' ) as file:
            __.json.dump( data, file, indent = 4 )
    with locations.versions.open( 'r+' ) as file:
        data = __.json.load( file )
        entries = data[ 'versions' ]
        entries[ version ] = species
        ordered = sorted(
            entries.items( ),
            key = lambda entry: Version( entry[ 0 ] ),
            reverse = True )
        data[ 'latest_version' ] = ordered[ 0 ][ 0 ]
        data[ 'versions' ] = dict( ordered )
        # Rewrite in place, truncating any leftover bytes from the
        # previous, possibly longer, serialization.
        file.seek( 0 )
        __.json.dump( data, file, indent = 4 )
        file.truncate( )
    return data