Coverage for sources / emcdproj / website.py: 68%

210 statements  

« prev     ^ index     » next       coverage.py v7.12.0, created at 2025-12-04 00:48 +0000

1# vim: set filetype=python fileencoding=utf-8: 

2# -*- coding: utf-8 -*- 

3 

4#============================================================================# 

5# # 

6# Licensed under the Apache License, Version 2.0 (the "License"); # 

7# you may not use this file except in compliance with the License. # 

8# You may obtain a copy of the License at # 

9# # 

10# http://www.apache.org/licenses/LICENSE-2.0 # 

11# # 

12# Unless required by applicable law or agreed to in writing, software # 

13# distributed under the License is distributed on an "AS IS" BASIS, # 

14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # 

15# See the License for the specific language governing permissions and # 

16# limitations under the License. # 

17# # 

18#============================================================================# 

19 

20 

21''' Static website maintenance utilities for projects. ''' 

22 

23 

24import jinja2 as _jinja2 

25 

26from . import __ 

27from . import exceptions as _exceptions 

28from . import interfaces as _interfaces 

29 

30 

class SurveyCommand(
    _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ),
):
    ''' Surveys release versions published in static website. '''

    # When true, fetches the publication branch and uses its tarball
    # instead of relying only on local artifacts.
    use_extant: __.typx.Annotated[
        bool,
        __.typx.Doc( ''' Fetch publication branch and use tarball. ''' ),
    ] = False

    async def __call__(
        self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay
    ) -> None:
        # Delegates to the module-level 'survey' function; 'display' is
        # accepted to satisfy the CliCommand interface but is not used here.
        survey( auxdata, use_extant = self.use_extant )

45 

46 

class UpdateCommand(
    _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ),
):
    ''' Updates static website for particular release version. '''

    # Positional CLI argument naming the release to publish.
    version: __.typx.Annotated[
        str,
        __.typx.Doc( ''' Release version to update. ''' ),
        __.tyro.conf.Positional,
    ]

    # When true, fetches the publication branch and uses its tarball.
    use_extant: __.typx.Annotated[
        bool,
        __.typx.Doc( ''' Fetch publication branch and use tarball. ''' ),
    ] = False

    # When true, commits and pushes the updated tarball to the
    # publication branch (which also forces use of the extant tarball).
    production: __.typx.Annotated[
        bool,
        __.typx.Doc( ''' Update publication branch with new tarball.
            Implies --use-extant to prevent data loss. ''' ),
    ] = False

    async def __call__(
        self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay
    ) -> None:
        # Delegates to the module-level 'update' function; 'display' is
        # accepted to satisfy the CliCommand interface but is not used here.
        update(
            auxdata, self.version,
            use_extant = self.use_extant,
            production = self.production )

76 

77 

class CommandDispatcher(
    _interfaces.CliCommand, decorators = ( __.standard_tyro_class, ),
):
    ''' Dispatches commands for static website maintenance. '''

    # Union of subcommands; tyro selects one via the 'survey' or
    # 'update' CLI subcommand name.
    command: __.typx.Union[
        __.typx.Annotated[
            SurveyCommand,
            __.tyro.conf.subcommand( 'survey', prefix_name = False ),
        ],
        __.typx.Annotated[
            UpdateCommand,
            __.tyro.conf.subcommand( 'update', prefix_name = False ),
        ],
    ]

    async def __call__(
        self, auxdata: __.Globals, display: _interfaces.ConsoleDisplay
    ) -> None:
        # NOTE(review): 'ictr' appears to be a globally-installed trace
        # helper (not imported in this module) — confirm its provenance.
        ictr( 1 )( self.command )
        await self.command( auxdata = auxdata, display = display )

99 

100 

class Locations( __.immut.DataclassObject ):
    ''' Locations associated with website maintenance. '''

    project: __.Path
    auxiliary: __.Path
    publications: __.Path
    archive: __.Path
    artifacts: __.Path
    website: __.Path
    coverage: __.Path
    index: __.Path
    versions: __.Path
    templates: __.Path

    @classmethod
    def from_project_anchor(
        selfclass,
        auxdata: __.Globals,
        anchor: __.Absential[ __.Path ] = __.absent,
    ) -> __.typx.Self:
        ''' Produces locations from project anchor, if provided.

            If project anchor is not given, then attempt to discover it.
        '''
        # TODO: Discover missing anchor via directory traversal,
        #       seeking VCS markers. For now, fall back to the CWD.
        root = (
            __.Path( ) if __.is_absent( anchor ) else anchor
        ).resolve( strict = True )
        auxiliary = root / '.auxiliary'
        publications = auxiliary / 'publications'
        artifacts = auxiliary / 'artifacts'
        website = artifacts / 'website'
        return selfclass(
            project = root,
            auxiliary = auxiliary,
            publications = publications,
            archive = publications / 'website.tar.xz',
            artifacts = artifacts,
            website = website,
            coverage = website / 'coverage.svg',
            index = website / 'index.html',
            versions = website / 'versions.json',
            templates = auxdata.distribution.provide_data_location(
                'templates' ) )

144 

145 

def survey(
    auxdata: __.Globals, *,
    project_anchor: __.Absential[ __.Path ] = __.absent,
    use_extant: bool = False
) -> None:
    ''' Surveys release versions published in static website.

        Lists all versions from the versions manifest, showing their
        available documentation types and highlighting the latest version.
    '''
    locations = Locations.from_project_anchor( auxdata, project_anchor )
    # Compute the context label once; it is needed by several messages.
    context = "published" if use_extant else "local"
    if use_extant:
        _fetch_publication_branch_and_tarball( locations )
        # Extract the fetched tarball to view published versions.
        if locations.archive.is_file( ):
            from tarfile import open as tarfile_open
            if locations.website.is_dir( ):
                __.shutil.rmtree( locations.website )
            locations.website.mkdir( exist_ok = True, parents = True )
            with tarfile_open( locations.archive, 'r:xz' ) as archive:
                archive.extractall( path = locations.website ) # noqa: S202
    if not locations.versions.is_file( ):
        # Fixed: second literal had a pointless f-prefix (no placeholders).
        print( f"No versions manifest found for {context} website. "
               "Run 'website update' first." )
        return
    with locations.versions.open( 'r' ) as file:
        data = __.json.load( file )
    versions = data.get( 'versions', { } )
    latest = data.get( 'latest_version' )
    if not versions:
        print( f"No versions found in {context} manifest." )
        return
    print( f"{context.capitalize( )} versions:" )
    for version, species in versions.items( ):
        marker = " (latest)" if version == latest else ""
        species_list = ', '.join( species ) if species else "none"
        print( f"  {version}{marker}: {species_list}" )

186 

187 

def update(
    auxdata: __.Globals,
    version: str, *,
    project_anchor: __.Absential[ __.Path ] = __.absent,
    use_extant: bool = False,
    production: bool = False
) -> None:
    ''' Updates project website with latest documentation and coverage.

        Processes the specified version, copies documentation artifacts,
        updates version information, and generates coverage badges.
    '''
    ictr( 2 )( version )
    # TODO: Validate version string format.
    from tarfile import open as tarfile_open
    locations = Locations.from_project_anchor( auxdata, project_anchor )
    locations.publications.mkdir( exist_ok = True, parents = True )
    # --production implies --use-extant to prevent clobbering existing
    # versions already present in the published tarball.
    if production or use_extant:
        _fetch_publication_branch_and_tarball( locations )
    site = locations.website
    if site.is_dir( ): __.shutil.rmtree( site )
    site.mkdir( exist_ok = True, parents = True )
    if locations.archive.is_file( ):
        with tarfile_open( locations.archive, 'r:xz' ) as archive:
            archive.extractall( path = site ) # noqa: S202
    species = _update_available_species( locations, version )
    environment = _jinja2.Environment(
        loader = _jinja2.FileSystemLoader( locations.templates ),
        autoescape = True )
    data = _update_versions_json( locations, version, species )
    _enhance_index_data_with_stable_dev( data )
    _create_stable_dev_directories( locations, data )
    _update_index_html( locations, environment, data )
    if ( locations.artifacts / 'coverage-pytest' ).is_dir( ):
        _update_coverage_badge( locations, environment )
        _update_version_coverage_badge( locations, environment, version )
    # GitHub Pages: suppress Jekyll processing of the site tree.
    ( site / '.nojekyll' ).touch( )
    from .filesystem import chdir
    with chdir( site ): # noqa: SIM117
        with tarfile_open( locations.archive, 'w:xz' ) as archive:
            archive.add( '.' )
    if production: _update_publication_branch( locations, version )

230 

231 

def _create_stable_dev_directories(
    locations: Locations, data: dict[ __.typx.Any, __.typx.Any ]
) -> None:
    ''' Creates stable/ and development/ directories with current releases.

        Copies the content from the identified stable and development
        versions to stable/ and development/ directories to provide
        persistent URLs that don't change when new versions are released.
    '''
    aliases = (
        ( 'stable', data.get( 'stable_version' ) ),
        ( 'development', data.get( 'development_version' ) ),
    )
    for alias, release in aliases:
        if not release: continue
        source = locations.website / release
        destination = locations.website / alias
        # Replace any previous alias directory wholesale.
        if destination.is_dir( ):
            __.shutil.rmtree( destination )
        if source.is_dir( ):
            __.shutil.copytree( source, destination )

257 

258 

def _enhance_index_data_with_stable_dev(
    data: dict[ __.typx.Any, __.typx.Any ]
) -> None:
    ''' Enhances index data with stable/development version information.

        Identifies the latest stable release and latest development version
        from the versions data and adds them as separate entries for the
        stable/development table.
    '''
    from packaging.version import Version
    versions = data.get( 'versions', { } )
    if not versions:
        data[ 'stable_dev_versions' ] = { }
        return
    # Order newest-first by PEP 440 semantics for proper comparison.
    ordered = sorted(
        versions.items( ),
        key = lambda entry: Version( entry[ 0 ] ),
        reverse = True )
    stable = None
    development = None
    # First non-prerelease is stable; first prerelease is development.
    for name, species in ordered:
        prerelease = Version( name ).is_prerelease
        if stable is None and not prerelease:
            stable = ( name, species )
        if development is None and prerelease:
            development = ( name, species )
        if stable and development: break
    table: dict[ str, tuple[ str, ... ] ] = { }
    if stable:
        table[ 'stable (current)' ] = stable[ 1 ]
        data[ 'stable_version' ] = stable[ 0 ]
    if development:
        table[ 'development (current)' ] = development[ 1 ]
        data[ 'development_version' ] = development[ 0 ]
    data[ 'stable_dev_versions' ] = table

298 

299 

def _extract_coverage( locations: Locations ) -> int:
    ''' Extracts coverage percentage from coverage report.

        Reads the coverage XML report and calculates the overall line
        coverage percentage, rounded down to the nearest integer.
    '''
    report = locations.artifacts / 'coverage-pytest/coverage.xml'
    if not report.exists( ): raise _exceptions.FileAwol( report )
    from defusedxml import ElementTree
    root = ElementTree.parse( report ).getroot( ) # pyright: ignore
    if root is None:
        raise _exceptions.FileEmpty( report ) # pragma: no cover
    rate = root.get( 'line-rate' )
    if not rate:
        raise _exceptions.FileDataAwol(
            report, 'line-rate' ) # pragma: no cover
    # Floor, never round up: a 99.6% run must not display as 100%.
    return __.math.floor( 100 * float( rate ) )

317 

318 

def _fetch_publication_branch_and_tarball( locations: Locations ) -> None:
    ''' Fetches publication branch and checks out existing tarball.

        Attempts to fetch the publication branch from origin and checkout
        the website tarball. Ignores failures if branch or tarball don't
        exist.
    '''
    commands = (
        ( 'git', 'fetch', 'origin', 'publication:publication' ),
        ( 'git', 'checkout', 'publication', '--',
          str( locations.archive ) ),
    )
    for command in commands:
        # Best-effort: a missing branch or tarball is not an error here,
        # hence both check = False and blanket suppression.
        with __.ctxl.suppress( Exception ):
            __.subprocess.run(
                list( command ),
                cwd = locations.project,
                check = False,
                capture_output = True )

338 

339 

def _generate_coverage_badge_svg(
    locations: Locations, j2context: _jinja2.Environment
) -> str:
    ''' Generates coverage badge SVG content.

        Returns the rendered SVG content for a coverage badge based on the
        current coverage percentage. Colors indicate coverage quality:
        - red: < 50%
        - yellow: 50-79%
        - green: >= 80%
    '''
    percentage = _extract_coverage( locations )
    if percentage < 50: color = 'red' # noqa: PLR2004
    elif percentage < 80: color = 'yellow' # noqa: PLR2004
    else: color = 'green'
    label_text = 'coverage'
    value_text = f"{percentage}%"
    # Widths approximate rendered text extent plus padding.
    label_width = 6 * len( label_text ) + 10
    value_width = 6 * len( value_text ) + 15
    template = j2context.get_template( 'coverage.svg.jinja' )
    # TODO: Add error handling for template rendering failures.
    return template.render(
        color = color,
        total_width = label_width + value_width,
        label_text = label_text,
        value_text = value_text,
        label_width = label_width,
        value_width = value_width )

369 

370 

def _update_available_species(
    locations: Locations, version: str
) -> tuple[ str, ... ]:
    ''' Copies available artifact species into the version's website tree.

        For each known species ('coverage-pytest', 'sphinx-html') present
        under the artifacts directory, replaces any previous copy at
        website/<version>/<species> and records the species name. Returns
        the tuple of species that were copied.
    '''
    available_species: list[ str ] = [ ]
    for species in ( 'coverage-pytest', 'sphinx-html' ):
        origin = locations.artifacts / species
        if not origin.is_dir( ): continue
        destination = locations.website / version / species
        # Remove any stale copy from a previous run before copying.
        if destination.is_dir( ): __.shutil.rmtree( destination )
        __.shutil.copytree( origin, destination )
        available_species.append( species )
    return tuple( available_species )

383 

384 

def _update_coverage_badge(
    locations: Locations, j2context: _jinja2.Environment
) -> None:
    ''' Updates coverage badge SVG.

        Generates a color-coded coverage badge based on the current
        coverage percentage and writes it to the main coverage.svg
        location.
    '''
    badge = _generate_coverage_badge_svg( locations, j2context )
    locations.coverage.write_text( badge )

396 

397 

def _update_publication_branch( locations: Locations, version: str ) -> None:
    ''' Updates publication branch with new tarball.

        Adds the tarball to git, commits to the publication branch, and
        pushes to origin. Uses the same approach as the GitHub workflow.
    '''
    def git( *arguments: str, **nomargs: __.typx.Any ) -> __.typx.Any:
        # All git invocations run from the project root.
        return __.subprocess.run(
            [ 'git', *arguments ], cwd = locations.project, **nomargs )

    git( 'add', str( locations.archive ), check = True )
    # Commit to publication branch without checkout:
    # snapshot the current index as a tree object.
    tree_hash = git(
        'write-tree',
        check = True, capture_output = True, text = True ).stdout.strip( )
    # Parent the new commit on 'publication' only if the branch exists.
    branch_exists = 0 == git(
        'show-ref', '--verify', '--quiet', 'refs/heads/publication',
        check = False ).returncode
    parents = ( '-p', 'publication' ) if branch_exists else ( )
    commit_hash = git(
        'commit-tree', tree_hash, *parents,
        '-m', f"Update documents for publication. ({version})",
        check = True, capture_output = True, text = True ).stdout.strip( )
    git( 'branch', '--force', 'publication', commit_hash, check = True )
    git( 'push', 'origin', 'publication:publication', check = True )

435 

436 

def _update_index_html(
    locations: Locations,
    j2context: _jinja2.Environment,
    data: dict[ __.typx.Any, __.typx.Any ],
) -> None:
    ''' Updates index.html with version information.

        Generates the main index page showing all available versions and
        their associated documentation and coverage reports.
    '''
    # TODO: Add error handling for template rendering failures.
    content = j2context.get_template( 'website.html.jinja' ).render( **data )
    with locations.index.open( 'w' ) as file:
        file.write( content )

451 

452 

def _update_version_coverage_badge(
    locations: Locations, j2context: _jinja2.Environment, version: str
) -> None:
    ''' Updates version-specific coverage badge SVG.

        Generates a coverage badge for the specific version and places it
        in the version's subtree. This allows each version to have its own
        coverage badge accessible at version/coverage.svg.
    '''
    badge = _generate_coverage_badge_svg( locations, j2context )
    destination = locations.website / version / 'coverage.svg'
    with destination.open( 'w' ) as file:
        file.write( badge )

466 

467 

def _update_versions_json(
    locations: Locations,
    version: str,
    species: tuple[ str, ... ],
) -> dict[ __.typx.Any, __.typx.Any ]:
    ''' Updates versions.json with new version information.

        Maintains a JSON file tracking all versions and their available
        documentation types. Versions are sorted in descending order, with
        the latest version marked separately.
    '''
    # TODO: Add validation of version string format.
    # TODO: Consider file locking for concurrent update protection.
    from packaging.version import Version
    if not locations.versions.is_file( ):
        # Seed an empty manifest so the read-modify-write below succeeds.
        with locations.versions.open( 'w' ) as stream:
            __.json.dump( { 'versions': { } }, stream, indent = 4 )
    with locations.versions.open( 'r+' ) as stream:
        data: dict[ __.typx.Any, __.typx.Any ] = __.json.load( stream )
        entries = data[ 'versions' ]
        entries[ version ] = species
        # Re-sort newest-first by PEP 440 ordering.
        ordered = dict( sorted(
            entries.items( ),
            key = lambda entry: Version( entry[ 0 ] ),
            reverse = True ) )
        data[ 'latest_version' ] = next( iter( ordered ) )
        data[ 'versions' ] = ordered
        # Rewrite in place, truncating any leftover tail bytes.
        stream.seek( 0 )
        __.json.dump( data, stream, indent = 4 )
        stream.truncate( )
    return data