@@ -448,61 +448,73 @@ pub async fn command_upload_release_distributions(args: &ArgMatches) -> Result<(
448448 return Err ( anyhow ! ( "missing {} release artifacts" , missing. len( ) ) ) ;
449449 }
450450
451- let ( client, token) = new_github_client ( args) ?;
452- let repo_handler = client. repos ( organization, repo) ;
453- let releases = repo_handler. releases ( ) ;
451+ let mut digests = BTreeMap :: new ( ) ;
454452
455- let release = if let Ok ( release) = releases. get_by_tag ( tag) . await {
456- release
457- } else {
458- return if dry_run {
459- println ! ( "release {tag} does not exist; exiting dry-run mode..." ) ;
460- Ok ( ( ) )
461- } else {
462- Err ( anyhow ! (
463- "release {tag} does not exist; create it via GitHub web UI"
464- ) )
453+ for ( source, dest) in & wanted_filenames {
454+ if !filenames. contains ( source) {
455+ continue ;
456+ }
457+
458+ let local_filename = dist_dir. join ( source) ;
459+
460+ // Compute digests in a separate pass so we can always materialize
461+ // SHA256SUMS locally before any GitHub interaction, including in dry-run
462+ // mode. This also avoids trying to reuse the streamed upload body for hashing.
463+ let digest = {
464+ let file = tokio:: fs:: File :: open ( local_filename) . await ?;
465+ let mut stream = tokio_util:: io:: ReaderStream :: with_capacity ( file, 1048576 ) ;
466+ let mut hasher = Sha256 :: new ( ) ;
467+ while let Some ( chunk) = stream. next ( ) . await {
468+ hasher. update ( & chunk?) ;
469+ }
470+ hex:: encode ( hasher. finalize ( ) )
465471 } ;
466- } ;
472+ digests. insert ( dest. clone ( ) , digest) ;
473+ }
467474
468- let mut digests = BTreeMap :: new ( ) ;
475+ let shasums = digests
476+ . iter ( )
477+ . map ( |( filename, digest) | format ! ( "{digest}  {filename}\n " ) )
478+ . collect :: < Vec < _ > > ( )
479+ . join ( "" ) ;
480+
481+ std:: fs:: write ( dist_dir. join ( "SHA256SUMS" ) , shasums. as_bytes ( ) ) ?;
482+
483+ if dry_run {
484+ println ! ( "wrote local SHA256SUMS; skipping GitHub upload and verification" ) ;
485+ return Ok ( ( ) ) ;
486+ }
487+
488+ let ( client, token) = new_github_client ( args) ?;
489+ let repo_handler = client. repos ( organization, repo) ;
490+ let releases = repo_handler. releases ( ) ;
491+ let release = releases
492+ . get_by_tag ( tag)
493+ . await
494+ . map_err ( |_| anyhow ! ( "release {tag} does not exist; create it via GitHub web UI" ) ) ?;
469495
470496 let retry_policy = ExponentialBackoff :: builder ( ) . build_with_max_retries ( 5 ) ;
471497 let raw_client = Client :: new ( ) ;
472498
473499 {
474500 let mut fs = vec ! [ ] ;
475501
476- for ( source, dest) in wanted_filenames {
477- if !filenames. contains ( & source) {
502+ for ( source, dest) in & wanted_filenames {
503+ if !filenames. contains ( source) {
478504 continue ;
479505 }
480506
481- let local_filename = dist_dir. join ( & source) ;
507+ let local_filename = dist_dir. join ( source) ;
482508 fs. push ( upload_release_artifact (
483509 & raw_client,
484510 & retry_policy,
485511 & GitHubUploadRetryStrategy ,
486512 token. clone ( ) ,
487513 & release,
488514 dest. clone ( ) ,
489- UploadSource :: Filename ( local_filename. clone ( ) ) ,
515+ UploadSource :: Filename ( local_filename) ,
490516 dry_run,
491517 ) ) ;
492-
493- // reqwest wants to take ownership of the body, so it's hard for us to do anything
494- // clever with reading the file once and calculating the sha256sum while we read.
495- // So we open and read the file again.
496- let digest = {
497- let file = tokio:: fs:: File :: open ( local_filename) . await ?;
498- let mut stream = tokio_util:: io:: ReaderStream :: with_capacity ( file, 1048576 ) ;
499- let mut hasher = Sha256 :: new ( ) ;
500- while let Some ( chunk) = stream. next ( ) . await {
501- hasher. update ( & chunk?) ;
502- }
503- hex:: encode ( hasher. finalize ( ) )
504- } ;
505- digests. insert ( dest. clone ( ) , digest. clone ( ) ) ;
506518 }
507519
508520 let mut buffered = futures:: stream:: iter ( fs) . buffer_unordered ( 16 ) ;
@@ -512,14 +524,6 @@ pub async fn command_upload_release_distributions(args: &ArgMatches) -> Result<(
512524 }
513525 }
514526
515- let shasums = digests
516- . iter ( )
517- . map ( |( filename, digest) | format ! ( "{digest}  {filename}\n " ) )
518- . collect :: < Vec < _ > > ( )
519- . join ( "" ) ;
520-
521- std:: fs:: write ( dist_dir. join ( "SHA256SUMS" ) , shasums. as_bytes ( ) ) ?;
522-
523527 upload_release_artifact (
524528 & raw_client,
525529 & retry_policy,
@@ -534,11 +538,6 @@ pub async fn command_upload_release_distributions(args: &ArgMatches) -> Result<(
534538
535539 // Check that content wasn't munged as part of uploading. This once happened
536540 // and created a busted release. Never again.
537- if dry_run {
538- println ! ( "skipping SHA256SUMs check" ) ;
539- return Ok ( ( ) ) ;
540- }
541-
542541 let release = releases
543542 . get_by_tag ( tag)
544543 . await
0 commit comments