def download_file(url, dest_path, expected_size=None, expected_sha1=None):
    """Download a file with optional size and hash verification.

    Args:
        url: Source URL to fetch.
        dest_path: Destination file path (str or Path); parent dirs are created.
        expected_size: If given, used both to skip already-complete files and
            to verify the final size on disk.
        expected_sha1: If given, the file's SHA-1 digest is checked after
            download.

    NOTE(review): the function body continues on a later line of this
    (line-shuffled) file — resume, download, and verification live there.
    """
    # Normalize to Path so parent/.stat()/.name work uniformly below.
    dest_path = Path(dest_path)
    # Ensure the target directory exists before writing.
    dest_path.parent.mkdir(parents=True, exist_ok=True)
import argparse
import concurrent.futures
import hashlib
import json
import os
import subprocess
import sys
from pathlib import Path
from typing import Dict, List, Optional
from urllib.parse import urljoin

import requests
def download_uup_files(uup_data: Dict, work_dir: Path, edition: str):
    """Download all required CAB/PSF files for given edition."""
    target_dir = work_dir / "uup_files"

    # Keep only the entries that list this edition.
    matching = [
        entry for entry in uup_data.get("files", [])
        if edition in entry.get("editions", [])
    ]
    if not matching:
        raise ValueError(f"No files found for edition {edition}")

    # (url, local_path, size, sha1) tuples for the parallel downloader.
    download_list = [
        (entry["url"], target_dir / entry["name"], entry.get("size"), entry.get("sha1"))
        for entry in matching
    ]

    print(f"Downloading {len(download_list)} files for {edition}")
    download_files_parallel(download_list, target_dir)
    return target_dir
# ------------------------------
# UUP operations
# ------------------------------
def fetch_uup_info(build: str, lang: str, edition: str, timeout: float = 60.0) -> Dict:
    """Get file list + metadata from UUPdump API.

    Args:
        build: Build number string, e.g. "22621.1".
        lang: Language code, e.g. "en-us".
        edition: Edition name. NOTE(review): currently unused here — the URL
            is built from build/lang only; edition filtering happens in
            download_uup_files. Confirm this is intentional.
        timeout: Seconds before the HTTP request is aborted (new, defaulted —
            previously the request could hang indefinitely with no timeout).

    Returns:
        Parsed JSON metadata dict from the API.

    Raises:
        requests.HTTPError: On a non-2xx response.
        requests.Timeout: If the server does not respond within `timeout`.
    """
    url = UUP_FILE_LIST_URL.format(build=build, lang=lang)
    print(f"Fetching file list from {url}")
    # Fix: requests.get without a timeout blocks forever on a stalled server.
    resp = requests.get(url, timeout=timeout)
    resp.raise_for_status()
    data = resp.json()
    # data structure example:
    # {
    #   "files": [{"name": "file.cab", "size": 123, "sha1": "...", "url": "..."}],
    #   "editions": ["Pro", "Home"],
    #   "build": "22621.1"
    # }
    return data
# ------------------------------
# Main CLI
# ------------------------------
def main():
    """CLI entry point: fetch UUP metadata, download files, convert to ISO.

    Exits with status 1 (after printing the error) if any stage fails.
    """
    parser = argparse.ArgumentParser(description="UUPdump-style Windows ISO builder")
    parser.add_argument("build", help="Build number, e.g., 22621.1")
    parser.add_argument("lang", help="Language code, e.g., en-us")
    parser.add_argument("edition", help="Edition, e.g., Professional")
    parser.add_argument("--out", "-o", help="Output ISO path", default="windows_install.iso")
    parser.add_argument("--work-dir", help="Working directory", default="UUP_workspace")
    parser.add_argument("--keep-temp", action="store_true", help="Keep temporary files")
    args = parser.parse_args()

    work_dir = Path(args.work_dir)
    # Fix: parents=True so a nested --work-dir (e.g. "out/uup/work") does not
    # crash with FileNotFoundError when intermediate directories are missing.
    work_dir.mkdir(parents=True, exist_ok=True)

    try:
        print(f"Fetching UUP info for build {args.build}, lang {args.lang}, edition {args.edition}")
        uup_info = fetch_uup_info(args.build, args.lang, args.edition)

        print("Downloading UUP files...")
        uup_files_dir = download_uup_files(uup_info, work_dir, args.edition)

        print("Converting to ISO...")
        convert_to_iso(uup_files_dir, args.edition, Path(args.out), keep_temp=args.keep_temp)

        print("Done.")
    except Exception as e:
        # Top-level CLI boundary: report and exit non-zero rather than traceback.
        print(f"Error: {e}")
        sys.exit(1)
#!/usr/bin/env python3
"""
UUPdump-style tool: Fetch UUP set, download files, convert to ISO.

Requires: requests, python-gnupg (optional), plus external tools:
    cabextract, wimlib, mkisofs/genisoimage
"""
# Resume support headers = {} if dest_path.exists(): existing_size = dest_path.stat().st_size if expected_size and existing_size == expected_size: print(f" [SKIP] {dest_path.name} already complete") return True headers['Range'] = f'bytes={existing_size}-' print(f" [DL] {url}") resp = requests.get(url, stream=True, headers=headers) resp.raise_for_status() mode = 'ab' if 'Range' in headers else 'wb' with open(dest_path, mode) as f: for chunk in resp.iter_content(chunk_size=8192): f.write(chunk) # Verify size if expected_size and dest_path.stat().st_size != expected_size: raise ValueError(f"Size mismatch for {dest_path.name}") # Verify SHA‑1 if expected_sha1: sha1 = hashlib.sha1() with open(dest_path, 'rb') as f: for chunk in iter(lambda: f.read(65536), b''): sha1.update(chunk) if sha1.hexdigest() != expected_sha1: raise ValueError(f"SHA‑1 mismatch for {dest_path.name}") return True
def download_files_parallel(file_list, download_dir, max_workers=8):
    """Download list of (url, path, size, sha1) in parallel.

    NOTE: `download_dir` is part of the call signature but is not read here;
    each entry in `file_list` already carries its full destination path.
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as pool:
        pending = [
            pool.submit(download_file, url, path, size, sha1)
            for url, path, size, sha1 in file_list
        ]
        # Drain in completion order so the first failure propagates promptly.
        for done in concurrent.futures.as_completed(pending):
            done.result()  # raise if any failed
