Created
July 20, 2025 22:38
-
-
Save itsalljustdata/65c1425d03015ce520f5b5728bf17dc0 to your computer and use it in GitHub Desktop.
Downloads .whl files from an Azure DevOps artifact feed.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env python | |
| # | |
| """ | |
| requirements.txt : | |
| requests | |
| pydantic_settings | |
| glom | |
| need a .env file with | |
| DEVOPS_FEED: str | |
| DEVOPS_PROJECT: str | |
| DEVOPS_ORG: str | |
| DEVOPS_PAT: str | |
| DEVOPS_FEED is the name itself, not the full URL | |
| """ | |
| import requests | |
| import json | |
| from pydantic_settings import BaseSettings | |
| from glom import glom | |
| from pathlib import Path | |
# Typed configuration loaded from environment variables / a .env file.
# NOTE(review): instantiating Settings() raises a pydantic ValidationError
# if any required variable is missing.
class Settings(BaseSettings):
    DEVOPS_FEED: str     # feed name only, not the full URL (see module docstring)
    DEVOPS_PROJECT: str  # Azure DevOps project name
    DEVOPS_ORG: str      # Azure DevOps organisation name
    DEVOPS_PAT: str      # personal access token used as the basic-auth password
    class Config:
        env_file = ".env"  # also read values from ./.env

# Module-level singleton: the environment is read once, at import time.
settings = Settings()
def buildURL(prefix: str = 'feeds', parts: list[str] | str | None = None) -> str:
    """Construct an Azure DevOps packaging API URL for the configured feed.

    Parameters
    ----------
    prefix : str
        Sub-domain to target: ``'feeds'`` (metadata API) or ``'pkgs'``
        (package content API).
    parts : str | list[str] | None
        Extra path segments appended after the feed segment.  A bare
        string is treated as a single segment; ``None`` means none.

    Returns
    -------
    str
        The assembled URL.

    Raises
    ------
    ValueError
        If *prefix* is not ``'feeds'`` or ``'pkgs'``.
    """
    if prefix not in ('feeds', 'pkgs'):
        raise ValueError(f"Prefix must be either 'feeds' or 'pkgs' : '{prefix}'")
    url = (
        f"https://{prefix}.dev.azure.com/{settings.DEVOPS_ORG}"
        f"/{settings.DEVOPS_PROJECT}/_apis/packaging/feeds/{settings.DEVOPS_FEED}"
    )
    if isinstance(parts, str):
        parts = [parts]
    # None replaces the original mutable-default [] (shared-list anti-pattern);
    # behavior for callers passing nothing is unchanged.
    if parts:
        url += '/' + '/'.join(parts)
    return url
def getFeedJSON(*parts: str):
    """Fetch JSON from the feed metadata API, unwrapping paginated payloads.

    Any extra *parts* become additional URL path segments.  Raises
    ``requests.HTTPError`` for non-2xx responses and ``ValueError`` when a
    paginated response contains zero records.
    """
    endpoint = buildURL(prefix='feeds', parts=parts)
    resp = requests.get(endpoint, auth=("", settings.DEVOPS_PAT))
    resp.raise_for_status()  # surface HTTP errors immediately
    payload = resp.json()
    is_paginated = isinstance(payload, dict) and 'value' in payload and 'count' in payload
    if not is_paginated:
        return payload
    if payload['count'] == 0:
        raise ValueError(f"No data found at {endpoint}")
    # Paginated envelope: hand back just the list of records.
    return payload['value']
def getFeedPackages(package_names: list[str] | None | str = None):
    """Return package metadata from the feed, optionally filtered by name.

    Parameters
    ----------
    package_names : str | list[str] | None
        Normalized package name(s) to keep.  ``None``, an empty list, or
        falsy entries mean "no filtering".

    Returns
    -------
    list
        Package records (dicts) from the feed API.

    Raises
    ------
    ValueError
        If any requested name is absent from the feed.
    """
    packages = getFeedJSON("packages")
    if package_names is None:
        package_names = []
    elif isinstance(package_names, str):
        package_names = [package_names]
    # Drop falsy entries so "" never triggers a spurious lookup.
    wanted = {name for name in package_names if name}
    if wanted:
        packages = [p for p in packages if p['normalizedName'] in wanted]
        # Compare as sets: the original len(packages) != len(package_names)
        # check raised a false "not found" when a name was requested twice.
        missing_names = wanted - {p['normalizedName'] for p in packages}
        if missing_names:
            raise ValueError(f"Some packages not found in feed '{settings.DEVOPS_FEED}': {', '.join(sorted(missing_names))}")
    return packages
def get_all_from_feed(package_version: str | None = None):
    """Download files for every package in the feed, best-effort per package.

    A package whose download fails (e.g. it has no versions or files) is
    reported and skipped rather than aborting the whole run.

    Returns
    -------
    list[Path]
        Paths of all files written to disk.
    """
    outputFiles = []
    for package in getFeedPackages():
        name = package['normalizedName']
        try:
            outputFiles.extend(getFeedPackageVersionFiles(package_name=name, package_version=package_version))
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit and hid the actual cause.  Keep the best-effort intent
        # but catch only Exception and show what went wrong.
        except Exception as exc:
            print(f"Failed to download package {name}. It may not have any versions or files. ({exc})")
    return outputFiles
def getFeedPackageVersions(package_name: str):
    """Return all version records for *package_name*.

    Raises ``ValueError`` if the package is not present in the feed.
    """
    # Lazily scan the feed for the first matching package record.
    found = next((pkg for pkg in getFeedPackages() if pkg['normalizedName'] == package_name), None)
    if not found:
        raise ValueError(f"Package '{package_name}' not found in feed '{settings.DEVOPS_FEED}'")
    return getFeedJSON('packages', found['id'], 'versions')
def getFeedPackageVersion(package_name: str, package_version: str | None = None):
    """Return the metadata record for one version of a package.

    When *package_version* is falsy, the version flagged ``isLatest`` is
    selected.  Raises ``ValueError`` if no matching version exists.
    """
    versions = getFeedPackageVersions(package_name)
    if package_version:
        matches = (v for v in versions if v['version'] == package_version)
    else:
        matches = (v for v in versions if v['isLatest'])
    selected = next(matches, {})
    if not selected:
        raise ValueError(f"Version '{package_version}' of package '{package_name}' not found in feed '{settings.DEVOPS_FEED}'")
    return selected
def getFeedPackageVersionFiles(package_name: str, package_version: str | None = None, wheel_only: bool = True):
    """Download the files of one package version into ``./<feed>/<package>/``.

    Parameters
    ----------
    package_name : str
        Normalized package name in the feed.
    package_version : str | None
        Specific version; ``None`` selects the latest.
    wheel_only : bool
        When True, download only ``bdist_wheel`` files.

    Returns
    -------
    list[Path]
        Paths of the files written to disk.

    Raises
    ------
    ValueError
        If no (matching) files exist for the version.
    """
    version = getFeedPackageVersion(package_name=package_name, package_version=package_version)
    files = list(version.get('files', []))
    if wheel_only:
        # default=None guards records whose metadata lacks the fileType path;
        # without it glom raises PathAccessError instead of filtering them out.
        files = [f for f in files if glom(f, 'protocolMetadata.data.fileType', default=None) == 'bdist_wheel']
    if not files:
        raise ValueError(f"No files found for package '{package_name}' version '{package_version}' in feed '{settings.DEVOPS_FEED}'")
    outputFiles = []
    outputPath = Path('.').joinpath(settings.DEVOPS_FEED, package_name)
    outputPath.mkdir(parents=True, exist_ok=True)
    for wheel_file in files:
        # Package *content* lives on the pkgs.dev.azure.com host.
        urlWheel = buildURL(prefix='pkgs', parts=['pypi', 'packages', package_name, 'versions', version['version'], wheel_file['name'], 'content'])
        wheelReq = requests.get(urlWheel, auth=("", settings.DEVOPS_PAT))
        wheelReq.raise_for_status()  # fail loudly on a bad download
        outputFile = outputPath.joinpath(wheel_file['name'])
        outputFile.write_bytes(wheelReq.content)
        outputFiles.append(outputFile)
    return outputFiles
def cmdLine_call():
    """Command-line entry point: download wheels from the configured feed.

    Usage::

        script --all [--version V]
        script NAME [NAME ...] [--version V]

    ``--version LATEST`` (any case) is an alias for the default
    latest-version behavior.
    """
    import argparse
    parser = argparse.ArgumentParser(description="Get packages from Azure DevOps Artifacts")
    parser.add_argument("--all", action="store_true", help="Download all packages")
    parser.add_argument("package_names", nargs="*", type=str, help="Name(s) of the packages to download")
    parser.add_argument("--version", type=str, default=None, help="Version of the package to download (default: None (latest))")
    args = parser.parse_args()
    if isinstance(args.version, str) and args.version.upper() == "LATEST":
        args.version = None
    if args.all and args.package_names:
        parser.error("Cannot use --all with specific package names.")
    elif not args.all and not args.package_names:
        parser.error("Must specify package names when not using --all.")
    if args.all:
        filePaths = get_all_from_feed()
    else:
        # nargs="*" guarantees args.package_names is a (non-empty, checked
        # above) list.  The original had unreachable None/str branches here,
        # one of which assigned the misspelled attribute 'package_name'.
        if len(args.package_names) > 1:
            # Validate every requested package up front so we fail fast
            # before downloading anything.
            getFeedPackages(package_names=args.package_names)
        filePaths = []
        for package_name in sorted(args.package_names):
            filePaths.extend(getFeedPackageVersionFiles(package_name=package_name, package_version=args.version))
    if not filePaths:
        print("No files downloaded.")
    elif len(filePaths) == 1:
        print(f"Downloaded file: {filePaths[0]}")
    else:
        sep = '\n - '
        print(f"Downloaded files ({len(filePaths)}): {sep.join(str(p) for p in [''] + filePaths)}")


if __name__ == "__main__":
    cmdLine_call()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment