diff --git a/ddev/changelog.d/20330.added b/ddev/changelog.d/20330.added new file mode 100644 index 0000000000000..ddd43a20920c9 --- /dev/null +++ b/ddev/changelog.d/20330.added @@ -0,0 +1,2 @@ +- Added `ddev size create-dashboard` to visualize size metrics on the Datadog platform +- Added `--to-dd-org` option to `ddev size status` to send metrics to Datadog \ No newline at end of file diff --git a/ddev/changelog.d/20330.changed b/ddev/changelog.d/20330.changed new file mode 100644 index 0000000000000..a40858ffe69ce --- /dev/null +++ b/ddev/changelog.d/20330.changed @@ -0,0 +1 @@ +Replaced multiple format flags with a single `--format` option in the `ddev size` command. diff --git a/ddev/pyproject.toml b/ddev/pyproject.toml index 78378e34e80de..9e800a7565de9 100644 --- a/ddev/pyproject.toml +++ b/ddev/pyproject.toml @@ -42,7 +42,8 @@ dependencies = [ "tqdm", "requests", "matplotlib", - "squarify" + "squarify", + "datadog", ] dynamic = ["version"] diff --git a/ddev/src/ddev/cli/size/__init__.py b/ddev/src/ddev/cli/size/__init__.py index 63ddba4fa4af8..1a2a0811165f6 100644 --- a/ddev/src/ddev/cli/size/__init__.py +++ b/ddev/src/ddev/cli/size/__init__.py @@ -4,6 +4,7 @@ import click +from ddev.cli.size.create_dashboard import create_dashboard from ddev.cli.size.diff import diff from ddev.cli.size.status import status from ddev.cli.size.timeline import timeline @@ -25,6 +26,7 @@ def size(): size.add_command(status) size.add_command(diff) size.add_command(timeline) +size.add_command(create_dashboard) if __name__ == "__main__": size() diff --git a/ddev/src/ddev/cli/size/create_dashboard.py b/ddev/src/ddev/cli/size/create_dashboard.py new file mode 100644 index 0000000000000..490e756fdbd0e --- /dev/null +++ b/ddev/src/ddev/cli/size/create_dashboard.py @@ -0,0 +1,144 @@ +import json +from typing import Any + +import click +import requests + +from ddev.cli.application import Application +from ddev.cli.size.utils.common_funcs import get_org, get_valid_platforms + + 
+@click.command() +@click.option( + "--dd-org", + type=str, + required=True, + help="Datadog organization name taken from your config file e.g. 'default'", +) +@click.pass_obj +def create_dashboard( + app: Application, + dd_org: str, +) -> None: + """ + Creates a Datadog dashboard to visualize size metrics for integrations and dependencies. + A new dashboard is created on each run. This command does not send data to Datadog. + To send metrics, use: `ddev size status --to-dd-org `. + """ + try: + config_file_info = get_org(app, dd_org) + if 'api_key' not in config_file_info: + raise RuntimeError("No API key found in config file") + if 'app_key' not in config_file_info: + raise RuntimeError("No APP key found in config file") + if 'site' not in config_file_info: + raise RuntimeError("No site found in config file") + headers = { + "DD-API-KEY": config_file_info["api_key"], + "DD-APPLICATION-KEY": config_file_info["app_key"], + "Content-Type": "application/json", + } + + payload = { + "title": "Disk Usage Status for Integrations and Dependencies", + "layout_type": "ordered", + "widgets": create_json(app), + } + + response = requests.post( + f"https://api.{config_file_info['site']}/api/v1/dashboard", + headers=headers, + data=json.dumps(payload), + ) + + resp_json = response.json() + if "Forbidden" in str(resp_json.get("errors", [])): + raise PermissionError("Access denied: your APP key doesn't have permission to create dashboards.") + print(f"Dashboard URL: https://app.{config_file_info['site']}{resp_json['url']}") + except Exception as e: + app.abort(str(e)) + + +def create_json(app: Application) -> list[dict[str, Any]]: + valid_platforms = get_valid_platforms(app.repo.path) + widgets: list[dict[str, Any]] = [] + + for size_type in ["compressed", "uncompressed"]: + for platform in valid_platforms: + # Treemap widget + widgets.append( + { + "definition": { + "type": "treemap", + "title": f"{size_type.capitalize()} sizes in {platform}", + "requests": [ + { + "queries": [ 
+ { + "data_source": "metrics", + "name": "query2", + "query": f"avg:datadog.agent_integrations.size_analyzer.{size_type}" + f"{{platform:{platform}}} by {{name_type,name}}", + "aggregator": "last", + } + ], + "response_format": "scalar", + "style": {"palette": "classic"}, + "formulas": [ + { + "formula": "query2", + "number_format": { + "unit": { + "type": "canonical_unit", + "unit_name": "byte_in_binary_bytes_family", + } + }, + } + ], + } + ], + } + } + ) + # Timeseries widget + widgets.append( + { + "definition": { + "title": f"Timeline of {size_type} sizes in {platform}", + "type": "timeseries", + "requests": [ + { + "response_format": "timeseries", + "queries": [ + { + "name": "query1", + "data_source": "metrics", + "query": f"sum:datadog.agent_integrations.size_analyzer.{size_type}" + f"{{platform:{platform}}}", + } + ], + "formulas": [ + { + "formula": "query1", + "number_format": { + "unit": { + "type": "canonical_unit", + "unit_name": "byte_in_binary_bytes_family", + } + }, + } + ], + "style": { + "palette": "dog_classic", + "order_by": "values", + "line_type": "solid", + "line_width": "normal", + }, + "display_type": "line", + } + ], + } + } + ) + + return widgets diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py index 479e89e78742a..2595cf5bcd180 100644 --- a/ddev/src/ddev/cli/size/diff.py +++ b/ddev/src/ddev/cli/size/diff.py @@ -2,31 +2,29 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -import os from datetime import datetime -from typing import Literal, Optional, overload +from typing import Optional import click from rich.console import Console from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn from ddev.cli.application import Application +from ddev.cli.size.utils.common_params import common_params -from .common import ( +from .utils.common_funcs import ( CLIParameters, FileDataEntry, FileDataEntryPlatformVersion, GitRepo, 
convert_to_human_readable_size, + export_format, format_modules, get_dependencies, get_files, get_valid_platforms, get_valid_versions, plot_treemap, - print_csv, - print_json, - print_markdown, print_table, ) @@ -38,20 +36,8 @@ @click.command() @click.argument("first_commit") @click.argument("second_commit") -@click.option( - "--platform", help="Target platform (e.g. linux-aarch64). If not specified, all platforms will be analyzed" -) @click.option("--python", "version", help="Python version (e.g 3.12). If not specified, all versions will be analyzed") -@click.option("--compressed", is_flag=True, help="Measure compressed size") -@click.option("--csv", is_flag=True, help="Output in CSV format") -@click.option("--markdown", is_flag=True, help="Output in Markdown format") -@click.option("--json", is_flag=True, help="Output in JSON format") -@click.option("--save_to_png_path", help="Path to save the treemap as PNG") -@click.option( - "--show_gui", - is_flag=True, - help="Display a pop-up window with a treemap showing size differences between the two commits.", -) +@common_params # platform, compressed, format, show_gui @click.pass_obj def diff( app: Application, @@ -60,10 +46,7 @@ def diff( platform: Optional[str], version: Optional[str], compressed: bool, - csv: bool, - markdown: bool, - json: bool, - save_to_png_path: str, + format: list[str], show_gui: bool, ) -> None: """ @@ -78,8 +61,6 @@ def diff( console=console, ) as progress: task = progress.add_task("[cyan]Calculating differences...", total=None) - if sum([csv, markdown, json]) > 1: - raise click.BadParameter("Only one output format can be selected: --csv, --markdown, or --json") if len(first_commit) < MINIMUM_LENGTH_COMMIT and len(second_commit) < MINIMUM_LENGTH_COMMIT: raise click.BadParameter(f"Commit hashes must be at least {MINIMUM_LENGTH_COMMIT} characters long") elif len(first_commit) < MINIMUM_LENGTH_COMMIT: @@ -94,7 +75,10 @@ def diff( ) if first_commit == second_commit: raise 
click.BadParameter("Commit hashes must be different") - + if format: + for fmt in format: + if fmt not in ["png", "csv", "markdown", "json"]: + raise ValueError(f"Invalid format: {fmt}. Only png, csv, markdown, and json are supported.") repo_url = app.repo.path with GitRepo(repo_url) as gitRepo: @@ -109,104 +93,44 @@ def diff( raise ValueError(f"Invalid platform: {platform}") elif version and version not in valid_versions: raise ValueError(f"Invalid version: {version}") - if platform is None or version is None: - modules_plat_ver: list[FileDataEntryPlatformVersion] = [] - platforms = valid_platforms if platform is None else [platform] - versions = valid_versions if version is None else [version] - progress.remove_task(task) - combinations = [(p, v) for p in platforms for v in versions] - for plat, ver in combinations: - path = None - if save_to_png_path: - base, ext = os.path.splitext(save_to_png_path) - path = f"{base}_{plat}_{ver}{ext}" - parameters: CLIParameters = { - "app": app, - "platform": plat, - "version": ver, - "compressed": compressed, - "csv": csv, - "markdown": markdown, - "json": json, - "save_to_png_path": path, - "show_gui": show_gui, - } - multiple_plats_and_vers: Literal[True] = True - modules_plat_ver.extend( - diff_mode( - gitRepo, - first_commit, - second_commit, - parameters, - progress, - multiple_plats_and_vers, - ) - ) - if csv: - print_csv(app, modules_plat_ver) - elif json: - print_json(app, modules_plat_ver) - else: - progress.remove_task(task) - modules: list[FileDataEntry] = [] - multiple_plat_and_ver: Literal[False] = False - base_parameters: CLIParameters = { + modules_plat_ver: list[FileDataEntryPlatformVersion] = [] + platforms = valid_platforms if platform is None else [platform] + versions = valid_versions if version is None else [version] + progress.remove_task(task) + combinations = [(p, v) for p in platforms for v in versions] + for plat, ver in combinations: + parameters: CLIParameters = { "app": app, - "platform": 
platform, - "version": version, + "platform": plat, + "version": ver, "compressed": compressed, - "csv": csv, - "markdown": markdown, - "json": json, - "save_to_png_path": save_to_png_path, + "format": format, "show_gui": show_gui, } - modules.extend( + modules_plat_ver.extend( diff_mode( gitRepo, first_commit, second_commit, - base_parameters, + parameters, progress, - multiple_plat_and_ver, ) ) - if csv: - print_csv(app, modules) - elif json: - print_json(app, modules) + if format: + export_format(app, format, modules_plat_ver, "diff", platform, version, compressed) except Exception as e: progress.stop() app.abort(str(e)) return None -@overload -def diff_mode( - gitRepo: GitRepo, - first_commit: str, - second_commit: str, - params: CLIParameters, - progress: Progress, - multiple_plats_and_vers: Literal[True], -) -> list[FileDataEntryPlatformVersion]: ... -@overload def diff_mode( gitRepo: GitRepo, first_commit: str, second_commit: str, params: CLIParameters, progress: Progress, - multiple_plats_and_vers: Literal[False], -) -> list[FileDataEntry]: ... 
-def diff_mode( - gitRepo: GitRepo, - first_commit: str, - second_commit: str, - params: CLIParameters, - progress: Progress, - multiple_plats_and_vers: bool, -) -> list[FileDataEntryPlatformVersion] | list[FileDataEntry]: +) -> list[FileDataEntryPlatformVersion]: files_b, dependencies_b, files_a, dependencies_a = get_repo_info( gitRepo, params["platform"], params["version"], first_commit, second_commit, params["compressed"], progress ) @@ -218,31 +142,31 @@ def diff_mode( params["app"].display_error( f"No size differences were detected between the selected commits for {params['platform']}" ) - formatted_modules = format_modules( - integrations + dependencies, params["platform"], params["version"], multiple_plats_and_vers - ) + return [] else: - formatted_modules = format_modules( - integrations + dependencies, params["platform"], params["version"], multiple_plats_and_vers - ) + formatted_modules = format_modules(integrations + dependencies, params["platform"], params["version"]) formatted_modules.sort(key=lambda x: x["Size_Bytes"], reverse=True) for module in formatted_modules: if module["Size_Bytes"] > 0: module["Size"] = f"+{module['Size']}" - if params["markdown"]: - print_markdown(params["app"], "Differences between selected commits", formatted_modules) - elif not params["csv"] and not params["json"]: - print_table(params["app"], "Differences between selected commits", formatted_modules) - - if params["show_gui"] or params["save_to_png_path"]: - plot_treemap( - formatted_modules, - f"Disk Usage Differences for {params['platform']} and Python version {params['version']}", - params["show_gui"], - "diff", - params["save_to_png_path"], - ) + if not params["format"] or params["format"] == ["png"]: # if no format is provided for the data print the table + print_table(params["app"], "Status", formatted_modules) + + treemap_path = ( + f"treemap_{params['platform']}_{params['version']}.png" + if params["format"] and "png" in params["format"] + else None + ) + + if 
params["show_gui"] or treemap_path: + plot_treemap( + formatted_modules, + f"Disk Usage Differences for {params['platform']} and Python version {params['version']}", + params["show_gui"], + "diff", + treemap_path, + ) return formatted_modules diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py index ad09b45e89ed3..6b672c16f6a3c 100644 --- a/ddev/src/ddev/cli/size/status.py +++ b/ddev/src/ddev/cli/size/status.py @@ -2,67 +2,49 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -import os # noqa: I001 from pathlib import Path -from typing import Optional, Literal, overload +from typing import Optional import click from rich.console import Console from ddev.cli.application import Application - -from .common import ( - FileDataEntry, - FileDataEntryPlatformVersion, +from ddev.cli.size.utils.common_funcs import ( CLIParameters, + FileDataEntryPlatformVersion, + export_format, format_modules, get_dependencies, get_files, get_valid_platforms, get_valid_versions, plot_treemap, - print_csv, - print_json, - print_markdown, print_table, + send_metrics_to_dd, ) +from ddev.cli.size.utils.common_params import common_params console = Console(stderr=True) @click.command() -@click.option( - "--platform", help="Target platform (e.g. linux-aarch64). If not specified, all platforms will be analyzed" -) +@click.option("--to-dd-org", type=str, help="Send metrics to Datadog using the specified organization name.") @click.option("--python", "version", help="Python version (e.g 3.12). 
If not specified, all versions will be analyzed") -@click.option("--compressed", is_flag=True, help="Measure compressed size") -@click.option("--csv", is_flag=True, help="Output in CSV format") -@click.option("--markdown", is_flag=True, help="Output in Markdown format") -@click.option("--json", is_flag=True, help="Output in JSON format") -@click.option("--save_to_png_path", help="Path to save the treemap as PNG") -@click.option( - "--show_gui", - is_flag=True, - help="Display a pop-up window with a treemap showing the current size distribution of modules.", -) +@common_params # platform, compressed, format, show_gui @click.pass_obj def status( app: Application, platform: Optional[str], version: Optional[str], compressed: bool, - csv: bool, - markdown: bool, - json: bool, - save_to_png_path: Optional[str], + format: list[str], show_gui: bool, + to_dd_org: str, ) -> None: """ Show the current size of all integrations and dependencies. """ try: - if sum([csv, markdown, json]) > 1: - raise click.BadParameter("Only one output format can be selected: --csv, --markdown, or --json") repo_path = app.repo.path valid_platforms = get_valid_platforms(repo_path) valid_versions = get_valid_versions(repo_path) @@ -70,107 +52,66 @@ def status( raise ValueError(f"Invalid platform: {platform}") elif version and version not in valid_versions: raise ValueError(f"Invalid version: {version}") - - if platform is None or version is None: - modules_plat_ver: list[FileDataEntryPlatformVersion] = [] - platforms = valid_platforms if platform is None else [platform] - versions = valid_versions if version is None else [version] - combinations = [(p, v) for p in platforms for v in versions] - for plat, ver in combinations: - multiple_plats_and_vers: Literal[True] = True - path = None - if save_to_png_path: - base, ext = os.path.splitext(save_to_png_path) - path = f"{base}_{plat}_{ver}{ext}" - parameters: CLIParameters = { - "app": app, - "platform": plat, - "version": ver, - "compressed": 
compressed, - "csv": csv, - "markdown": markdown, - "json": json, - "save_to_png_path": path, - "show_gui": show_gui, - } - modules_plat_ver.extend( - status_mode( - repo_path, - parameters, - multiple_plats_and_vers, - ) - ) - if csv: - print_csv(app, modules_plat_ver) - elif json: - print_json(app, modules_plat_ver) - else: - modules: list[FileDataEntry] = [] - multiple_plat_and_ver: Literal[False] = False - base_parameters: CLIParameters = { + if format: + for fmt in format: + if fmt not in ["png", "csv", "markdown", "json"]: + raise ValueError(f"Invalid format: {fmt}. Only png, csv, markdown, and json are supported.") + modules_plat_ver: list[FileDataEntryPlatformVersion] = [] + platforms = valid_platforms if platform is None else [platform] + versions = valid_versions if version is None else [version] + combinations = [(p, v) for p in platforms for v in versions] + for plat, ver in combinations: + parameters: CLIParameters = { "app": app, - "platform": platform, - "version": version, + "platform": plat, + "version": ver, "compressed": compressed, - "csv": csv, - "markdown": markdown, - "json": json, - "save_to_png_path": save_to_png_path, + "format": format, "show_gui": show_gui, } - modules.extend( + modules_plat_ver.extend( status_mode( repo_path, - base_parameters, - multiple_plat_and_ver, + parameters, ) ) - if csv: - print_csv(app, modules) - elif json: - print_json(app, modules) + if format: + export_format(app, format, modules_plat_ver, "status", platform, version, compressed) + if to_dd_org: + send_metrics_to_dd(app, modules_plat_ver, to_dd_org, compressed) except Exception as e: app.abort(str(e)) -@overload def status_mode( repo_path: Path, params: CLIParameters, - multiple_plats_and_vers: Literal[True], -) -> list[FileDataEntryPlatformVersion]: ... -@overload -def status_mode( - repo_path: Path, - params: CLIParameters, - multiple_plats_and_vers: Literal[False], -) -> list[FileDataEntry]: ... 
-def status_mode( - repo_path: Path, - params: CLIParameters, - multiple_plats_and_vers: bool, -) -> list[FileDataEntryPlatformVersion] | list[FileDataEntry]: +) -> list[FileDataEntryPlatformVersion]: with console.status("[cyan]Calculating sizes...", spinner="dots"): modules = get_files(repo_path, params["compressed"]) + get_dependencies( repo_path, params["platform"], params["version"], params["compressed"] ) - formatted_modules = format_modules(modules, params["platform"], params["version"], multiple_plats_and_vers) + formatted_modules = format_modules(modules, params["platform"], params["version"]) formatted_modules.sort(key=lambda x: x["Size_Bytes"], reverse=True) - if params["markdown"]: - print_markdown(params["app"], "Status", formatted_modules) - elif not params["csv"] and not params["json"]: + if not params["format"] or params["format"] == ["png"]: # if no format is provided for the data print the table print_table(params["app"], "Status", formatted_modules) - if params["show_gui"] or params["save_to_png_path"]: + treemap_path = ( + f"treemap_{params['platform']}_{params['version']}.png" + if params["format"] and "png" in params["format"] + else None + ) + + if params["show_gui"] or treemap_path: plot_treemap( formatted_modules, f"Disk Usage Status for {params['platform']} and Python version {params['version']}", params["show_gui"], "status", - params["save_to_png_path"], + treemap_path, ) return formatted_modules diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py index caec7c3efd992..0fe799e373240 100644 --- a/ddev/src/ddev/cli/size/timeline.py +++ b/ddev/src/ddev/cli/size/timeline.py @@ -4,7 +4,7 @@ import zipfile from datetime import date, datetime from pathlib import Path -from typing import Literal, Optional, overload +from typing import Optional, overload import click import matplotlib.pyplot as plt @@ -13,8 +13,9 @@ from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn from 
ddev.cli.application import Application +from ddev.cli.size.utils.common_params import common_params -from .common import ( +from .utils.common_funcs import ( CommitEntry, CommitEntryPlatformWithDelta, CommitEntryWithDelta, @@ -29,10 +30,10 @@ get_valid_platforms, is_correct_dependency, is_valid_integration, - print_csv, - print_json, - print_markdown, print_table, + save_csv, + save_json, + save_markdown, ) MINIMUM_DATE_DEPENDENCIES = datetime.strptime( @@ -45,8 +46,8 @@ @click.command() @click.argument("type", type=click.Choice(["integration", "dependency"])) @click.argument("name") -@click.argument("initial_commit", required=False) -@click.argument("final_commit", required=False) +@click.option("--initial-commit", help="Initial commit to analyze. If not specified, will start from the first commit") +@click.option("--final-commit", help="Final commit to analyze. If not specified, will end at the latest commit") @click.option( "--time", help="Filter commits starting from a specific date. Accepts both absolute and relative formats, " @@ -57,20 +58,7 @@ type=click.IntRange(min=0), help="Only show modules with size differences greater than a threshold in bytes", ) -@click.option( - "--platform", - help="Target platform to analyze. Only required for dependencies. 
If not specified, all platforms will be analyzed", -) -@click.option("--compressed", is_flag=True, help="Measure compressed size") -@click.option("--csv", is_flag=True, help="Output results in CSV format") -@click.option("--markdown", is_flag=True, help="Output in Markdown format") -@click.option("--json", is_flag=True, help="Output in JSON format") -@click.option("--save_to_png_path", help="Path to save the treemap as PNG") -@click.option( - "--show_gui", - is_flag=True, - help="Display a pop-up window with a line chart showing the size evolution of the selected module over time.", -) +@common_params # platform, compressed, format, show_gui @click.pass_obj def timeline( app: Application, @@ -82,10 +70,7 @@ def timeline( threshold: Optional[int], platform: Optional[str], compressed: bool, - csv: bool, - markdown: bool, - json: bool, - save_to_png_path: str, + format: Optional[list[str]], show_gui: bool, ) -> None: """ @@ -99,31 +84,34 @@ def timeline( transient=True, console=console, ) as progress: - module = name # module is the name of the integration or the dependency - if sum([csv, markdown, json]) > 1: - raise click.BadParameter("Only one output format can be selected: --csv, --markdown, or --json") - elif ( - initial_commit - and final_commit - and len(initial_commit) < MINIMUM_LENGTH_COMMIT - and len(final_commit) < MINIMUM_LENGTH_COMMIT - ): - raise click.BadParameter(f"Commit hashes must be at least {MINIMUM_LENGTH_COMMIT} characters long") - elif initial_commit and len(initial_commit) < MINIMUM_LENGTH_COMMIT: - raise click.BadParameter( - f"Initial commit hash must be at least {MINIMUM_LENGTH_COMMIT} characters long.", param_hint="initial" - ) - elif final_commit and len(final_commit) < MINIMUM_LENGTH_COMMIT: - raise click.BadParameter( - f"Final commit hash must be at least {MINIMUM_LENGTH_COMMIT} characters long.", param_hint="final" - ) - elif final_commit and initial_commit and final_commit == initial_commit: - raise click.BadParameter("Commit hashes 
must be different") - task = progress.add_task("[cyan]Calculating timeline...", total=None) - url = app.repo.path - - with GitRepo(url) as gitRepo: - try: + try: + module = name # module is the name of the integration or the dependency + if ( + initial_commit + and final_commit + and len(initial_commit) < MINIMUM_LENGTH_COMMIT + and len(final_commit) < MINIMUM_LENGTH_COMMIT + ): + raise click.BadParameter(f"Commit hashes must be at least {MINIMUM_LENGTH_COMMIT} characters long") + elif initial_commit and len(initial_commit) < MINIMUM_LENGTH_COMMIT: + raise click.BadParameter( + f"Initial commit hash must be at least {MINIMUM_LENGTH_COMMIT} characters long.", + param_hint="initial", + ) + elif final_commit and len(final_commit) < MINIMUM_LENGTH_COMMIT: + raise click.BadParameter( + f"Final commit hash must be at least {MINIMUM_LENGTH_COMMIT} characters long.", param_hint="final" + ) + elif final_commit and initial_commit and final_commit == initial_commit: + raise click.BadParameter("Commit hashes must be different") + if format: + for fmt in format: + if fmt not in ["png", "csv", "markdown", "json"]: + raise ValueError(f"Invalid format: {fmt}. 
Only png, csv, markdown, and json are supported.") + task = progress.add_task("[cyan]Calculating timeline...", total=None) + url = app.repo.path + + with GitRepo(url) as gitRepo: if final_commit and type == "dependency": date_str, _, _ = gitRepo.get_commit_metadata(final_commit) date = datetime.strptime(date_str, "%b %d %Y").date() @@ -172,28 +160,20 @@ def timeline( return if type == "dependency": modules_plat: list[CommitEntryPlatformWithDelta] = [] - multiple_plats_and_vers: Literal[True] = True progress.remove_task(task) dep_parameters: InitialParametersTimelineDependency - if not platform: + if platform is None: for plat in valid_platforms: - path = None - if save_to_png_path: - base, ext = os.path.splitext(save_to_png_path) - path = f"{base}_{plat}{ext}" dep_parameters = { "app": app, "type": "dependency", "module": module, "threshold": threshold, - "platform": plat, "compressed": compressed, - "csv": csv, - "markdown": markdown, - "json": json, - "save_to_png_path": path, + "format": format, "show_gui": show_gui, "first_commit": None, + "platform": plat, } modules_plat.extend( @@ -201,56 +181,45 @@ def timeline( gitRepo, commits, dep_parameters, - multiple_plats_and_vers, progress, ) ) - else: + else: # dependency and platform dep_parameters = { "app": app, "type": "dependency", "module": module, "threshold": threshold, - "platform": platform, "compressed": compressed, - "csv": csv, - "markdown": markdown, - "json": json, - "save_to_png_path": save_to_png_path, + "format": format, "show_gui": show_gui, "first_commit": None, + "platform": platform, } modules_plat.extend( timeline_mode( gitRepo, commits, dep_parameters, - multiple_plats_and_vers, progress, ) ) + if format: + export_format(app, format, modules_plat, platform, module, compressed) - if csv: - print_csv(app, modules_plat) - elif json: - print_json(app, modules_plat) - else: + else: # integration modules: list[CommitEntryWithDelta] = [] - multiple_plat_and_ver: Literal[False] = False 
int_parameters: InitialParametersTimelineIntegration = { "app": app, "type": "integration", "module": module, "threshold": threshold, - "platform": None, "compressed": compressed, - "csv": csv, - "markdown": markdown, - "json": json, - "save_to_png_path": save_to_png_path, + "format": format, "show_gui": show_gui, "first_commit": first_commit, + "platform": None, } progress.remove_task(task) modules.extend( @@ -258,18 +227,15 @@ def timeline( gitRepo, commits, int_parameters, - multiple_plat_and_ver, progress, ) ) - if csv: - print_csv(app, modules) - elif json: - print_json(app, modules) + if format: + export_format(app, format, modules, None, module, compressed) - except Exception as e: - progress.stop() - app.abort(str(e)) + except Exception as e: + progress.stop() + app.abort(str(e)) @overload @@ -277,7 +243,6 @@ def timeline_mode( gitRepo: GitRepo, commits: list[str], params: InitialParametersTimelineDependency, - multiple_plats_and_vers: Literal[True], progress: Progress, ) -> list[CommitEntryPlatformWithDelta]: ... @@ -287,17 +252,6 @@ def timeline_mode( gitRepo: GitRepo, commits: list[str], params: InitialParametersTimelineIntegration, - multiple_plats_and_vers: Literal[False], - progress: Progress, -) -> list[CommitEntryWithDelta]: ... - - -@overload -def timeline_mode( - gitRepo: GitRepo, - commits: list[str], - params: InitialParametersTimelineDependency, - multiple_plats_and_vers: Literal[False], progress: Progress, ) -> list[CommitEntryWithDelta]: ... 
@@ -306,7 +260,6 @@ def timeline_mode( gitRepo: GitRepo, commits: list[str], params: InitialParametersTimelineIntegration | InitialParametersTimelineDependency, - multiple_plats_and_vers: bool, progress: Progress, ) -> list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta]: if params["type"] == "integration": @@ -324,17 +277,19 @@ def timeline_mode( progress, ) trimmed_modules = trim_modules(modules, params["threshold"]) - formatted_modules = format_modules(trimmed_modules, params["platform"], multiple_plats_and_vers) + formatted_modules = format_modules(trimmed_modules, params["platform"]) - if params["markdown"]: - print_markdown(params["app"], "Timeline for " + params["module"], formatted_modules) - elif not params["csv"] and not params["json"]: - print_table(params["app"], "Timeline for " + params["module"], formatted_modules) + if not params["format"] or params["format"] == ["png"]: # if no format is provided for the data print the table + print_table(params["app"], "Status", formatted_modules) - if params["show_gui"] or params["save_to_png_path"]: - plot_linegraph( - formatted_modules, params["module"], params["platform"], params["show_gui"], params["save_to_png_path"] - ) + timeline_path = ( + f"timeline_{params['module']}_{params['platform']}.png" + if params["platform"] and params["format"] and "png" in params["format"] + else f"timeline_{params['module']}.png" if params["format"] and "png" in params["format"] else None + ) + + if params["show_gui"] or timeline_path: + plot_linegraph(formatted_modules, params["module"], params["platform"], params["show_gui"], timeline_path) return formatted_modules @@ -370,7 +325,6 @@ def get_repo_info( gitRepo: Active GitRepo instance. params: Parameters Typed Dictionary containing module name, type, platform, and other configuration options. commits: List of commits to process. - first_commit: First commit hash where the given integration was introduced (only for integrations). progress: Progress bar instance. 
Returns: @@ -378,38 +332,17 @@ def get_repo_info( """ with progress: if params["type"] == "integration": - file_data = process_commits(commits, params, gitRepo, progress, params["first_commit"]) + file_data = process_commits(commits, params, gitRepo, progress) else: - file_data = process_commits(commits, params, gitRepo, progress, params["first_commit"]) + file_data = process_commits(commits, params, gitRepo, progress) return file_data -@overload -def process_commits( - commits: list[str], - params: InitialParametersTimelineIntegration, - gitRepo: GitRepo, - progress: Progress, - first_commit: str, -) -> list[CommitEntry]: ... - - -@overload -def process_commits( - commits: list[str], - params: InitialParametersTimelineDependency, - gitRepo: GitRepo, - progress: Progress, - first_commit: None, -) -> list[CommitEntry]: ... - - def process_commits( commits: list[str], params: InitialParametersTimelineIntegration | InitialParametersTimelineDependency, gitRepo: GitRepo, progress: Progress, - first_commit: Optional[str], ) -> list[CommitEntry]: """ Processes a list of commits for a given integration or dependency. @@ -423,7 +356,6 @@ def process_commits( type, platform, and other configuration options. gitRepo: GitRepo instance managing the repository. progress: Progress bar instance. - first_commit: First commit hash where the given integration was introduced (only for integrations). Returns: A list of CommitEntry objects with commit metadata and size information. 
@@ -437,9 +369,8 @@ def process_commits( for commit in commits: gitRepo.sparse_checkout_commit(commit, folder) date_str, author, message = gitRepo.get_commit_metadata(commit) - date, message, commit = format_commit_data(date_str, message, commit, first_commit) + date, message, commit = format_commit_data(date_str, message, commit, params["first_commit"]) if params["type"] == "dependency" and date > MINIMUM_DATE_DEPENDENCIES: - assert params["platform"] is not None result = get_dependencies( repo, params["module"], @@ -697,39 +628,18 @@ def get_version(files: list[str], platform: str) -> str: def format_modules( modules: list[CommitEntryWithDelta], platform: Optional[str], - multiple_plats_and_vers: bool, ) -> list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta]: """ Formats the modules list, adding platform and Python version information if needed. - If the modules list is empty, returns a default empty entry (with or without platform information). + Args: + modules: List of modules to format. + platform: Platform string to add to each entry if needed. + + Returns: + A list of formatted entries. 
""" - if modules == [] and multiple_plats_and_vers and platform: - empty_module_platform: CommitEntryPlatformWithDelta = { - "Size_Bytes": 0, - "Version": "", - "Date": datetime.min.date(), - "Author": "", - "Commit_Message": "", - "Commit_SHA": "", - "Delta_Bytes": 0, - "Delta": " ", - "Platform": "", - } - return [empty_module_platform] - elif modules == []: - empty_module: CommitEntryWithDelta = { - "Size_Bytes": 0, - "Version": "", - "Date": datetime.min.date(), - "Author": "", - "Commit_Message": "", - "Commit_SHA": "", - "Delta_Bytes": 0, - "Delta": " ", - } - return [empty_module] - elif multiple_plats_and_vers and platform: + if platform: new_modules: list[CommitEntryPlatformWithDelta] = [{**entry, "Platform": platform} for entry in modules] return new_modules else: @@ -848,6 +758,35 @@ def get_dependency_list(path: str, platforms: set[str]) -> set[str]: return dependencies +def export_format( + app: Application, + format: list[str], + modules: list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta], + platform: Optional[str], + module: str, + compressed: bool, +) -> None: + size_type = "compressed" if compressed else "uncompressed" + for output_format in format: + if output_format == "csv": + csv_filename = ( + f"{module}_{platform}_{size_type}_timeline.csv" if platform else f"{module}_{size_type}_timeline.csv" + ) + save_csv(app, modules, csv_filename) + + elif output_format == "json": + json_filename = ( + f"{module}_{platform}_{size_type}_timeline.json" if platform else f"{module}_{size_type}_timeline.json" + ) + save_json(app, json_filename, modules) + + elif output_format == "markdown": + markdown_filename = ( + f"{module}_{platform}_{size_type}_timeline.md" if platform else f"{module}_{size_type}_timeline.md" + ) + save_markdown(app, "Timeline", modules, markdown_filename) + + def plot_linegraph( modules: list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta], module: str, @@ -865,7 +804,7 @@ def plot_linegraph( show: If True, 
displays the plot interactively. path: If provided, saves the plot to this file path. """ - if not any(str(value).strip() not in ("", "0", "0001-01-01") for value in modules[0].values()): # table is empty + if modules == []: return dates = [entry["Date"] for entry in modules] diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/utils/common_funcs.py similarity index 71% rename from ddev/src/ddev/cli/size/common.py rename to ddev/src/ddev/cli/size/utils/common_funcs.py index 74408f50115e9..d9b1f1ad40264 100644 --- a/ddev/src/ddev/cli/size/common.py +++ b/ddev/src/ddev/cli/size/utils/common_funcs.py @@ -17,6 +17,7 @@ import matplotlib.pyplot as plt import requests import squarify +from datadog import api, initialize from matplotlib.patches import Patch from ddev.cli.application import Application @@ -54,39 +55,33 @@ class CommitEntryPlatformWithDelta(CommitEntryWithDelta): class CLIParameters(TypedDict): - app: Application - platform: str - version: str - compressed: bool - csv: bool - markdown: bool - json: bool - save_to_png_path: Optional[str] - show_gui: bool + app: Application # Main application instance for CLI operations + platform: str # Target platform for analysis (e.g. 
linux-aarch64) + version: str # Target Python version for analysis + compressed: bool # Whether to analyze compressed file sizes + format: Optional[list[str]] # Output format options (png, csv, markdown, json) + show_gui: bool # Whether to display interactive visualization class CLIParametersTimeline(TypedDict): - app: Application - module: str - threshold: Optional[int] - compressed: bool - csv: bool - markdown: bool - json: bool - save_to_png_path: Optional[str] - show_gui: bool + app: Application # Main application instance for CLI operations + module: str # Name of module to analyze + threshold: Optional[int] # Minimum size threshold for filtering + compressed: bool # Whether to analyze compressed file sizes + format: Optional[list[str]] # Output format options (png, csv, markdown, json) + show_gui: bool # Whether to display interactive visualization class InitialParametersTimelineIntegration(CLIParametersTimeline): - type: Literal["integration"] - first_commit: str - platform: None + type: Literal["integration"] # Specifies this is for integration analysis + first_commit: str # Starting commit hash for timeline analysis + platform: None # Platform not needed for integration analysis class InitialParametersTimelineDependency(CLIParametersTimeline): - type: Literal["dependency"] - first_commit: None - platform: str + type: Literal["dependency"] # Specifies this is for dependency analysis + first_commit: None # No commit needed for dependency analysis + platform: str # Target platform for dependency analysis def get_valid_platforms(repo_path: Path | str) -> set[str]: @@ -322,44 +317,19 @@ def format_modules( modules: list[FileDataEntry], platform: str, py_version: str, - multiple_plats_and_vers: bool, -) -> list[FileDataEntryPlatformVersion] | list[FileDataEntry]: +) -> list[FileDataEntryPlatformVersion]: """ Formats the modules list, adding platform and Python version information. - - If the modules list is empty, returns a default empty entry. 
""" - if modules == [] and not multiple_plats_and_vers: - empty_entry: FileDataEntry = { - "Name": "", - "Version": "", - "Size_Bytes": 0, - "Size": "", - "Type": "", - } - return [empty_entry] - elif modules == []: - empty_entry_with_platform: FileDataEntryPlatformVersion = { - "Name": "", - "Version": "", - "Size_Bytes": 0, - "Size": "", - "Type": "", - "Platform": "", - "Python_Version": "", - } - return [empty_entry_with_platform] - elif multiple_plats_and_vers: - new_modules: list[FileDataEntryPlatformVersion] = [ - {**entry, "Platform": platform, "Python_Version": py_version} for entry in modules - ] - return new_modules - else: - return modules + new_modules: list[FileDataEntryPlatformVersion] = [ + {**entry, "Platform": platform, "Python_Version": py_version} for entry in modules + ] + return new_modules -def print_json( +def save_json( app: Application, + file_path: str, modules: ( list[FileDataEntry] | list[FileDataEntryPlatformVersion] @@ -367,33 +337,31 @@ def print_json( | list[CommitEntryPlatformWithDelta] ), ) -> None: - printed_yet = False - app.display("[") - for row in modules: - if any(str(value).strip() not in ("", "0", "0001-01-01") for value in row.values()): - if printed_yet: - app.display(",") - app.display(json.dumps(row, default=str)) - printed_yet = True + if modules == []: + return - app.display("]") + with open(file_path, "w", encoding="utf-8") as f: + json.dump(modules, f, default=str, indent=2) + app.display(f"JSON file saved to {file_path}") -def print_csv( +def save_csv( app: Application, - modules: ( - list[FileDataEntry] - | list[FileDataEntryPlatformVersion] - | list[CommitEntryWithDelta] - | list[CommitEntryPlatformWithDelta] - ), + modules: list[FileDataEntryPlatformVersion] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta], + file_path: str, ) -> None: + if modules == []: + return + headers = [k for k in modules[0].keys() if k not in ["Size", "Delta"]] - app.display(",".join(headers)) - for row in modules: - if 
any(str(value).strip() not in ("", "0", "0001-01-01") for value in row.values()): - app.display(",".join(format(str(row.get(h, ""))) for h in headers)) + with open(file_path, "w", encoding="utf-8") as f: + f.write(",".join(headers) + "\n") + + for row in modules: + f.write(",".join(format(str(row.get(h, ""))) for h in headers) + "\n") + + app.display(f"CSV file saved to {file_path}") def format(s: str) -> str: @@ -403,61 +371,129 @@ def format(s: str) -> str: return f'"{s}"' if "," in s else s -def print_markdown( +def save_markdown( app: Application, title: str, - modules: ( - list[FileDataEntry] - | list[FileDataEntryPlatformVersion] - | list[CommitEntryWithDelta] - | list[CommitEntryPlatformWithDelta] - ), + modules: list[FileDataEntryPlatformVersion] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta], + file_path: str, ) -> None: - if all(str(value).strip() in ("", "0", "0001-01-01") for value in modules[0].values()): - return # skip empty table + if modules == []: + return headers = [k for k in modules[0].keys() if "Bytes" not in k] + # Group modules by platform and version + grouped_modules = {(modules[0].get("Platform", ""), modules[0].get("Python_Version", "")): [modules[0]]} + for module in modules[1:]: + platform = module.get("Platform", "") + version = module.get("Python_Version", "") + key = (platform, version) + if key not in grouped_modules: + grouped_modules[key] = [] + if any(str(value).strip() not in ("", "0", "0001-01-01") for value in module.values()): + grouped_modules[key].append(module) + lines = [] - lines.append(f"### {title}") - lines.append("| " + " | ".join(headers) + " |") - lines.append("| " + " | ".join("---" for _ in headers) + " |") - for row in modules: - lines.append("| " + " | ".join(str(row.get(h, "")) for h in headers) + " |") + lines.append(f"# {title}") + lines.append("") + + for (platform, version), group in grouped_modules.items(): + if platform and version: + lines.append(f"## Platform: {platform}, Python 
Version: {version}") + elif platform: + lines.append(f"## Platform: {platform}") + elif version: + lines.append(f"## Python Version: {version}") + else: + lines.append("## Other") + + lines.append("") + lines.append("| " + " | ".join(headers) + " |") + lines.append("| " + " | ".join("---" for _ in headers) + " |") + for row in group: + lines.append("| " + " | ".join(str(row.get(h, "")) for h in headers) + " |") + lines.append("") markdown = "\n".join(lines) - app.display_markdown(markdown) + + with open(file_path, "a", encoding="utf-8") as f: + f.write(markdown) + app.display(f"Markdown table saved to {file_path}") def print_table( app: Application, mode: str, - modules: ( - list[FileDataEntry] - | list[FileDataEntryPlatformVersion] - | list[CommitEntryWithDelta] - | list[CommitEntryPlatformWithDelta] - ), + modules: list[FileDataEntryPlatformVersion] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta], ) -> None: + if modules == []: + return + columns = [col for col in modules[0].keys() if "Bytes" not in col] modules_table: dict[str, dict[int, str]] = {col: {} for col in columns} for i, row in enumerate(modules): - if any(str(value).strip() not in ("", "0", "0001-01-01") for value in row.values()): - for key in columns: - modules_table[key][i] = str(row.get(key, "")) + for key in columns: + modules_table[key][i] = str(row.get(key, "")) app.display_table(mode, modules_table) +def export_format( + app: Application, + format: list[str], + modules: list[FileDataEntryPlatformVersion], + mode: Literal["status", "diff"], + platform: Optional[str], + version: Optional[str], + compressed: bool, +) -> None: + size_type = "compressed" if compressed else "uncompressed" + for output_format in format: + if output_format == "csv": + csv_filename = ( + f"{platform}_{version}_{size_type}_{mode}.csv" + if platform and version + else ( + f"{version}_{size_type}_{mode}.csv" + if version + else f"{platform}_{size_type}_{mode}.csv" if platform else 
f"{size_type}_{mode}.csv" + ) + ) + save_csv(app, modules, csv_filename) + + elif output_format == "json": + json_filename = ( + f"{platform}_{version}_{size_type}_{mode}.json" + if platform and version + else ( + f"{version}_{size_type}_{mode}.json" + if version + else f"{platform}_{size_type}_{mode}.json" if platform else f"{size_type}_{mode}.json" + ) + ) + save_json(app, json_filename, modules) + + elif output_format == "markdown": + markdown_filename = ( + f"{platform}_{version}_{size_type}_{mode}.md" + if platform and version + else ( + f"{version}_{size_type}_{mode}.md" + if version + else f"{platform}_{size_type}_{mode}.md" if platform else f"{size_type}_{mode}.md" + ) + ) + save_markdown(app, "Status", modules, markdown_filename) + + def plot_treemap( - modules: list[FileDataEntry] | list[FileDataEntryPlatformVersion], + modules: list[FileDataEntryPlatformVersion], title: str, show: bool, - mode: Literal["status", "diff"] = "status", + mode: Literal["status", "diff"], path: Optional[str] = None, ) -> None: - if not any(str(value).strip() not in ("", "0") for value in modules[0].values()): - # table is empty + if modules == []: return # Initialize figure and axis @@ -484,10 +520,10 @@ def plot_treemap( plt.subplots_adjust(right=0.8) plt.tight_layout() - if show: - plt.show() if path: plt.savefig(path, bbox_inches="tight", format="png") + if show: + plt.show() def plot_status_treemap( @@ -655,6 +691,89 @@ def draw_treemap_rects_with_labels( ) +def send_metrics_to_dd( + app: Application, modules: list[FileDataEntryPlatformVersion], org: str, compressed: bool +) -> None: + metric_name = ( + "datadog.agent_integrations.size_analyzer.compressed" + if compressed + else "datadog.agent_integrations.size_analyzer.uncompressed" + ) + config_file_info = get_org(app, org) + if not is_everything_committed(): + raise RuntimeError("All files have to be committed in order to send the metrics to Datadog") + if 'api_key' not in config_file_info: + raise RuntimeError("No API 
key found in config file") + if 'site' not in config_file_info: + raise RuntimeError("No site found in config file") + + timestamp = get_last_commit_timestamp() + + metrics = [] + + for item in modules: + metrics.append( + { + "metric": metric_name, + "type": "gauge", + "points": [(timestamp, item["Size_Bytes"])], + "tags": [ + f"name:{item['Name']}", + f"type:{item['Type']}", + f"name_type:{item['Type']}({item['Name']})", + f"version:{item['Version']}", + f"platform:{item['Platform']}", + "team:agent-integrations", + ], + } + ) + + initialize( + api_key=config_file_info["api_key"], + api_host=f"https://api.{config_file_info['site']}", + ) + + api.Metric.send(metrics=metrics) + + +def get_org(app: Application, org: str) -> dict[str, str]: + config_path: Path = app.config_file.path + + current_section = None + org_data = {} + + with open(config_path, "r", encoding="utf-8") as f: + for line in f: + line = line.strip() + if not line or line.startswith("#"): + continue + + # Detect section header + if line.startswith("[") and line.endswith("]"): + current_section = line[1:-1] + continue + + if current_section == f"orgs.{org}": + if "=" in line: + key, value = line.split("=", 1) + key = key.strip() + value = value.strip().strip('"') + org_data[key] = value + if not org_data: + raise ValueError(f"Organization '{org}' not found in config") + return org_data + + +def is_everything_committed() -> bool: + result = subprocess.run(["git", "status", "--porcelain"], capture_output=True, text=True) + return result.stdout.strip() == "" + + +def get_last_commit_timestamp() -> int: + result = subprocess.run(["git", "log", "-1", "--format=%ct"], capture_output=True, text=True, check=True) + return int(result.stdout.strip()) + + class WrongDependencyFormat(Exception): def __init__(self, mensaje: str) -> None: super().__init__(mensaje) @@ -703,8 +822,13 @@ def get_module_commits( if time: return self._run(f'git log --since="{time}" --reverse --pretty=format:%H -- {module_path}') elif 
not initial and not final: + # Get all commits from first to latest return self._run(f"git log --reverse --pretty=format:%H -- {module_path}") + elif not initial: + # Get commits from first commit up to specified final commit + return self._run(f"git log --reverse --pretty=format:%H {final} -- {module_path}") + elif not final: + # Get commits from specified initial commit up to latest return self._run(f"git log --reverse --pretty=format:%H {initial}..HEAD -- {module_path}") else: try: diff --git a/ddev/src/ddev/cli/size/utils/common_params.py b/ddev/src/ddev/cli/size/utils/common_params.py new file mode 100644 index 0000000000000..cc77784a7f1c8 --- /dev/null +++ b/ddev/src/ddev/cli/size/utils/common_params.py @@ -0,0 +1,33 @@ +import functools +from collections.abc import Callable + +import click + + +def common_params(func: Callable) -> Callable: + @functools.wraps(func) + @click.option( + "--platform", help="Target platform (e.g. linux-aarch64). If not specified, all platforms will be analyzed" + ) + @click.option("--compressed", is_flag=True, help="Measure compressed size") + @click.option( + "--format", + help="Format of the output (comma-separated values: png, csv, markdown, json)", + callback=lambda _, __, v: v.split(",") if v else [], + ) + @click.option( + "--show-gui", + is_flag=True, + help="Display a pop-up window with a treemap showing the current size distribution of modules.", + ) + @click.pass_context + def wrapper( + ctx: click.Context, platform: str, compressed: bool, format: list[str], show_gui: bool, *args, **kwargs + ): + kwargs["platform"] = platform + kwargs["compressed"] = compressed + kwargs["format"] = format + kwargs["show_gui"] = show_gui + return ctx.invoke(func, *args, **kwargs) + + return wrapper diff --git a/ddev/tests/cli/size/test_create_dashboard.py b/ddev/tests/cli/size/test_create_dashboard.py new file mode 100644 index 0000000000000..57a72834f0840 --- /dev/null +++ b/ddev/tests/cli/size/test_create_dashboard.py @@ -0,0 +1,63 @@ 
+from unittest.mock import MagicMock, patch + +import pytest + + +@pytest.fixture +def app(): + mock_app = MagicMock() + mock_app.repo.path = "/fake/repo" + mock_app.abort = MagicMock() + return mock_app + + +@pytest.fixture() +def mock_dashboard_env(): + with ( + patch("ddev.cli.size.create_dashboard.get_org") as mock_get_org, + patch("ddev.cli.size.create_dashboard.get_valid_platforms") as mock_get_valid_platforms, + patch("ddev.cli.size.create_dashboard.requests.post") as mock_post, + ): + mock_get_org.return_value = {"api_key": "fake-api-key", "app_key": "fake-app-key", "site": "datadoghq.com"} + mock_get_valid_platforms.return_value = ["linux"] + mock_response = MagicMock() + mock_response.json.return_value = {"url": "/dashboard/abc123"} + mock_post.return_value = mock_response + + yield + + +def test_create_dashboard_success(ddev, app, mock_dashboard_env): + result = ddev("size", "create-dashboard", "--dd-org", "default", obj=app) + assert result.exit_code == 0 + assert "Dashboard URL: https://app.datadoghq.com/dashboard/abc123" in result.output + + +def test_create_dashboard_missing_api_key(ddev, app): + with patch("ddev.cli.size.create_dashboard.get_org") as mock_get_org: + mock_get_org.return_value = {"app_key": "fake-app-key", "site": "datadoghq.com"} + + result = ddev("size", "create-dashboard", "--dd-org", "default", obj=app) + + assert result.exit_code != 0 + assert "No API key found in config file" in result.output + + +def test_create_dashboard_missing_app_key(ddev, app): + with patch("ddev.cli.size.create_dashboard.get_org") as mock_get_org: + mock_get_org.return_value = {"api_key": "fake-api-key", "site": "datadoghq.com"} + + result = ddev("size", "create-dashboard", "--dd-org", "default", obj=app) + + assert result.exit_code != 0 + assert "No APP key found in config file" in result.output + + +def test_create_dashboard_missing_site(ddev, app): + with patch("ddev.cli.size.create_dashboard.get_org") as mock_get_org: + mock_get_org.return_value = 
{"api_key": "fake-api-key", "app_key": "fake-app-key"} + + result = ddev("size", "create-dashboard", "--dd-org", "default", obj=app) + + assert result.exit_code != 0 + assert "No site found in config file" in result.output diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py index bd5db6def4c54..69604f00ff07d 100644 --- a/ddev/tests/cli/size/test_diff.py +++ b/ddev/tests/cli/size/test_diff.py @@ -54,12 +54,14 @@ def get_compressed_dependencies_side_effect(_, __, ___, ____): patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=mock_git_repo), patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None), patch("ddev.cli.size.diff.GitRepo.checkout_commit"), - patch("tempfile.mkdtemp", return_value="fake_repo"), + patch("ddev.cli.size.utils.common_funcs.tempfile.mkdtemp", return_value="fake_repo"), patch("ddev.cli.size.diff.get_files", side_effect=get_compressed_files_side_effect), patch("ddev.cli.size.diff.get_dependencies", side_effect=get_compressed_dependencies_side_effect), patch("ddev.cli.size.diff.format_modules", side_effect=lambda m, *_: m), - patch("matplotlib.pyplot.show"), - patch("matplotlib.pyplot.savefig"), + patch("ddev.cli.size.utils.common_funcs.plt.show"), + patch("ddev.cli.size.utils.common_funcs.plt.savefig"), + patch("ddev.cli.size.utils.common_funcs.plt.figure"), + patch("ddev.cli.size.utils.common_funcs.open", MagicMock()), ): yield @@ -67,11 +69,8 @@ def get_compressed_dependencies_side_effect(_, __, ___, ____): def test_diff_no_args(ddev, mock_size_diff_dependencies): assert ddev("size", "diff", "commit1", "commit2").exit_code == 0 assert ddev("size", "diff", "commit1", "commit2", "--compressed").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--csv").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--markdown").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--json").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", 
"--save_to_png_path", "out.png").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--show_gui").exit_code == 0 + assert ddev("size", "diff", "commit1", "commit2", "--format", "csv,markdown,json,png").exit_code == 0 + assert ddev("size", "diff", "commit1", "commit2", "--show-gui").exit_code == 0 def test_diff_with_platform_and_version(ddev, mock_size_diff_dependencies): @@ -82,22 +81,6 @@ def test_diff_with_platform_and_version(ddev, mock_size_diff_dependencies): ).exit_code == 0 ) - assert ( - ddev("size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--csv").exit_code - == 0 - ) - assert ( - ddev( - "size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--markdown" - ).exit_code - == 0 - ) - assert ( - ddev( - "size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--json" - ).exit_code - == 0 - ) assert ( ddev( "size", @@ -108,14 +91,14 @@ def test_diff_with_platform_and_version(ddev, mock_size_diff_dependencies): "linux-aarch64", "--python", "3.12", - "--save_to_png_path", - "out.png", + "--format", + "csv,markdown,json,png", ).exit_code == 0 ) assert ( ddev( - "size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--show_gui" + "size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--show-gui" ).exit_code == 0 ) @@ -138,11 +121,11 @@ def test_diff_no_differences(ddev): return_value=({'3.12'}), ), patch.object(fake_repo, "checkout_commit"), - patch("tempfile.mkdtemp", return_value="fake_repo"), - patch("os.path.exists", return_value=True), - patch("os.path.isdir", return_value=True), - patch("os.path.isfile", return_value=True), - patch("os.listdir", return_value=["linux-aarch64_3.12"]), + patch("ddev.cli.size.utils.common_funcs.tempfile.mkdtemp", return_value="fake_repo"), + patch("ddev.cli.size.utils.common_funcs.os.path.exists", return_value=True), + 
patch("ddev.cli.size.utils.common_funcs.os.path.isdir", return_value=True), + patch("ddev.cli.size.utils.common_funcs.os.path.isfile", return_value=True), + patch("ddev.cli.size.utils.common_funcs.os.listdir", return_value=["linux-aarch64_3.12"]), patch( "ddev.cli.size.diff.get_files", return_value=[ @@ -157,8 +140,10 @@ def test_diff_no_differences(ddev): {"Name": "dep2.whl", "Version": "2.0.0", "Size_Bytes": 1000}, ], ), - patch("matplotlib.pyplot.show"), - patch("matplotlib.pyplot.savefig"), + patch("ddev.cli.size.utils.common_funcs.plt.show"), + patch("ddev.cli.size.utils.common_funcs.plt.savefig"), + patch("ddev.cli.size.utils.common_funcs.plt.figure"), + patch("ddev.cli.size.utils.common_funcs.open", MagicMock()), ): result = ddev( "size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--compressed" @@ -169,11 +154,8 @@ def test_diff_no_differences(ddev): assert ddev("size", "diff", "commit1", "commit2").exit_code == 0 assert ddev("size", "diff", "commit1", "commit2", "--compressed").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--csv").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--markdown").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--json").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--save_to_png_path", "out.png").exit_code == 0 - assert ddev("size", "diff", "commit1", "commit2", "--show_gui").exit_code == 0 + assert ddev("size", "diff", "commit1", "commit2", "--format", "csv,markdown,json,png").exit_code == 0 + assert ddev("size", "diff", "commit1", "commit2", "--show-gui").exit_code == 0 def test_diff_invalid_platform(ddev): @@ -185,11 +167,11 @@ def test_diff_invalid_platform(ddev): with ( patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo), patch( - "ddev.cli.size.status.get_valid_platforms", + "ddev.cli.size.diff.get_valid_platforms", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 
'windows-x86_64'}), ), patch( - "ddev.cli.size.status.get_valid_versions", + "ddev.cli.size.diff.get_valid_versions", return_value=({'3.12'}), ), ): @@ -207,11 +189,11 @@ def test_diff_invalid_version(ddev): with ( patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo), patch( - "ddev.cli.size.status.get_valid_platforms", + "ddev.cli.size.diff.get_valid_platforms", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}), ), patch( - "ddev.cli.size.status.get_valid_versions", + "ddev.cli.size.diff.get_valid_versions", return_value=({'3.12'}), ), ): @@ -238,11 +220,11 @@ def test_diff_invalid_platform_and_version(ddev): with ( patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo), patch( - "ddev.cli.size.status.get_valid_platforms", + "ddev.cli.size.diff.get_valid_platforms", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}), ), patch( - "ddev.cli.size.status.get_valid_versions", + "ddev.cli.size.diff.get_valid_versions", return_value=({'3.12'}), ), ): diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py index d60b09170bbef..03bc7e25d0078 100644 --- a/ddev/tests/cli/size/test_status.py +++ b/ddev/tests/cli/size/test_status.py @@ -43,7 +43,7 @@ def mock_size_status(): ] with ( - patch("ddev.cli.size.common.get_gitignore_files", return_value=set()), + patch("ddev.cli.size.utils.common_funcs.get_gitignore_files", return_value=set()), patch( "ddev.cli.size.status.get_valid_platforms", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}), @@ -54,16 +54,18 @@ def mock_size_status(): ), patch("ddev.cli.size.status.get_files", return_value=fake_files), patch("ddev.cli.size.status.get_dependencies", return_value=fake_deps), - patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"fake_root{os.sep}", "")), - patch("ddev.cli.size.status.print_csv"), - patch("ddev.cli.size.common.compress", return_value=1234), - 
patch("ddev.cli.size.status.print_table"), - patch("ddev.cli.size.status.plot_treemap"), - patch("os.walk", return_value=mock_walk), - patch("os.listdir", return_value=["fake_dep.whl"]), - patch("os.path.isfile", return_value=True), - patch("matplotlib.pyplot.show"), - patch("matplotlib.pyplot.savefig"), + patch( + "ddev.cli.size.utils.common_funcs.os.path.relpath", + side_effect=lambda path, _: path.replace(f"fake_root{os.sep}", ""), + ), + patch("ddev.cli.size.utils.common_funcs.compress", return_value=1234), + patch("ddev.cli.size.utils.common_funcs.os.walk", return_value=mock_walk), + patch("ddev.cli.size.utils.common_funcs.os.listdir", return_value=["fake_dep.whl"]), + patch("ddev.cli.size.utils.common_funcs.os.path.isfile", return_value=True), + patch("ddev.cli.size.utils.common_funcs.plt.show"), + patch("ddev.cli.size.utils.common_funcs.plt.savefig"), + patch("ddev.cli.size.utils.common_funcs.plt.figure"), + patch("ddev.cli.size.utils.common_funcs.open", MagicMock()), ): yield mock_app @@ -71,23 +73,17 @@ def mock_size_status(): def test_status_no_args(ddev, mock_size_status): assert ddev("size", "status").exit_code == 0 assert ddev("size", "status", "--compressed").exit_code == 0 - assert ddev("size", "status", "--csv").exit_code == 0 - assert ddev("size", "status", "--markdown").exit_code == 0 - assert ddev("size", "status", "--json").exit_code == 0 - assert ddev("size", "status", "--save_to_png_path", "out.png").exit_code == 0 - assert ddev("size", "status", "--show_gui").exit_code == 0 + assert ddev("size", "status", "--format", "csv,markdown,json,png").exit_code == 0 + assert ddev("size", "status", "--show-gui").exit_code == 0 def test_status(ddev, mock_size_status): assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12")).exit_code == 0 assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--compressed")).exit_code == 0 - assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", 
"3.12", "--csv")).exit_code == 0 - assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--markdown")).exit_code == 0 - assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--json")).exit_code == 0 assert ( - ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--save_to_png_path", "out.png") + ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--format", "csv,markdown,json,png") ).exit_code == 0 - assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--show_gui")).exit_code == 0 + assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--show-gui")).exit_code == 0 def test_status_wrong_platform(ddev): diff --git a/ddev/tests/cli/size/test_timeline.py b/ddev/tests/cli/size/test_timeline.py index a07e72a9a0b4d..11d321f4512b2 100644 --- a/ddev/tests/cli/size/test_timeline.py +++ b/ddev/tests/cli/size/test_timeline.py @@ -19,8 +19,8 @@ def mock_timeline(): patch("ddev.cli.size.timeline.GitRepo.sparse_checkout_commit"), patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()), patch("ddev.cli.size.timeline.compress", return_value=1234), - patch("os.walk", return_value=[(Path("/tmp") / "fake_repo" / "int", [], ["file1.py"])]), - patch("os.path.exists", return_value=True), + patch("ddev.cli.size.timeline.os.walk", return_value=[(Path("/tmp") / "fake_repo" / "int", [], ["file1.py"])]), + patch("ddev.cli.size.utils.common_funcs.os.path.exists", return_value=True), patch("ddev.cli.size.timeline.format_modules", side_effect=lambda m, *_: m), patch("ddev.cli.size.timeline.trim_modules", side_effect=lambda m, *_: m), patch( @@ -30,6 +30,7 @@ def mock_timeline(): patch("ddev.cli.size.timeline.plt.show"), patch("ddev.cli.size.timeline.plt.savefig"), patch("ddev.cli.size.timeline.plt.figure"), + patch("ddev.cli.size.utils.common_funcs.open", MagicMock()), ): yield @@ -42,27 +43,66 @@ def app(): def 
test_timeline_integration(ddev, mock_timeline, app):
-    assert ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--compressed", obj=app).exit_code == 0
-    assert ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--csv", obj=app).exit_code == 0
-    assert ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--markdown", obj=app).exit_code == 0
-    assert ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--json", obj=app).exit_code == 0
     assert (
         ddev(
             "size",
             "timeline",
             "integration",
             "int1",
+            "--initial-commit",
             "commit1",
+            "--final-commit",
             "commit2",
-            "--save_to_png_path",
-            "out_int.png",
+            "--compressed",
+            obj=app,
+        ).exit_code
+        == 0
+    )
+    assert (
+        ddev(
+            "size",
+            "timeline",
+            "integration",
+            "int1",
+            "--initial-commit",
+            "commit1",
+            "--final-commit",
+            "commit2",
+            "--format",
+            "csv,markdown,json,png",
+            obj=app,
+        ).exit_code
+        == 0
+    )
+    assert (
+        ddev(
+            "size",
+            "timeline",
+            "integration",
+            "int1",
+            "--initial-commit",
+            "commit1",
+            "--final-commit",
+            "commit2",
+            "--show-gui",
             obj=app,
         ).exit_code
         == 0
     )
-    assert ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--show_gui", obj=app).exit_code == 0
     assert (
-        ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--threshold", "1000", obj=app).exit_code
+        ddev(
+            "size",
+            "timeline",
+            "integration",
+            "int1",
+            "--initial-commit",
+            "commit1",
+            "--final-commit",
+            "commit2",
+            "--threshold",
+            "1000",
+            obj=app,
+        ).exit_code
         == 0
     )
 
@@ -80,13 +120,13 @@ def mock_timeline_dependencies():
         patch("ddev.cli.size.timeline.GitRepo.sparse_checkout_commit"),
         patch(
             "ddev.cli.size.timeline.get_valid_platforms",
-            return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}),
+            return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}),
         ),
         patch("ddev.cli.size.timeline.get_dependency_list", return_value={"dep1"}),
-    
patch("os.path.exists", return_value=True), - patch("os.path.isdir", return_value=True), - patch("os.listdir", return_value=["linux-x86_64-3.12"]), - patch("os.path.isfile", return_value=True), + patch("ddev.cli.size.utils.common_funcs.os.path.exists", return_value=True), + patch("ddev.cli.size.utils.common_funcs.os.path.isdir", return_value=True), + patch("ddev.cli.size.utils.common_funcs.os.listdir", return_value=["linux-x86_64-3.12"]), + patch("ddev.cli.size.utils.common_funcs.os.path.isfile", return_value=True), patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()), patch( "ddev.cli.size.timeline.get_dependencies", @@ -104,29 +144,24 @@ def mock_timeline_dependencies(): patch("ddev.cli.size.timeline.plt.show"), patch("ddev.cli.size.timeline.plt.savefig"), patch("ddev.cli.size.timeline.plt.figure"), + patch("ddev.cli.size.utils.common_funcs.open", MagicMock()), ): yield def test_timeline_dependency(ddev, mock_timeline_dependencies, app): - assert ( - ddev( - "size", "timeline", "dependency", "dep1", "commit1", "commit2", "--platform", "linux-x86_64", obj=app - ).exit_code - == 0 - ) - assert ddev("size", "timeline", "dependency", "dep1", "commit1", "commit2", obj=app).exit_code == 0 assert ( ddev( "size", "timeline", "dependency", "dep1", + "--initial-commit", "commit1", + "--final-commit", "commit2", "--platform", "linux-x86_64", - "--compressed", obj=app, ).exit_code == 0 @@ -137,11 +172,10 @@ def test_timeline_dependency(ddev, mock_timeline_dependencies, app): "timeline", "dependency", "dep1", + "--initial-commit", "commit1", + "--final-commit", "commit2", - "--platform", - "linux-x86_64", - "--csv", obj=app, ).exit_code == 0 @@ -152,11 +186,13 @@ def test_timeline_dependency(ddev, mock_timeline_dependencies, app): "timeline", "dependency", "dep1", + "--initial-commit", "commit1", + "--final-commit", "commit2", "--platform", "linux-x86_64", - "--markdown", + "--compressed", obj=app, ).exit_code == 0 @@ -167,53 +203,54 @@ def 
test_timeline_dependency(ddev, mock_timeline_dependencies, app): "timeline", "dependency", "dep1", + "--initial-commit", "commit1", + "--final-commit", "commit2", "--platform", "linux-x86_64", - "--json", + "--format", + "csv,markdown,json,png", obj=app, ).exit_code == 0 ) + assert ( ddev( "size", "timeline", "dependency", "dep1", + "--initial-commit", "commit1", + "--final-commit", "commit2", "--platform", "linux-x86_64", - "--save_to_png_path", - "out2.png", + "--show-gui", obj=app, ).exit_code == 0 ) - assert ( ddev( "size", "timeline", "dependency", "dep1", + "--initial-commit", "commit1", + "--final-commit", "commit2", "--platform", "linux-x86_64", - "--show_gui", + "--threshold", + "1000", obj=app, ).exit_code == 0 ) - assert ( - ddev( - "size", "timeline", "dependency", "dep1", "--platform", "linux-x86_64", "--threshold", "1000", obj=app - ).exit_code - == 0 - ) def test_timeline_invalid_platform(ddev): @@ -235,7 +272,9 @@ def test_timeline_invalid_platform(ddev): "timeline", "dependency", "dep1", + "--initial-commit", "commit1", + "--final-commit", "commit2", "--compressed", "--platform", @@ -254,9 +293,9 @@ def test_timeline_integration_no_changes(ddev): with ( patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo), patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None), - patch("os.path.exists", return_value=True), - patch("os.path.isdir", return_value=True), - patch("os.listdir", return_value=[]), + patch("ddev.cli.size.utils.common_funcs.os.path.exists", return_value=True), + patch("ddev.cli.size.utils.common_funcs.os.path.isdir", return_value=True), + patch("ddev.cli.size.utils.common_funcs.os.listdir", return_value=[]), patch( "ddev.cli.size.timeline.get_valid_platforms", return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}), @@ -264,54 +303,105 @@ def test_timeline_integration_no_changes(ddev): ): assert ( "No changes found" - in (result := ddev("size", "timeline", "integration", 
"int1", "commit1", "commit2")).output - and result.exit_code == 0 - ) - assert ( - "No changes found" - in (result := ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--compressed")).output - and result.exit_code == 0 - ) - assert ( - "No changes found" - in (result := ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--csv")).output - and result.exit_code == 0 - ) - assert ( - "No changes found" - in (result := ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--markdown")).output + in ( + result := ddev( + "size", + "timeline", + "integration", + "int1", + "--initial-commit", + "commit1", + "--final-commit", + "commit2", + ) + ).output and result.exit_code == 0 ) assert ( "No changes found" - in (result := ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--json")).output + in ( + result := ddev( + "size", + "timeline", + "integration", + "int1", + "--initial-commit", + "commit1", + "--final-commit", + "commit2", + "--compressed", + ) + ).output and result.exit_code == 0 ) assert ( "No changes found" in ( result := ddev( - "size", "timeline", "integration", "int1", "commit1", "commit2", "--save_to_png_path", "out.png" + "size", + "timeline", + "integration", + "int1", + "--initial-commit", + "commit1", + "--final-commit", + "commit2", + "--format", + "csv,markdown,json,png", ) ).output and result.exit_code == 0 ) assert ( "No changes found" - in (result := ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--show_gui")).output + in ( + result := ddev( + "size", + "timeline", + "integration", + "int1", + "--initial-commit", + "commit1", + "--final-commit", + "commit2", + "--show-gui", + ) + ).output and result.exit_code == 0 ) assert ( "No changes found" in ( - result := ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--time", "2025-04-01") + result := ddev( + "size", + "timeline", + "integration", + "int1", + "--initial-commit", + 
"commit1", + "--final-commit", + "commit2", + "--threshold", + "1000", + ) ).output and result.exit_code == 0 ) assert ( "No changes found" in ( - result := ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--threshold", "1000") + result := ddev( + "size", + "timeline", + "integration", + "int1", + "--initial-commit", + "commit1", + "--final-commit", + "commit2", + "--threshold", + "1000", + ) ).output and result.exit_code == 0 ) @@ -333,10 +423,20 @@ def test_timeline_integration_not_found(ddev): return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}), ), patch("ddev.cli.size.timeline.module_exists", return_value=False), - patch("matplotlib.pyplot.show"), - patch("matplotlib.pyplot.savefig"), + patch("ddev.cli.size.utils.common_funcs.plt.show"), + patch("ddev.cli.size.utils.common_funcs.plt.savefig"), + patch("ddev.cli.size.utils.common_funcs.plt.figure"), ): - result = ddev("size", "timeline", "integration", "missing_module", "c123456", "c2345667") + result = ddev( + "size", + "timeline", + "integration", + "missing_module", + "--initial-commit", + "c123456", + "--final-commit", + "c2345667", + ) assert result.exit_code != 0 assert "not found" in result.output @@ -358,7 +458,16 @@ def test_timeline_dependency_missing_no_platform(ddev): ), patch("ddev.cli.size.timeline.get_dependency_list", return_value=set()), ): - result = ddev("size", "timeline", "dependency", "missing_module", "c123456", "c2345667") + result = ddev( + "size", + "timeline", + "dependency", + "missing_module", + "--initial-commit", + "c123456", + "--final-commit", + "c2345667", + ) assert result.exit_code != 0 assert "Dependency missing_module not found in latest commit" in result.output @@ -385,7 +494,9 @@ def test_timeline_dependency_missing_for_platform(ddev, app): "timeline", "dependency", "missing_module", + "--initial-commit", "c123456", + "--final-commit", "c2345667", "--platform", "linux-x86_64", @@ -420,7 +531,9 @@ def 
test_timeline_dependency_no_changes(ddev, app): "timeline", "dependency", "dep1", + "--initial-commit", "c123456", + "--final-commit", "c2345667", "--platform", "linux-x86_64", diff --git a/ddev/tests/size/test_common.py b/ddev/tests/size/test_common.py index 00469e80f2ec8..ac067288925b0 100644 --- a/ddev/tests/size/test_common.py +++ b/ddev/tests/size/test_common.py @@ -1,9 +1,9 @@ import json import os from pathlib import Path -from unittest.mock import MagicMock, mock_open, patch +from unittest.mock import MagicMock, Mock, mock_open, patch -from ddev.cli.size.common import ( +from ddev.cli.size.utils.common_funcs import ( compress, convert_to_human_readable_size, extract_version_from_about_py, @@ -12,12 +12,14 @@ get_dependencies_sizes, get_files, get_gitignore_files, + get_org, get_valid_platforms, get_valid_versions, is_correct_dependency, is_valid_integration, - print_csv, - print_json, + save_csv, + save_json, + save_markdown, ) @@ -113,7 +115,9 @@ def test_get_dependencies_sizes(): mock_response.status_code = 200 mock_response.headers = {"Content-Length": "12345"} with patch("requests.head", return_value=mock_response): - file_data = get_dependencies_sizes(["dependency1"], ["https://example.com/dependency1.whl"], ["1.1.1"], True) + file_data = get_dependencies_sizes( + ["dependency1"], ["https://example.com/dependency1/dependency1-1.1.1-.whl"], ["1.1.1"], True + ) assert file_data == [ { "Name": "dependency1", @@ -125,7 +129,7 @@ def test_get_dependencies_sizes(): ] -def test_format_modules_multiple_platform(): +def test_format_modules(): modules = [ {"Name": "module1", "Type": "A", "Size_Bytes": 1500}, {"Name": "module2", "Type": "B", "Size_Bytes": 3000}, @@ -150,31 +154,7 @@ def test_format_modules_multiple_platform(): }, ] - assert format_modules(modules, platform, version, True) == expected_output - - -def test_format_modules_one_plat(): - modules = [ - {"Name": "module1", "Type": "A", "Size_Bytes": 1500}, - {"Name": "module2", "Type": "B", 
"Size_Bytes": 3000}, - ] - platform = "linux-aarch64" - version = "3.12" - - expected_output = [ - { - "Name": "module1", - "Type": "A", - "Size_Bytes": 1500, - }, - { - "Name": "module2", - "Type": "B", - "Size_Bytes": 3000, - }, - ] - - assert format_modules(modules, platform, version, False) == expected_output + assert format_modules(modules, platform, version) == expected_output def test_get_files_grouped_and_with_versions(): @@ -197,14 +177,19 @@ def mock_getsize(path): return file_sizes[Path(path)] with ( - patch("os.walk", return_value=[(str(p), dirs, files) for p, dirs, files in os_walk_output]), - patch("os.path.getsize", side_effect=mock_getsize), - patch("ddev.cli.size.common.get_gitignore_files", return_value=set()), - patch("ddev.cli.size.common.is_valid_integration", side_effect=mock_is_valid_integration), - patch("ddev.cli.size.common.extract_version_from_about_py", return_value="1.2.3"), - patch("ddev.cli.size.common.convert_to_human_readable_size", side_effect=lambda s: f"{s / 1024:.2f} KB"), + patch( + "ddev.cli.size.utils.common_funcs.os.walk", + return_value=[(str(p), dirs, files) for p, dirs, files in os_walk_output], + ), + patch("ddev.cli.size.utils.common_funcs.os.path.getsize", side_effect=mock_getsize), + patch("ddev.cli.size.utils.common_funcs.get_gitignore_files", return_value=set()), + patch("ddev.cli.size.utils.common_funcs.is_valid_integration", side_effect=mock_is_valid_integration), + patch("ddev.cli.size.utils.common_funcs.extract_version_from_about_py", return_value="1.2.3"), + patch( + "ddev.cli.size.utils.common_funcs.convert_to_human_readable_size", + side_effect=lambda s: f"{s / 1024:.2f} KB", + ), ): - result = get_files(repo_path, compressed=False) expected = [ @@ -231,13 +216,13 @@ def test_get_gitignore_files(): mock_gitignore = f"__pycache__{os.sep}\n*.log\n" # Sample .gitignore file repo_path = "fake_repo" with patch("builtins.open", mock_open(read_data=mock_gitignore)): - with patch("os.path.exists", return_value=True): 
+ with patch("ddev.cli.size.utils.common_funcs.os.path.exists", return_value=True): ignored_patterns = get_gitignore_files(repo_path) assert ignored_patterns == ["__pycache__" + os.sep, "*.log"] def test_compress(): - fake_content = b'a' * 16384 + fake_content = b"a" * 16384 original_size = len(fake_content) m = mock_open(read_data=fake_content) @@ -249,64 +234,84 @@ def test_compress(): assert compressed_size < original_size -def test_print_csv(): +def test_save_csv(): + mock_file = mock_open() mock_app = MagicMock() + modules = [ - {"Name": "module1", "Size B": 123, "Size": "2 B"}, - {"Name": "module,with,comma", "Size B": 456, "Size": "2 B"}, + {"Name": "module1", "Size_Bytes": 123, "Size": "2 B"}, + {"Name": "module,with,comma", "Size_Bytes": 456, "Size": "2 B"}, ] - print_csv(mock_app, modules=modules) + with patch("ddev.cli.size.utils.common_funcs.open", mock_file): + save_csv(mock_app, modules, "output.csv") - expected_calls = [ - (("Name,Size B",),), - (('module1,123',),), - (('"module,with,comma",456',),), - ] + mock_file.assert_called_once_with("output.csv", "w", encoding="utf-8") + handle = mock_file() + + expected_writes = ["Name,Size_Bytes\n", "module1,123\n", '"module,with,comma",456\n'] - actual_calls = mock_app.display.call_args_list - assert actual_calls == expected_calls + assert handle.write.call_args_list == [((line,),) for line in expected_writes] -def test_print_json(): +def test_save_json(): mock_app = MagicMock() + mock_file = mock_open() modules = [ {"name": "mod1", "size": "100"}, {"name": "mod2", "size": "200"}, {"name": "mod3", "size": "300"}, ] - print_json(mock_app, modules) - - expected_calls = [ - (("[",),), - (('{"name": "mod1", "size": "100"}',),), - ((",",),), - (('{"name": "mod2", "size": "200"}',),), - ((",",),), - (('{"name": "mod3", "size": "300"}',),), - (("]",),), - ] - actual_calls = mock_app.display.call_args_list - print(actual_calls) - assert actual_calls == expected_calls + with 
patch("ddev.cli.size.utils.common_funcs.open", mock_file): + save_json(mock_app, "output.json", modules) - result = "".join(call[0][0] for call in actual_calls) - parsed = json.loads(result) - assert parsed == [ - {"name": "mod1", "size": "100"}, - {"name": "mod2", "size": "200"}, - {"name": "mod3", "size": "300"}, + mock_file.assert_called_once_with("output.json", "w", encoding="utf-8") + handle = mock_file() + + expected_json = json.dumps(modules, indent=2) + + written_content = "".join(call.args[0] for call in handle.write.call_args_list) + assert written_content == expected_json + + mock_app.display.assert_called_once_with("JSON file saved to output.json") + + +def test_save_markdown(): + mock_app = MagicMock() + mock_file = mock_open() + + modules = [ + {"Name": "module1", "Size_Bytes": 123, "Size": "2 B", "Type": "Integration", "Platform": "linux-x86_64"}, + {"Name": "module2", "Size_Bytes": 456, "Size": "4 B", "Type": "Dependency", "Platform": "linux-x86_64"}, ] + with patch("ddev.cli.size.utils.common_funcs.open", mock_file): + save_markdown(mock_app, "Status", modules, "output.md") + + mock_file.assert_called_once_with("output.md", "a", encoding="utf-8") + handle = mock_file() + + expected_writes = ( + "# Status\n\n" + "## Platform: linux-x86_64\n\n" + "| Name | Size | Type | Platform |\n" + "| --- | --- | --- | --- |\n" + "| module1 | 2 B | Integration | linux-x86_64 |\n" + "| module2 | 4 B | Dependency | linux-x86_64 |\n" + ) + + written_content = "".join(call.args[0] for call in handle.write.call_args_list) + assert written_content == expected_writes + def test_extract_version_from_about_py_pathlib(): # Usa Path para compatibilidad multiplataforma fake_path = Path("some") / "module" / "__about__.py" fake_content = "__version__ = '1.2.3'\n" - with patch("builtins.open", mock_open(read_data=fake_content)): + with patch("ddev.cli.size.utils.common_funcs.open", mock_open(read_data=fake_content)): version = extract_version_from_about_py(str(fake_path)) 
assert version == "1.2.3" @@ -316,7 +321,35 @@ def test_extract_version_from_about_py_no_version_pathlib(): fake_path = Path("another") / "module" / "__about__.py" fake_content = "version = 'not_defined'\n" - with patch("builtins.open", mock_open(read_data=fake_content)): + with patch("ddev.cli.size.utils.common_funcs.open", mock_open(read_data=fake_content)): version = extract_version_from_about_py(str(fake_path)) assert version == "" + + +def test_get_org(): + mock_app = Mock() + mock_path = Mock() + + toml_data = """ + [orgs.default] + api_key = "test_api_key" + app_key = "test_app_key" + site = "datadoghq.com" + """ + + mock_app.config_file.path = mock_path + + with ( + patch("ddev.cli.size.utils.common_funcs.open", mock_open(read_data=toml_data)), + patch.object(mock_path, "open", mock_open(read_data=toml_data)), + ): + result = get_org(mock_app, "default") + + expected = { + "api_key": "test_api_key", + "app_key": "test_app_key", + "site": "datadoghq.com", + } + + assert result == expected diff --git a/ddev/tests/size/test_diff.py b/ddev/tests/size/test_diff.py index f5ff3fc5000c4..34c0b3406f420 100644 --- a/ddev/tests/size/test_diff.py +++ b/ddev/tests/size/test_diff.py @@ -4,8 +4,8 @@ import os -from ddev.cli.size.common import convert_to_human_readable_size from ddev.cli.size.diff import get_diff +from ddev.cli.size.utils.common_funcs import convert_to_human_readable_size def to_native_path(path: str) -> str: diff --git a/ddev/tests/size/test_timeline.py b/ddev/tests/size/test_timeline.py index 331a9c62a0c04..15dce636ac21a 100644 --- a/ddev/tests/size/test_timeline.py +++ b/ddev/tests/size/test_timeline.py @@ -16,9 +16,12 @@ def test_get_compressed_files(): with ( - patch("os.walk", return_value=[(os.path.join("fake_repo", "datadog_checks"), [], ["__about__.py"])]), - patch("os.path.relpath", return_value=os.path.join("datadog_checks", "__about__.py")), - patch("os.path.exists", return_value=True), + patch( + "ddev.cli.size.timeline.os.walk", + 
return_value=[(os.path.join("fake_repo", "datadog_checks"), [], ["__about__.py"])], + ), + patch("ddev.cli.size.timeline.os.path.relpath", return_value=os.path.join("datadog_checks", "__about__.py")), + patch("ddev.cli.size.timeline.os.path.exists", return_value=True), patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()), patch("ddev.cli.size.timeline.is_valid_integration", return_value=True), patch("ddev.cli.size.timeline.compress", return_value=1234), @@ -48,9 +51,12 @@ def test_get_compressed_files_deleted_only(): with ( patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()), - patch("os.walk", return_value=[]), - patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"{repo_path}{os.sep}", "")), - patch("os.path.exists", return_value=False), + patch("ddev.cli.size.timeline.os.walk", return_value=[]), + patch( + "ddev.cli.size.timeline.os.path.relpath", + side_effect=lambda path, _: path.replace(f"{repo_path}{os.sep}", ""), + ), + patch("ddev.cli.size.timeline.os.path.exists", return_value=False), ): file_data = get_files(repo_path, module, commit, date, author, message, [], True) @@ -101,7 +107,7 @@ def test_trim_modules_keep_some_remove_some(): def test_get_dependency(): content = """dep1 @ https://example.com/dep1/dep1-1.1.1-.whl dep2 @ https://example.com/dep2/dep2-1.1.2-.whl""" - with patch("builtins.open", mock_open(read_data=content)): + with patch("ddev.cli.size.timeline.open", mock_open(read_data=content)): url, version = get_dependency_data(Path("some") / "path" / "file.txt", "dep2") assert (url, version) == ("https://example.com/dep2/dep2-1.1.2-.whl", "1.1.2") @@ -116,9 +122,9 @@ def make_mock_response(size): def test_get_dependency_size(): mock_response = make_mock_response("45678") - with patch("requests.head", return_value=mock_response): + with patch("ddev.cli.size.timeline.requests.head", return_value=mock_response): info = get_dependency_size( - "https://example.com/file-1.1.1-.whl", + 
"https://example.com/dep1/dep1-1.1.1-.whl", "1.1.1", "abc1234", datetime(2025, 4, 4).date(), @@ -138,11 +144,14 @@ def test_get_dependency_size(): def test_get_compressed_dependencies(): with ( - patch("os.path.exists", return_value=True), - patch("os.path.isdir", return_value=True), - patch("os.path.isfile", return_value=True), - patch("os.listdir", return_value=["linux-x86_64_3.12.txt"]), - patch("ddev.cli.size.timeline.get_dependency_data", return_value=("https://example.com/dep1.whl", '1.1.1')), + patch("ddev.cli.size.timeline.os.path.exists", return_value=True), + patch("ddev.cli.size.timeline.os.path.isdir", return_value=True), + patch("ddev.cli.size.timeline.os.path.isfile", return_value=True), + patch("ddev.cli.size.timeline.os.listdir", return_value=["linux-x86_64_3.12.txt"]), + patch( + "ddev.cli.size.timeline.get_dependency_data", + return_value=("https://example.com/dep1/dep1-1.1.1-.whl", '1.1.1'), + ), patch("ddev.cli.size.timeline.requests.head", return_value=make_mock_response("12345")), ): result = get_dependencies(