|
| 1 | +import logging |
| 2 | +import os |
| 3 | +from copy import deepcopy |
| 4 | +from functools import cached_property |
| 5 | +from typing import Any |
| 6 | + |
| 7 | +import torch |
| 8 | +from anemoi.datasets import open_dataset |
| 9 | + |
| 10 | +from ..runners.default import DefaultRunner |
| 11 | +from . import runner_registry |
| 12 | + |
| 13 | +LOG = logging.getLogger(__name__) |
| 14 | + |
| 15 | +# Possibly move the function(s) below to anemoi-models or anemoi-utils since it could be used in transfer learning. |
| 16 | + |
| 17 | + |
def contains_any(key, specifications):
    """Return True if ``key`` contains at least one of the given substrings.

    Parameters
    ----------
    key : str
        String to search in (typically a state_dict key).
    specifications : Iterable[str]
        Substrings to look for. An empty iterable yields False; an empty
        string matches every key.

    Returns
    -------
    bool
        True if any specification occurs in ``key``, False otherwise.
    """
    # any() short-circuits exactly like the original flag-and-break loop.
    return any(specification in key for specification in specifications)
| 25 | + |
| 26 | + |
def update_state_dict(
    model, external_state_dict, keywords="", ignore_mismatched_layers=False, ignore_additional_layers=False
):
    """Update the model's state_dict with entries from an external state_dict.

    Only entries whose keys contain at least one of the specified keywords are
    considered. The default ``keywords=""`` matches every key (the empty string
    is a substring of any key).

    Parameters
    ----------
    model : torch.nn.Module
        Model whose parameters are updated in place.
    external_state_dict : Mapping[str, torch.Tensor]
        State dict providing the replacement entries.
    keywords : str | list[str]
        Substring(s) a key must contain to be injected.
    ignore_mismatched_layers : bool
        If True, silently skip entries whose shape differs from the model's.
    ignore_additional_layers : bool
        If True, silently skip entries that have no counterpart in the model.

    Returns
    -------
    torch.nn.Module
        The same model instance, with the selected entries loaded.

    Raises
    ------
    AssertionError
        If an entry is missing from the model (and ``ignore_additional_layers``
        is False) or has a mismatched shape (and ``ignore_mismatched_layers``
        is False).
    """
    LOG.info("Updating model state dictionary.")

    if isinstance(keywords, str):
        keywords = [keywords]

    # select the relevant part of external_state_dict
    reduced_state_dict = {k: v for k, v in external_state_dict.items() if any(kw in k for kw in keywords)}
    model_state_dict = model.state_dict()

    # check layers and their shapes; iterate over a snapshot of the keys
    # because entries may be deleted while iterating
    for key in list(reduced_state_dict):
        if key not in model_state_dict:
            if ignore_additional_layers:
                LOG.info("Skipping injection of %s, which is not in the model.", key)
                del reduced_state_dict[key]
            else:
                raise AssertionError(f"Layer {key} not in model. Consider setting 'ignore_additional_layers = True'.")
        elif reduced_state_dict[key].shape != model_state_dict[key].shape:
            if ignore_mismatched_layers:
                LOG.info("Skipping injection of %s due to shape mismatch.", key)
                LOG.info("Model shape: %s", model_state_dict[key].shape)
                LOG.info("Provided shape: %s", reduced_state_dict[key].shape)
                del reduced_state_dict[key]
            else:
                # BUGFIX: the original passed a ("%s", key) tuple to AssertionError,
                # which is never %-formatted (that style only works for logging calls).
                raise AssertionError(
                    f"Mismatch in shape of {key}. Consider setting 'ignore_mismatched_layers = True'."
                )

    # strict=False because reduced_state_dict deliberately covers only a subset
    # of the model's layers
    model.load_state_dict(reduced_state_dict, strict=False)
    return model
| 63 | + |
| 64 | + |
@runner_registry.register("external_graph")
class ExternalGraphRunner(DefaultRunner):
    """Runner where the graph saved in the checkpoint is replaced by an externally provided one.

    Currently only supported as an extension of the default runner.
    """

    def __init__(
        self,
        config: dict,
        graph: str,
        output_mask: dict | None = None,
        graph_dataset: Any | None = None,
        check_state_dict: bool | None = True,
    ) -> None:
        """Initialize the ExternalGraphRunner.

        Parameters
        ----------
        config : Configuration
            Configuration for the runner.
        graph : str
            Path to the external graph.
        output_mask : dict | None
            Dictionary specifying the output mask; expects keys 'nodes_name'
            and 'attribute_name'. Defaults to None (no output mask).
            NOTE: the previous default was a mutable ``{}``; ``None`` is
            behaviorally identical here because only truthiness is checked.
        graph_dataset : Any | None
            Argument to open_dataset of anemoi-datasets that recreates the dataset
            used to build the data nodes of the graph.
        check_state_dict : bool | None
            Boolean specifying if reconstruction of the state dict happens as expected.
        """
        super().__init__(config)
        self.check_state_dict = check_state_dict
        self.graph_path = graph

        # If the graph was built on another dataset, the dataloader metadata in
        # the checkpoint no longer matches and must be patched.
        if graph_dataset is not None:
            graph_ds = open_dataset(graph_dataset)
            LOG.info(
                "The external graph was built using a different anemoi-dataset than that in the checkpoint. "
                "Patching metadata to ensure correct data loading."
            )
            self.checkpoint._metadata.patch(
                {
                    "config": {"dataloader": {"dataset": graph_dataset}},
                    "dataset": {"shape": graph_ds.shape},
                }
            )

            # had to use private attributes because cached properties cause problems
            self.checkpoint._metadata._supporting_arrays = graph_ds.supporting_arrays()
            if "grid_indices" in self.checkpoint._metadata._supporting_arrays:
                num_grid_points = len(self.checkpoint._metadata._supporting_arrays["grid_indices"])
            else:
                # last dataset axis is the grid dimension
                num_grid_points = graph_ds.shape[-1]
            self.checkpoint._metadata.number_of_grid_points = num_grid_points

        # Check if the external graph has the 'indices_connected_nodes' attribute
        # If so adapt dataloader and add supporting array
        data = self.checkpoint._metadata._config.graph.data
        assert data in self.graph.node_types, f"Node type {data} not found in external graph."
        if "indices_connected_nodes" in self.graph[data]:
            LOG.info(
                "The external graph has the 'indices_connected_nodes' attribute."
                "Patching metadata with MaskedGrid 'grid_indices' to ensure correct data loading."
            )
            self.checkpoint._metadata.patch(
                {
                    "config": {
                        "dataloader": {
                            "grid_indices": {
                                "_target_": "anemoi.training.data.grid_indices.MaskedGrid",
                                "nodes_name": data,
                                "node_attribute_name": "indices_connected_nodes",
                            }
                        }
                    }
                }
            )
            LOG.info("Moving 'indices_connected_nodes' from external graph to supporting arrays as 'grid_indices'.")
            indices_connected_nodes = self.graph[data]["indices_connected_nodes"].numpy()
            self.checkpoint._supporting_arrays["grid_indices"] = indices_connected_nodes.squeeze()

        if output_mask:
            nodes = output_mask["nodes_name"]
            attribute = output_mask["attribute_name"]
            self.checkpoint._supporting_arrays["output_mask"] = self.graph[nodes][attribute].numpy().squeeze()
            LOG.info(
                "Moving attribute '%s' of nodes '%s' from external graph to supporting arrays as 'output_mask'.",
                attribute,
                nodes,
            )

    @cached_property
    def graph(self):
        """Load and cache the external graph from ``self.graph_path``."""
        graph_path = self.graph_path
        assert os.path.isfile(
            graph_path
        ), f"No graph found at {graph_path}. An external graph needs to be specified in the config file for this runner."
        LOG.info("Loading external graph from path %s.", graph_path)
        # weights_only=False: the graph file is a full pickled object, not a state dict.
        # NOTE(review): this deserializes arbitrary pickled data — only load trusted graph files.
        return torch.load(graph_path, map_location="cpu", weights_only=False)

    @cached_property
    def model(self):
        """Build the checkpoint model, swap in the external graph, and restore weights."""
        # load the model from the checkpoint on CPU first; rebuilding the model
        # with a new graph is done before moving it to the target device
        device = self.device
        self.device = "cpu"
        model_instance = super().model
        # snapshot the checkpoint weights before _build_model() reinitializes them
        state_dict_ckpt = deepcopy(model_instance.state_dict())

        # rebuild the model with the new graph
        model_instance.graph_data = self.graph
        model_instance.config = self.checkpoint._metadata._config
        model_instance._build_model()

        # reinstate the weights, biases and normalizer from the checkpoint
        # reinstating the normalizer is necessary for checkpoints that were created
        # using transfer learning, where the statistics as stored in the checkpoint
        # do not match the statistics used to build the normalizer in the checkpoint.
        model_instance = update_state_dict(
            model_instance, state_dict_ckpt, keywords=["bias", "weight", "processors.normalizer"]
        )

        LOG.info("Successfully built model with external graph and reassigned model weights!")
        self.device = device
        return model_instance.to(self.device)
0 commit comments