black format all the code #16

Merged
merged 11 commits on Oct 16, 2024
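The PR does not record the exact command used, so the following invocation is an assumption rather than a quote from the maintainers; it is the typical way a formatting-only change like this is produced and verified:

    pip install black
    black apps/                  # rewrite files in place
    black --check --diff apps/   # report what would change without writing anything

The wrapping throughout the diff below is consistent with Black's defaults, including its 88-character line length.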
59 changes: 43 additions & 16 deletions apps/coverage_capacity_optimization/cco_engine.py
@@ -39,7 +39,6 @@ def rf_to_coverage_dataframe(
over_coverage_threshold: float = 0,
growth_rate: float = 1,
) -> pd.DataFrame:

if lambda_ <= 0 or lambda_ >= 1:
raise ValueError("lambda_ must be between 0 and 1 (noninclusive)")

@@ -65,13 +64,20 @@ def rf_to_coverage_dataframe(
coverage_dataframe["weak_coverage"] = np.minimum(0, h)
coverage_dataframe["overly_covered"] = (h > 0) & (g <= 0)
coverage_dataframe["over_coverage"] = np.minimum(0, g)
coverage_dataframe["covered"] = ~coverage_dataframe["weakly_covered"] & ~coverage_dataframe["overly_covered"]
coverage_dataframe["covered"] = (
~coverage_dataframe["weakly_covered"]
& ~coverage_dataframe["overly_covered"]
)

# TODO : deprecate the below notion
# soft_weak_coverage = sigmoid(h, growth_rate)
# soft_over_coverage = sigmoid(g, growth_rate)
coverage_dataframe["soft_weak_coverage"] = 1000 * np.tanh(0.05 * growth_rate * h)
coverage_dataframe["soft_over_coverage"] = 1000 * np.tanh(0.05 * growth_rate * g)
coverage_dataframe["soft_weak_coverage"] = 1000 * np.tanh(
0.05 * growth_rate * h
)
coverage_dataframe["soft_over_coverage"] = 1000 * np.tanh(
0.05 * growth_rate * g
)
coverage_dataframe["network_coverage_utility"] = (
lambda_ * coverage_dataframe["soft_weak_coverage"]
+ (1 - lambda_) * coverage_dataframe["soft_over_coverage"]
@@ -83,8 +89,12 @@ def get_weak_over_coverage_percentages(
coverage_dataframe: pd.DataFrame,
) -> Tuple[float, float]:
n_points = len(coverage_dataframe.index)
weak_coverage_percent = 100 * coverage_dataframe["weakly_covered"].sum() / n_points
over_coverage_percent = 100 * coverage_dataframe["overly_covered"].sum() / n_points
weak_coverage_percent = (
100 * coverage_dataframe["weakly_covered"].sum() / n_points
)
over_coverage_percent = (
100 * coverage_dataframe["overly_covered"].sum() / n_points
)
return weak_coverage_percent, over_coverage_percent

@staticmethod
@@ -122,18 +132,26 @@ def get_cco_objective_value(
coverage_dataframe,
)
)
augmented_coverage_df_with_normalized_traffic_model["network_coverage_utility"] = (
augmented_coverage_df_with_normalized_traffic_model["normalized_traffic_statistic"]
augmented_coverage_df_with_normalized_traffic_model[
"network_coverage_utility"
] = (
augmented_coverage_df_with_normalized_traffic_model[
"normalized_traffic_statistic"
]
* coverage_dataframe["network_coverage_utility"]
)
coverage_dataframe["network_coverage_utility"] = augmented_coverage_df_with_normalized_traffic_model[
coverage_dataframe[
"network_coverage_utility"
] = augmented_coverage_df_with_normalized_traffic_model[
"network_coverage_utility"
]

if active_ids_list is None:
return -math.inf

active_df = coverage_dataframe[coverage_dataframe[id_field].isin(active_ids_list)]
active_df = coverage_dataframe[
coverage_dataframe[id_field].isin(active_ids_list)
]
active_sector_metric = active_df.groupby(id_field)["network_coverage_utility"]

if cco_metric == CcoMetric.PIXEL:
@@ -161,7 +179,9 @@ def add_tile_x_and_tile_y(
Dataframe with tile_x and tile_y columns appended

"""
tile_coords = list(zip(coverage_dataframe[loc_x_field], coverage_dataframe[loc_y_field]))
tile_coords = list(
zip(coverage_dataframe[loc_x_field], coverage_dataframe[loc_y_field])
)

coverage_dataframe["tile_x"], coverage_dataframe["tile_y"] = zip(
*map(
@@ -200,11 +220,16 @@ def augment_coverage_df_with_normalized_traffic_model(
"over_coverage",
"""

sum_of_desired_traffic_statistic_across_all_tiles = traffic_model_df[desired_traffic_statistic_col].sum()
sum_of_desired_traffic_statistic_across_all_tiles = traffic_model_df[
desired_traffic_statistic_col
].sum()
traffic_model_df["normalized_traffic_statistic"] = (
traffic_model_df[desired_traffic_statistic_col] / sum_of_desired_traffic_statistic_across_all_tiles
traffic_model_df[desired_traffic_statistic_col]
/ sum_of_desired_traffic_statistic_across_all_tiles
)
coverage_dataframe_with_bing_tiles = CcoEngine.add_tile_x_and_tile_y(
coverage_df
)
coverage_dataframe_with_bing_tiles = CcoEngine.add_tile_x_and_tile_y(coverage_df)
augmented_coverage_df_with_normalized_traffic_model = pd.merge(
traffic_model_df,
coverage_dataframe_with_bing_tiles,
@@ -235,6 +260,8 @@ def traffic_normalized_cco_metric(coverage_dataframe: pd.DataFrame) -> float:
# only one of weak_coverage and over_coverage can be simultaneously 1
# so, the logic below does not double count
return (
coverage_dataframe["normalized_traffic_statistic"] * coverage_dataframe["weak_coverage"]
+ coverage_dataframe["normalized_traffic_statistic"] * coverage_dataframe["over_coverage"]
coverage_dataframe["normalized_traffic_statistic"]
* coverage_dataframe["weak_coverage"]
+ coverage_dataframe["normalized_traffic_statistic"]
* coverage_dataframe["over_coverage"]
).sum()
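Nearly every hunk in this file is the same Black rule at work: a statement that exceeds the line-length limit is wrapped in parentheses and split before each binary operator, so the operator starts the continuation line. A minimal sketch with hypothetical names, not code from this repo:

    # Pretend the right-hand side is longer than Black's 88-character limit.
    weight, weak_term, over_term = 0.5, -3.0, -1.0  # hypothetical values
    utility = (
        weight * weak_term
        + (1 - weight) * over_term
    )

Breaking before the operator keeps each continuation line self-describing, which is also the style PEP 8 recommends.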
4 changes: 3 additions & 1 deletion apps/coverage_capacity_optimization/cco_example_app.py
@@ -52,7 +52,9 @@
)

# resolve the model status -- this blocking call ensures training is done and model is available for use
model_status: ModelStatus = radp_helper.resolve_model_status(MODEL_ID, wait_interval=3, max_attempts=10, verbose=True)
model_status: ModelStatus = radp_helper.resolve_model_status(
MODEL_ID, wait_interval=3, max_attempts=10, verbose=True
)

# handle an exception if one occurred
if not model_status.success:
29 changes: 23 additions & 6 deletions apps/coverage_capacity_optimization/dgpco_cco.py
@@ -137,7 +137,11 @@ def _single_step(
"""Single step of DGPCO."""

# calculate new metric
(current_rf_dataframe, current_coverage_dataframe, current_cco_objective,) = self._calc_metric(
(
current_rf_dataframe,
current_coverage_dataframe,
current_cco_objective,
) = self._calc_metric(
lambda_=lambda_,
weak_coverage_threshold=weak_coverage_threshold,
over_coverage_threshold=over_coverage_threshold,
@@ -151,8 +155,12 @@ def _single_step(
# pull the cell config index
cell_config_index = self.config.index[self.config["cell_id"] == cell_id][0]

orig_el_idx = self.valid_configuration_values[constants.CELL_EL_DEG].index(orig_el_deg)
cur_el_idx = self.valid_configuration_values[constants.CELL_EL_DEG].index(cur_el_deg)
orig_el_idx = self.valid_configuration_values[constants.CELL_EL_DEG].index(
orig_el_deg
)
cur_el_idx = self.valid_configuration_values[constants.CELL_EL_DEG].index(
cur_el_deg
)

for d in opt_delta:
new_el_idx = orig_el_idx + d
@@ -161,11 +169,15 @@ def _single_step(
# we do not want to check current value
continue

if new_el_idx < 0 or new_el_idx >= len(self.valid_configuration_values[constants.CELL_EL_DEG]):
if new_el_idx < 0 or new_el_idx >= len(
self.valid_configuration_values[constants.CELL_EL_DEG]
):
# we do not want to wrap around, since that would not be a neighboring tilt
continue

new_el = self.valid_configuration_values[constants.CELL_EL_DEG][new_el_idx]
new_el = self.valid_configuration_values[constants.CELL_EL_DEG][
new_el_idx
]

# update the cell config el_degree
self.config.loc[cell_config_index, constants.CELL_EL_DEG] = new_el
@@ -258,7 +270,12 @@ def _single_step(
logging.info(f"\nIn epoch: {epoch:02}/{num_epochs}...")

# Perform one step of DGPCO
(new_opt_el, new_rf_dataframe, new_coverage_dataframe, new_cco_objective_value,) = _single_step(
(
new_opt_el,
new_rf_dataframe,
new_coverage_dataframe,
new_cco_objective_value,
) = _single_step(
cell_id=cell_id,
orig_el_deg=orig_el_deg,
cur_el_deg=cur_el_deg,
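The exploded tuple assignments above also show Black's "magic trailing comma": the original one-line tuple of targets ended in a trailing comma, so Black keeps the collection expanded with one element per line instead of re-joining it. A small sketch with a hypothetical helper:

    def _calc_values():  # hypothetical stand-in for self._calc_metric(...)
        return 1, 2, 3

    # Trailing comma present: Black keeps one element per line.
    (
        rf_dataframe,
        coverage_dataframe,
        cco_objective,
    ) = _calc_values()

    # No trailing comma: Black collapses the targets when they fit on one line.
    rf_dataframe, coverage_dataframe, cco_objective = _calc_values()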
23 changes: 16 additions & 7 deletions apps/coverage_capacity_optimization/tests/test_cco_engine.py
@@ -14,7 +14,9 @@
class TestCCO(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.dummy_df = pd.DataFrame(data={CELL_ID: [1, 2, 73], LOC_X: [3, 4, 89], LOC_Y: [7, 8, 10]})
cls.dummy_df = pd.DataFrame(
data={CELL_ID: [1, 2, 73], LOC_X: [3, 4, 89], LOC_Y: [7, 8, 10]}
)

def test_invalid_lambda(self):
self.dummy_df["rsrp_dbm"] = [98, 92, 86]
@@ -37,7 +39,8 @@ def testing_weakly_covered(self):
self.dummy_df, weak_coverage_threshold=-100, over_coverage_threshold=0
)
self.assertEqual(
returned_df["weakly_covered"][returned_df["weakly_covered"] == 1].count() == 1,
returned_df["weakly_covered"][returned_df["weakly_covered"] == 1].count()
== 1,
True,
)

@@ -58,7 +61,8 @@ def test_overly_covered(self):
self.dummy_df, weak_coverage_threshold=-100, over_coverage_threshold=0
)
self.assertEqual(
returned_df["overly_covered"][returned_df["overly_covered"] == 0].count() == 1,
returned_df["overly_covered"][returned_df["overly_covered"] == 0].count()
== 1,
True,
)

@@ -81,11 +85,13 @@ def testing_some_not_weakly_or_overcovered(self):
True,
)
self.assertEqual(
returned_df["weakly_covered"][returned_df["weakly_covered"] == 1].count() == 1,
returned_df["weakly_covered"][returned_df["weakly_covered"] == 1].count()
== 1,
True,
)
self.assertEqual(
returned_df["overly_covered"][returned_df["overly_covered"] == 1].count() == 1,
returned_df["overly_covered"][returned_df["overly_covered"] == 1].count()
== 1,
True,
)

@@ -168,11 +174,14 @@ def test_get_cco_objective_value(self):
)
# asserting the multiplied version of network_coverage_utility
self.assertTrue(
coverage_df["network_coverage_utility"][0] == 0.24 and coverage_df["network_coverage_utility"][1] == 0.24
coverage_df["network_coverage_utility"][0] == 0.24
and coverage_df["network_coverage_utility"][1] == 0.24
)

# asserting the average of network_coverage_utility
self.assertTrue(0.24 == coverage_df["network_coverage_utility"].sum() / len(coverage_df))
self.assertTrue(
0.24 == coverage_df["network_coverage_utility"].sum() / len(coverage_df)
)

# asserting the returned cco_objective_value
expected_value = 0.24
43 changes: 29 additions & 14 deletions apps/energy_savings/energy_savings_gym.py
@@ -98,7 +98,9 @@ def __init__(
self.prediction_dfs = dict()
for cell_id in site_config_df.cell_id:
prediction_dfs = BayesianDigitalTwin.create_prediction_frames(
site_config_df=self.site_config_df[self.site_config_df.cell_id.isin([cell_id])].reset_index(),
site_config_df=self.site_config_df[
self.site_config_df.cell_id.isin([cell_id])
].reset_index(),
prediction_frame_template=prediction_frame_template[cell_id],
)
self.prediction_dfs.update(prediction_dfs)
@@ -140,11 +142,13 @@ def __init__(

# Reward when all cells are off:
self.r_norm = (1 - lambda_) * (
-10 * np.log10(self.num_cells) - over_coverage_threshold + min_rsrp - weak_coverage_threshold
-10 * np.log10(self.num_cells)
- over_coverage_threshold
+ min_rsrp
- weak_coverage_threshold
)

def _next_observation(self):

if self.ue_tracks:
data = next(self.ue_tracks)
for batch in data:
@@ -198,25 +202,32 @@ def _next_observation(self):
)
if self.traffic_model_df is None:
cco_objective_metric = (
coverage_dataframe["weak_coverage"].mean() + coverage_dataframe["over_coverage"].mean()
coverage_dataframe["weak_coverage"].mean()
+ coverage_dataframe["over_coverage"].mean()
)

else:
processed_coverage_dataframe = CcoEngine.augment_coverage_df_with_normalized_traffic_model(
self.traffic_model_df,
"avg_of_average_egress_kbps_across_all_time",
coverage_dataframe,
processed_coverage_dataframe = (
CcoEngine.augment_coverage_df_with_normalized_traffic_model(
self.traffic_model_df,
"avg_of_average_egress_kbps_across_all_time",
coverage_dataframe,
)
)

cco_objective_metric = CcoEngine.traffic_normalized_cco_metric(processed_coverage_dataframe)
cco_objective_metric = CcoEngine.traffic_normalized_cco_metric(
processed_coverage_dataframe
)

# Output for debugging/postprocessing purposes
if self.debug:
self.rf_dataframe = rf_dataframe
self.coverage_dataframe = coverage_dataframe

return (
EnergySavingsGym.ENERGY_MAX_PER_CELL * sum(self.on_off_state) / len(self.on_off_state),
EnergySavingsGym.ENERGY_MAX_PER_CELL
* sum(self.on_off_state)
/ len(self.on_off_state),
0.0,
cco_objective_metric, # TODO : normalized this against MAX_CLUSTER_CCO
)
@@ -230,7 +241,11 @@ def reward(
if energy_consumption == 0:
return self.r_norm
else:
return self.lambda_ * -1.0 * energy_consumption + (1 - self.lambda_) * cco_objective_metric - self.r_norm
return (
self.lambda_ * -1.0 * energy_consumption
+ (1 - self.lambda_) * cco_objective_metric
- self.r_norm
)

def make_action_from_state(self):
action = np.empty(self.num_cells, dtype=int)
@@ -242,7 +257,6 @@ def make_action_from_state(self):
return action

def _take_action(self, action):

num_cells = len(self.site_config_df)
# on_off_cell_state captures the on/off state of each cell (on is `1`)
on_off_cell_state = [1] * num_cells
@@ -277,7 +291,6 @@ def reset(self):
return self._next_observation()

def step(self, action):

# Execute one time step within the environment
self._take_action(action)

@@ -293,7 +306,9 @@ def step(self, action):

return obs, reward, done, {}

def get_all_possible_actions(self, possible_actions: List[List[int]]) -> List[List[int]]:
def get_all_possible_actions(
self, possible_actions: List[List[int]]
) -> List[List[int]]:
"""
A recursive function to get all possible actions as a list.
Useful for bruteforce search.
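Besides wrapping, this file shows a second Black rule: the blank lines that used to open the bodies of _next_observation, _take_action, and step are deleted, because Black removes empty lines at the beginning of a block. A minimal sketch with a hypothetical function:

    # Before Black: the body opens with a blank line
    def make_action():

        return [0, 1]

    # After Black: the leading blank line is removed
    def make_action():
        return [0, 1]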
8 changes: 2 additions & 6 deletions apps/example/example_app.py
@@ -136,9 +136,7 @@
train_response = radp_client.train(
model_id=MODEL_ID,
params=TRAINING_PARAMS,
ue_training_data=pd.concat(
[pd.read_csv(file) for file in TRAINING_DATA_FILES]
),
ue_training_data=pd.concat([pd.read_csv(file) for file in TRAINING_DATA_FILES]),
topology=pd.read_csv(TOPOLOGY_FILE),
)

@@ -164,9 +162,7 @@
# run simulation on cumulative data passed to model
simulation_response = radp_client.simulation(
simulation_event=simulation_event,
ue_data=pd.concat(
[pd.read_csv(file) for file in PREDICTION_DATA_FILES]
),
ue_data=pd.concat([pd.read_csv(file) for file in PREDICTION_DATA_FILES]),
config=pd.read_csv(PREDICTION_CONFIG),
)
simulation_id = simulation_response["simulation_id"]
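This last file moves in the opposite direction: Black also joins a manually wrapped expression back onto a single line when it fits within the limit and carries no magic trailing comma, which is what happens to both pd.concat([...]) calls above. A sketch with hypothetical file paths:

    import pandas as pd

    files = ["train_a.csv", "train_b.csv"]  # hypothetical paths

    # Before Black: wrapped although the call fits in 88 characters
    data = pd.concat(
        [pd.read_csv(f) for f in files]
    )

    # After Black: collapsed onto one line
    data = pd.concat([pd.read_csv(f) for f in files])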