Commit 06fdc7c

allow existing tests to work with the updated model, including C instances and 1.15x IPC for m6i
1 parent 4e034b9 commit 06fdc7c

7 files changed: +12 -12 lines changed


tests/netflix/test_cassandra.py

Lines changed: 2 additions & 2 deletions

@@ -61,7 +61,7 @@ def test_capacity_small_fast():
     )[0]
     small_result = cap_plan.candidate_clusters.zonal[0]
     # We really should just pay for CPU here
-    assert small_result.instance.name.startswith("m")
+    assert small_result.instance.name.startswith("c")

     cores = small_result.count * small_result.instance.cpu
     assert 30 <= cores <= 80
@@ -158,7 +158,7 @@ def test_capacity_high_writes():
         extra_model_arguments={"copies_per_region": 2},
     )[0]
     high_writes_result = cap_plan.candidate_clusters.zonal[0]
-    assert high_writes_result.instance.family.startswith("m")
+    assert high_writes_result.instance.family.startswith("c")
     assert high_writes_result.count > 4

     num_cpus = high_writes_result.instance.cpu * high_writes_result.count

tests/netflix/test_cassandra_uncertain.py

Lines changed: 2 additions & 4 deletions

@@ -108,7 +108,7 @@ def test_increasing_qps_simple():
     )

     # We should generally want cheap CPUs
-    assert all(r[0][0] in ("r", "m", "i") for r in result)
+    assert all(r[0][0] in ("r", "m", "i", "c") for r in result)

     # Should have more capacity as requirement increases
     x = [r[1] for r in result]
@@ -209,9 +209,7 @@ def test_worn_dataset_ebs():
         <= lr.candidate_clusters.annual_costs["cassandra.zonal-clusters"]
         < 1_000_000
     )
-    assert lr_cluster.instance.name.startswith(
-        "m5."
-    ) or lr_cluster.instance.name.startswith("r5.")
+    assert lr_cluster.instance.family in ("m5", "r5", "r6a")
     assert lr_cluster.attached_drives[0].name == "gp3"
     # gp2 should not provision massive drives, prefer to upcolor
     assert lr_cluster.attached_drives[0].size_gib < 9000
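As an aside on the new check: comparing instance.family against a small set is more direct than prefix-matching the full instance name. A minimal sketch of the relationship between the two, assuming the family is simply the part of the instance name before the size suffix (the library's actual attribute may be derived differently):

# Hypothetical illustration; the name below is made up, not taken from the test.
name = "r6a.2xlarge"
family = name.split(".")[0]   # -> "r6a"
assert family in ("m5", "r5", "r6a")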

tests/netflix/test_counter.py

Lines changed: 1 addition & 1 deletion

@@ -69,7 +69,7 @@ def test_counter_increasing_qps_simple():
     # We should generally want cheap CPUs for Cassandra
     zonal_families = {r[0] for r in zonal_result}
     assert all(
-        family[0] in ("r", "m", "i") for family in zonal_families
+        family[0] in ("r", "m", "c") for family in zonal_families
     ), f"{zonal_families}"

     # We just want ram and cpus for a java app

tests/netflix/test_crdb.py

Lines changed: 1 addition & 1 deletion

@@ -33,7 +33,7 @@ def test_crdb_basic():
     lr_cluster = lr.candidate_clusters.zonal[0]

     # Resulting cluster should not be too expensive
-    assert 2000 < lr.candidate_clusters.total_annual_cost < 10_000
+    assert 1900 < lr.candidate_clusters.total_annual_cost < 10_000

     # Should have enough disk space for around 80GiB of data in a single
     # replica (compression). Also that drive should be ephemeral

tests/netflix/test_elasticsearch.py

Lines changed: 1 addition & 1 deletion

@@ -40,7 +40,7 @@ def test_es_increasing_qps_simple():
     for zonal in cap_plan.least_regret[0].candidate_clusters.zonal:
         zonal_result[zonal.cluster_type].append(zonal_summary(zonal))

-    expected_families = {"r", "m", "i"}
+    expected_families = {"r", "m", "c", "i"}
     for cluster_type in list(zonal_result.keys()):
         zonal_by_increasing_qps = zonal_result[cluster_type]
tests/netflix/test_evcache.py

Lines changed: 4 additions & 2 deletions

@@ -68,8 +68,10 @@ def test_evcache_inmemory_low_latency_reads_cpu():
         * candidate.candidate_clusters.zonal[0].instance.cpu_ghz
     )

-    assert total_cpu_power > 1100
-
+    assert total_cpu_power > 1000, (f"CPU power is not sufficient for low latency reads, with"
+                                    f" {candidate.candidate_clusters.zonal[0].count} *"
+                                    f" {candidate.candidate_clusters.zonal[0].instance.name},"
+                                    f" total= {total_cpu_power}.")

 class BufferComponents:
     pass
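The new failure message relies on Python's implicit concatenation of adjacent f-string literals inside parentheses. A minimal sketch with made-up values (count, name, and power below are illustrative, not pulled from the test):

# Adjacent (f-)string literals in parentheses are joined into a single
# string at compile time, so the multi-line message prints as one line.
count, name, power = 4, "r6gd.4xlarge", 1024.0  # hypothetical values
message = (f"CPU power is not sufficient for low latency reads, with"
           f" {count} * {name},"
           f" total= {power}.")
print(message)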

tests/netflix/test_java_app.py

Lines changed: 1 addition & 1 deletion

@@ -65,7 +65,7 @@ def test_java_app_small_but_high_qps():
     cores = java_result.count * java_result.instance.cpu
     assert_similar_compute(
         expected_shape=shape("m6i.xlarge"),
-        expected_count=50,
+        expected_count=43,
         actual_shape=java_result.instance,
         actual_count=java_result.count,
         # Don't care about memory
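The drop from 50 to 43 expected m6i.xlarge instances is consistent with the 1.15x IPC bump for m6i noted in the commit message, assuming the planner scales the required instance count inversely with per-core performance (the exact rounding below is a guess, not taken from the model):

# Rough consistency check, not the planner's actual computation.
previous_expected_count = 50
ipc_uplift = 1.15  # per the commit message
print(int(previous_expected_count / ipc_uplift))  # -> 43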
