Commit f666b0f

Merge pull request #1 from deepsense-ai/feature/master-update
Feature/master update
2 parents: 4d0def5 + 6ffd1ac

3 files changed, +85 -69 lines

3 files changed

+85
-69
lines changed

carla_birdeye_view/__init__.py (+60 -49)

@@ -1,6 +1,7 @@
 import carla
 import logging
 import numpy as np
+import cv2.cv2 as cv
 
 from enum import IntEnum, auto, Enum
 from pathlib import Path
@@ -21,22 +22,23 @@
     Dimensions,
 )
 
-__all__ = ["BirdViewProducer", "DEFAULT_HEIGHT", "DEFAULT_WIDTH"]
-
 LOGGER = logging.getLogger(__name__)
 
-DEFAULT_HEIGHT = 336  # its 84m when density is 4px/m
-DEFAULT_WIDTH = 150  # its 37.5m when density is 4px/m
 
-BirdView = np.ndarray  # [np.uint8] with shape (level, y, x)
-RgbCanvas = np.ndarray  # [np.uint8] with shape (y, x, 3)
+BirdView = np.ndarray  # [np.uint8] with shape (height, width, channel)
+RgbCanvas = np.ndarray  # [np.uint8] with shape (height, width, 3)
 
 
 class BirdViewCropType(Enum):
     FRONT_AND_REAR_AREA = auto()  # Freeway mode
     FRONT_AREA_ONLY = auto()  # Like in "Learning by Cheating"
 
 
+DEFAULT_HEIGHT = 336  # its 84m when density is 4px/m
+DEFAULT_WIDTH = 150  # its 37.5m when density is 4px/m
+DEFAULT_CROP_TYPE = BirdViewCropType.FRONT_AND_REAR_AREA
+
+
 class BirdViewMasks(IntEnum):
     PEDESTRIANS = 8
     RED_LIGHTS = 7
@@ -69,11 +71,6 @@ def bottom_to_top() -> List[int]:
     BirdViewMasks.ROAD: RGB.DIM_GRAY,
 }
 
-BIRDVIEW_SHAPE_CHW = (len(RGB_BY_MASK), DEFAULT_HEIGHT, DEFAULT_WIDTH)
-BIRDVIEW_SHAPE_HWC = (DEFAULT_HEIGHT, DEFAULT_WIDTH, len(RGB_BY_MASK))
-
-import cv2.cv2 as cv2
-
 
 def rotate(image, angle, center=None, scale=1.0):
     assert image.dtype == np.uint8
@@ -88,13 +85,13 @@ def rotate(image, angle, center=None, scale=1.0):
         center = (w // 2, h // 2)
 
     # perform the rotation
-    M = cv2.getRotationMatrix2D(center, angle, scale)
-    rotated = cv2.warpAffine(
+    M = cv.getRotationMatrix2D(center, angle, scale)
+    rotated = cv.warpAffine(
         image,
         M,
         (w, h),
-        flags=cv2.INTER_NEAREST,
-        borderMode=cv2.BORDER_CONSTANT,
+        flags=cv.INTER_NEAREST,
+        borderMode=cv.BORDER_CONSTANT,
         borderValue=0,
     )
 
@@ -131,20 +128,27 @@ def __init__(
         self,
         client: carla.Client,
         target_size: PixelDimensions,
+        render_lanes_on_junctions: bool,
         pixels_per_meter: int = 4,
-        crop_type: BirdViewCropType=BirdViewCropType.FRONT_AND_REAR_AREA
+        crop_type: BirdViewCropType = BirdViewCropType.FRONT_AND_REAR_AREA,
     ) -> None:
         self.client = client
         self.target_size = target_size
-        self._pixels_per_meter = pixels_per_meter
+        self.pixels_per_meter = pixels_per_meter
         self._crop_type = crop_type
 
         if crop_type is BirdViewCropType.FRONT_AND_REAR_AREA:
-            rendering_square_size = round(square_fitting_rect_at_any_rotation(self.target_size))
+            rendering_square_size = round(
+                square_fitting_rect_at_any_rotation(self.target_size)
+            )
         elif crop_type is BirdViewCropType.FRONT_AREA_ONLY:
             # We must keep rendering size from FRONT_AND_REAR_AREA (in order to avoid rotation issues)
-            enlarged_size = PixelDimensions(width=target_size.width, height=target_size.height * 2)
-            rendering_square_size = round(square_fitting_rect_at_any_rotation(enlarged_size))
+            enlarged_size = PixelDimensions(
+                width=target_size.width, height=target_size.height * 2
+            )
+            rendering_square_size = round(
+                square_fitting_rect_at_any_rotation(enlarged_size)
+            )
         else:
             raise NotImplementedError
         self.rendering_area = PixelDimensions(
@@ -153,46 +157,50 @@ def __init__(
         self._world = client.get_world()
         self._map = self._world.get_map()
         self.masks_generator = MapMaskGenerator(
-            client, pixels_per_meter=pixels_per_meter
+            client,
+            pixels_per_meter=pixels_per_meter,
+            render_lanes_on_junctions=render_lanes_on_junctions,
         )
 
         cache_path = self.parametrized_cache_path()
-        if Path(cache_path).is_file():
-            LOGGER.info(f"Loading cache from {cache_path}")
-            with FileLock(f"{cache_path}.lock"):
+        with FileLock(f"{cache_path}.lock"):
+            if Path(cache_path).is_file():
+                LOGGER.info(f"Loading cache from {cache_path}")
                 static_cache = np.load(cache_path)
                 self.full_road_cache = static_cache[0]
                 self.full_lanes_cache = static_cache[1]
                 self.full_centerlines_cache = static_cache[2]
-            LOGGER.info(f"Loaded static layers from cache file: {cache_path}")
-        else:
-            LOGGER.warning(
-                f"Cache file does not exist, generating cache at {cache_path}"
-            )
-            self.full_road_cache = self.masks_generator.road_mask()
-            self.full_lanes_cache = self.masks_generator.lanes_mask()
-            self.full_centerlines_cache = self.masks_generator.centerlines_mask()
-            static_cache = np.stack([self.full_road_cache, self.full_lanes_cache, self.full_centerlines_cache])
-            with FileLock(f"{cache_path}.lock"):
+                LOGGER.info(f"Loaded static layers from cache file: {cache_path}")
+            else:
+                LOGGER.warning(
+                    f"Cache file does not exist, generating cache at {cache_path}"
+                )
+                self.full_road_cache = self.masks_generator.road_mask()
+                self.full_lanes_cache = self.masks_generator.lanes_mask()
+                self.full_centerlines_cache = self.masks_generator.centerlines_mask()
+                static_cache = np.stack(
+                    [
+                        self.full_road_cache,
+                        self.full_lanes_cache,
+                        self.full_centerlines_cache,
+                    ]
+                )
                 np.save(cache_path, static_cache, allow_pickle=False)
-            LOGGER.info(f"Saved static layers to cache file: {cache_path}")
+                LOGGER.info(f"Saved static layers to cache file: {cache_path}")
 
     def parametrized_cache_path(self) -> str:
-        cache_dir = Path("birdview_v2_cache")
+        cache_dir = Path("birdview_v3_cache")
         cache_dir.mkdir(parents=True, exist_ok=True)
         opendrive_content_hash = cache.generate_opendrive_content_hash(self._map)
         cache_filename = (
             f"{self._map.name}__"
-            f"px_per_meter={self._pixels_per_meter}__"
+            f"px_per_meter={self.pixels_per_meter}__"
             f"opendrive_hash={opendrive_content_hash}__"
             f"margin={mask.MAP_BOUNDARY_MARGIN}.npy"
         )
         return str(cache_dir / cache_filename)
 
-    def produce(
-        self,
-        agent_vehicle: carla.Actor,
-    ) -> BirdView:
+    def produce(self, agent_vehicle: carla.Actor) -> BirdView:
         all_actors = actors.query_all(world=self._world)
         segregated_actors = actors.segregate_by_type(actors=all_actors)
         agent_vehicle_loc = agent_vehicle.get_location()
@@ -233,20 +241,21 @@ def produce(
         self.masks_generator.enable_local_rendering_mode(rendering_window)
         masks = self._render_actors_masks(agent_vehicle, segregated_actors, masks)
         cropped_masks = self.apply_agent_following_transformation_to_masks(
-            agent_vehicle, masks,
+            agent_vehicle, masks
         )
         ordered_indices = [mask.value for mask in BirdViewMasks.bottom_to_top()]
-        return cropped_masks[ordered_indices]
+        return cropped_masks[:, :, ordered_indices]
 
     @staticmethod
     def as_rgb(birdview: BirdView) -> RgbCanvas:
-        _, h, w = birdview.shape
+        h, w, d = birdview.shape
+        assert d == len(BirdViewMasks)
         rgb_canvas = np.zeros(shape=(h, w, 3), dtype=np.uint8)
         nonzero_indices = lambda arr: arr == COLOR_ON
 
         for mask_type in BirdViewMasks.bottom_to_top():
             rgb_color = RGB_BY_MASK[mask_type]
-            mask = birdview[mask_type]
+            mask = birdview[:, :, mask_type]
             # If mask above contains 0, don't overwrite content of canvas (0 indicates transparency)
             rgb_canvas[nonzero_indices(mask)] = rgb_color
         return rgb_canvas
@@ -279,8 +288,9 @@ def _render_actors_masks(
         return masks
 
     def apply_agent_following_transformation_to_masks(
-        self, agent_vehicle: carla.Actor, masks: np.ndarray,
+        self, agent_vehicle: carla.Actor, masks: np.ndarray
     ) -> np.ndarray:
+        """Returns image of shape: height, width, channels"""
         agent_transform = agent_vehicle.get_transform()
         angle = (
             agent_transform.rotation.yaw + 90
@@ -296,13 +306,14 @@ def apply_agent_following_transformation_to_masks(
             crop_with_car_in_the_center, axes=(1, 2, 0)
         )
         rotated = rotate(crop_with_centered_car, angle, center=rotation_center)
-        rotated = np.transpose(rotated, axes=(2, 0, 1))
 
         half_width = self.target_size.width // 2
         hslice = slice(rotation_center.x - half_width, rotation_center.x + half_width)
 
         if self._crop_type is BirdViewCropType.FRONT_AREA_ONLY:
-            vslice = slice(rotation_center.y - self.target_size.height, rotation_center.y)
+            vslice = slice(
+                rotation_center.y - self.target_size.height, rotation_center.y
+            )
         elif self._crop_type is BirdViewCropType.FRONT_AND_REAR_AREA:
             half_height = self.target_size.height // 2
             vslice = slice(
@@ -313,5 +324,5 @@ def apply_agent_following_transformation_to_masks(
         assert (
             vslice.start > 0 and hslice.start > 0
         ), "Trying to access negative indexes is not allowed, check for calculation errors!"
-        car_on_the_bottom = rotated[:, vslice, hslice]
+        car_on_the_bottom = rotated[vslice, hslice]
         return car_on_the_bottom
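
For context, a minimal usage sketch of BirdViewProducer after this change: the constructor now takes a render_lanes_on_junctions argument, and produce() returns channel-last masks of shape (height, width, channels). The localhost client, the way the agent vehicle is obtained, and the import paths mirror __main__.py and are assumptions for illustration, not part of this diff.

# Usage sketch (assumed setup: a CARLA simulator on localhost:2000 with at
# least one vehicle spawned; import paths follow carla_birdeye_view/__main__.py).
import carla

from carla_birdeye_view import (
    BirdViewProducer,
    BirdViewCropType,
    DEFAULT_HEIGHT,
    DEFAULT_WIDTH,
)
from carla_birdeye_view.mask import PixelDimensions

client = carla.Client("localhost", 2000)  # assumed local simulator
client.set_timeout(5.0)

producer = BirdViewProducer(
    client,
    PixelDimensions(width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT),
    render_lanes_on_junctions=False,  # new, explicit argument
    pixels_per_meter=4,
    crop_type=BirdViewCropType.FRONT_AND_REAR_AREA,
)

# Any carla.Actor works as the agent; here we grab the first spawned vehicle.
agent_vehicle = client.get_world().get_actors().filter("vehicle.*")[0]

birdview = producer.produce(agent_vehicle)  # np.uint8, shape (height, width, channels)
rgb = BirdViewProducer.as_rgb(birdview)     # np.uint8, shape (height, width, 3)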

carla_birdeye_view/__main__.py (+1)

@@ -48,6 +48,7 @@ def main():
         PixelDimensions(width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT),
         pixels_per_meter=4,
         crop_type=BirdViewCropType.FRONT_AND_REAR_AREA,
+        render_lanes_on_junctions=False,
     )
     stuck_frames_count = 0
 
carla_birdeye_view/mask.py (+24 -20)

@@ -76,7 +76,9 @@ class MapMaskGenerator:
     to become a regular RGB renderer (just change all `color` arguments to 3-element tuples)
     """
 
-    def __init__(self, client, pixels_per_meter: int) -> None:
+    def __init__(
+        self, client, pixels_per_meter: int, render_lanes_on_junctions: bool
+    ) -> None:
         self.client = client
         self.pixels_per_meter = pixels_per_meter
         self.rendering_window: Optional[RenderingWindow] = None
@@ -88,6 +90,7 @@ def __init__(self, client, pixels_per_meter: int) -> None:
         self._map_boundaries = self._find_map_boundaries()
         self._each_road_waypoints = self._generate_road_waypoints()
         self._mask_size: PixelDimensions = self.calculate_mask_size()
+        self._render_lanes_on_junctions = render_lanes_on_junctions
 
     def _find_map_boundaries(self) -> MapBoundaries:
         """Find extreme locations on a map.
@@ -226,31 +229,32 @@ def road_mask(self) -> Mask:
     def lanes_mask(self) -> Mask:
         canvas = self.make_empty_mask()
         for road_waypoints in self._each_road_waypoints:
-            # if not road_waypoints[0].is_junction:
-            # NOTE This block was inside if statement - some junctions may not have proper lane markings drawn
-            # Left Side
-            lanes.draw_lane_marking_single_side(
-                canvas,
-                road_waypoints,
-                side=LaneSide.LEFT,
-                location_to_pixel_func=self.location_to_pixel,
-                color=COLOR_ON,
-            )
+            if self._render_lanes_on_junctions or not road_waypoints[0].is_junction:
+                # Left Side
+                lanes.draw_lane_marking_single_side(
+                    canvas,
+                    road_waypoints,
+                    side=LaneSide.LEFT,
+                    location_to_pixel_func=self.location_to_pixel,
+                    color=COLOR_ON,
+                )
 
-            # Right Side
-            lanes.draw_lane_marking_single_side(
-                canvas,
-                road_waypoints,
-                side=LaneSide.RIGHT,
-                location_to_pixel_func=self.location_to_pixel,
-                color=COLOR_ON,
-            )
+                # Right Side
+                lanes.draw_lane_marking_single_side(
+                    canvas,
+                    road_waypoints,
+                    side=LaneSide.RIGHT,
+                    location_to_pixel_func=self.location_to_pixel,
+                    color=COLOR_ON,
+                )
         return canvas
 
     def centerlines_mask(self) -> Mask:
         canvas = self.make_empty_mask()
         for road_waypoints in self._each_road_waypoints:
-            polygon = [self.location_to_pixel(wp.transform.location) for wp in road_waypoints]
+            polygon = [
+                self.location_to_pixel(wp.transform.location) for wp in road_waypoints
+            ]
             if len(polygon) > 2:
                 polygon = np.array([polygon], dtype=np.int32)
                 cv.polylines(
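
For completeness, a sketch of driving MapMaskGenerator directly with its new required flag (BirdViewProducer normally constructs it for you and forwards render_lanes_on_junctions). The localhost client is an assumed setup, not part of the change.

# Illustrative only: MapMaskGenerator now requires render_lanes_on_junctions.
import carla

from carla_birdeye_view.mask import MapMaskGenerator

client = carla.Client("localhost", 2000)  # assumed local simulator
client.set_timeout(5.0)

generator = MapMaskGenerator(
    client,
    pixels_per_meter=4,
    render_lanes_on_junctions=False,  # skip lane markings on junction roads
)
lanes = generator.lanes_mask()  # with False, junction roads are left blank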
