Full upload so as not to lose anything important

This commit is contained in:
Jan Kowalczyk
2025-03-14 18:02:23 +01:00
parent 35fcfb7d5a
commit b824ff7482
33 changed files with 3539 additions and 353 deletions

View File

@@ -22,21 +22,6 @@ from rosbags.typesys.stores.ros1_noetic import (
from util import existing_path
def get_o3d_pointcloud(points: np.ndarray) -> o3d.geometry.PointCloud:
    """Build an Open3D point cloud from a structured NumPy point array.

    Reads the ``x``, ``y`` and ``z`` fields of *points*, casts them to
    float64, and discards any point whose three coordinates are all
    exactly zero (such points are invalid/no-return samples).

    Args:
        points: Structured array with at least ``x``, ``y`` and ``z`` fields.

    Returns:
        An ``o3d.geometry.PointCloud`` containing the remaining points.
    """
    coords = np.stack(
        [points[axis].astype(np.float64) for axis in ("x", "y", "z")],
        axis=-1,
    )
    # Keep only rows where at least one coordinate is non-zero.
    keep_mask = ~np.all(coords == 0, axis=1)
    valid_coords = coords[keep_mask]
    return o3d.geometry.PointCloud(o3d.utility.Vector3dVector(valid_coords))
# Mapping of PointField datatypes to NumPy dtypes
POINTFIELD_DATATYPES = {
1: np.int8, # INT8
@@ -178,9 +163,9 @@ def main() -> int:
with AnyReader([args.input_experiment_path]) as reader:
connections = reader.connections
topics = dict(sorted({conn.topic: conn for conn in connections}.items()))
assert (
args.pointcloud_topic in topics
), f"Topic {args.pointcloud_topic} not found"
assert args.pointcloud_topic in topics, (
f"Topic {args.pointcloud_topic} not found"
)
original_types = {}
typestore = get_typestore(Stores.ROS1_NOETIC)
@@ -207,16 +192,78 @@ def main() -> int:
"Processing all messages", total=reader.message_count
)
i = 0
for connection, timestamp, rawdata in reader.messages():
if connection.topic == args.pointcloud_topic:
i += 1
# For the pointcloud topic, we need to modify the data
msg = reader.deserialize(rawdata, connection.msgtype)
original_pointcloud = read_pointcloud(msg)
cleaned_pointcloud = clean_pointcloud(original_pointcloud)
o3d_pcd = get_o3d_pointcloud(cleaned_pointcloud)
# o3d_pointcloud = o3d.geometry.PointCloud()
# xyz_points = np.zeros((cleaned_pointcloud.shape[0], 3))
# xyz_points[:, 0] = cleaned_pointcloud["x"]
# xyz_points[:, 1] = cleaned_pointcloud["y"]
# xyz_points[:, 2] = cleaned_pointcloud["z"]
# o3d_pointcloud.points = o3d.utility.Vector3dVector(xyz_points)
range_dtypes = {
500: ("range_smaller_500", "u1"),
800: ("range_smaller_800", "u1"),
1000: ("range_smaller_1000", "u1"),
1200: ("range_smaller_1200", "u1"),
}
# radius_outlier_dtypes = {
# # 10: ("radius_outlier_10", "u1"),
# 50: ("radius_outlier_50", "u1"),
# # 100: ("radius_outlier_100", "u1"),
# }
# statistical_outlier_dtypes = {
# # (20, 1.0): ("statisstical_outlier_20_1", "u1"),
# # (20, 2.0): ("statisstical_outlier_20_2", "u1"),
# (20, 4.0): ("statisstical_outlier_20_4", "u1"),
# }
edited_dtype = np.dtype(
cleaned_pointcloud.dtype.descr + list(range_dtypes.values())
# + list(radius_outlier_dtypes.values())
# + list(statistical_outlier_dtypes.values())
)
edited_pointcloud = np.zeros(
cleaned_pointcloud.shape, dtype=edited_dtype
)
for name in cleaned_pointcloud.dtype.names:
edited_pointcloud[name] = cleaned_pointcloud[name]
for key, val in range_dtypes.items():
edited_pointcloud[val[0]][
edited_pointcloud["range"] < key
] = 255
# for key, val in radius_outlier_dtypes.items():
# _, mask_indices = o3d_pointcloud.remove_radius_outlier(
# nb_points=2, radius=key
# )
# mask = np.zeros(edited_pointcloud.shape[0], dtype=bool)
# mask[mask_indices] = True
# edited_pointcloud[val[0]][mask] = 255
# for key, val in statistical_outlier_dtypes.items():
# _, mask_indices = o3d_pointcloud.remove_statistical_outlier(
# nb_neighbors=key[0], std_ratio=key[1]
# )
# mask = np.zeros(edited_pointcloud.shape[0], dtype=bool)
# mask[mask_indices] = True
# edited_pointcloud[val[0]][mask] = 255
msg = create_pointcloud2_msg(msg, edited_pointcloud)
msg = create_pointcloud2_msg(msg, cleaned_pointcloud)
writer.write(
connection=new_connections[connection.id],
timestamp=timestamp,
@@ -224,6 +271,16 @@ def main() -> int:
message=msg, typename=msg.__msgtype__
),
)
# Create a boolean mask where the condition is met
# mask = cleaned_pointcloud["range"] > 1
# Use the mask to set fields to zero
# cleaned_pointcloud["x"][mask] = 0
# cleaned_pointcloud["y"][mask] = 0
# cleaned_pointcloud["z"][mask] = 0
# cleaned_pointcloud["range"][mask] = 0
else:
# For all other topics, we can write rawdata directly, no need to deserialize
writer.write(new_connections[connection.id], timestamp, rawdata)