Semantic segmentation: predict_labels on my own point cloud using the predefined KITTI model
Checklist
- [X] I have searched for similar issues.
- [X] I have tested with the latest development wheel.
- [X] I have checked the release documentation and the latest documentation (for master branch).
My Question
I am trying to predict labels for my own point cloud using the predefined KITTI model. I run the script in Colab. The output shows that all points get the same label, number 18, which clearly cannot be right.

```python
import os
import numpy as np
import open3d as o3d
import open3d.ml as _ml3d
import open3d.ml.torch as ml3d

cfg_file = "/content/Open3D-ML/ml3d/configs/randlanet_semantickitti.yml"
cfg = _ml3d.utils.Config.load_from_file(cfg_file)
model = ml3d.models.RandLANet(**cfg.model)
datapath = '/content/data/velodyne'
dataset = ml3d.datasets.KITTI(dataset_path=datapath, use_cache=True)
pipeline = ml3d.pipelines.SemanticSegmentation(model=model, dataset=dataset, **cfg.pipeline)

# download the weights.
ckpt_folder = "./logs/"
os.makedirs(ckpt_folder, exist_ok=True)
ckpt_path = ckpt_folder + "randlanet_semantickitti_202201071330utc.pth"
randlanet_url = "https://storage.googleapis.com/open3d-releases/model-zoo/randlanet_semantickitti_202201071330utc.pth"
if not os.path.exists(ckpt_path):
    os.system("wget {} -O {}".format(randlanet_url, ckpt_path))

# load the parameters.
pipeline.load_ckpt(ckpt_path=ckpt_path)

# load my own point cloud from a plain-text xyz file.
filepath = '/content/drive/MyDrive/L001.xyz'
point_cloud = np.loadtxt(filepath)  # use skiprows=1 if the file has a header line
pcd = o3d.geometry.PointCloud()
pcd.points = o3d.utility.Vector3dVector(point_cloud[:, :3])
label = np.zeros((len(pcd.points), 1))
data = {'point': np.asarray(pcd.points),  # the point cloud, an n x 3 numpy array
        'label': label}

# run_inference returns a dict with 'predict_labels' and 'predict_scores'.
result = pipeline.run_inference(data)
pred_label = result['predict_labels'].astype(np.int32)
pred_label
```
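For reference, this is a minimal diagnostic sketch I would try on the same pipeline. The recentering step and the explicit `'feat': None` entry are assumptions on my part (the SemanticKITTI weights were trained on sensor-centered scans in metres, and this config uses xyz only), not confirmed behaviour:

```python
import numpy as np

# Continuing from the snippet above: `pcd` and `pipeline` are already defined.
# Assumed sanity checks, not the official recipe.
points = np.asarray(pcd.points, dtype=np.float32)  # RandLANet expects float32 xyz
points -= points.mean(axis=0)                      # assumption: recenter, since KITTI scans are sensor-centered and in metres
data = {
    'point': points,
    'feat': None,                                  # assumption: no per-point features for the SemanticKITTI config
    'label': np.zeros((len(points),), dtype=np.int32)
}
result = pipeline.run_inference(data)

# If the prediction really collapses to a single class, the histogram makes it obvious.
labels, counts = np.unique(result['predict_labels'], return_counts=True)
print(dict(zip(labels.tolist(), counts.tolist())))
```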
Hi, I have the same issue. Have you solved this problem yet?
I have the same issue with the pretrained model on S3DIS. On the test data it works fine, but on custom data it does not. My custom point cloud is downsampled to a 0.01 m voxel size.
```python
import os
import numpy as np
import open3d as o3d
import open3d.ml as _ml3d
import open3d.ml.torch as ml3d

cfg_file = "randlanet_s3dis.yml"
cfg = _ml3d.utils.Config.load_from_file(cfg_file)
model = ml3d.models.RandLANet(**cfg.model)
cfg.dataset['dataset_path'] = '~/Pointclouds/S3DIS/Stanford3dDataset_v1.2_Aligned_Version/'
dataset = ml3d.datasets.S3DIS(cfg.dataset.pop('dataset_path', None), **cfg.dataset)
pipeline = ml3d.pipelines.SemanticSegmentation(model, dataset=dataset, device="gpu", **cfg.pipeline)

# download the weights.
ckpt_folder = "./logs/"
os.makedirs(ckpt_folder, exist_ok=True)
ckpt_path = ckpt_folder + "randlanet_s3dis_202201071330utc.pth"
randlanet_url = "https://storage.googleapis.com/open3d-releases/model-zoo/randlanet_s3dis_202201071330utc.pth"
if not os.path.exists(ckpt_path):
    cmd = "wget {} -O {}".format(randlanet_url, ckpt_path)
    os.system(cmd)

# load the parameters.
pipeline.load_ckpt(ckpt_path=ckpt_path)

# load custom point cloud
pcd = o3d.io.read_point_cloud("data/pc.ply")
pcd.remove_non_finite_points()

data = {
    "point": np.asarray(pcd.points, dtype=np.float32),
    "feat": np.asarray(pcd.colors, dtype=np.float32) * np.float32(255.),
    "label": np.zeros((len(pcd.points),), dtype=np.int32)
}
result = pipeline.run_inference(data)

pc = [{
    "name": "pc",
    "points": data["point"],
    "labels": result["predict_labels"]
}]
vis = ml3d.vis.Visualizer()
vis.visualize(pc)
```
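Not an authoritative fix, but one thing worth ruling out with the S3DIS weights is a units/feature mismatch: the pretrained model saw rooms in metres with RGB features in the 0-255 range, so a cloud in millimetres, or one without colors, is out of distribution. A minimal sketch of those checks (the shift to the origin and the grey fallback color are my own assumptions):

```python
import numpy as np
import open3d as o3d

# Continuing from the snippet above: `pipeline` is already set up with the S3DIS checkpoint.
pcd = o3d.io.read_point_cloud("data/pc.ply")
pcd.remove_non_finite_points()

points = np.asarray(pcd.points, dtype=np.float32)
points -= points.min(axis=0)               # assumption: S3DIS rooms start near the origin, coordinates in metres
print("extent in m:", points.max(axis=0))  # room-sized values expected; thousands suggest millimetres

if pcd.has_colors():
    feat = np.asarray(pcd.colors, dtype=np.float32) * np.float32(255.)  # RGB scaled to 0-255, as in the block above
else:
    feat = np.full((len(points), 3), 128.0, dtype=np.float32)           # assumed grey fallback if the cloud has no RGB

data = {"point": points, "feat": feat, "label": np.zeros((len(points),), dtype=np.int32)}
result = pipeline.run_inference(data)
print(np.unique(result["predict_labels"], return_counts=True))
```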