8 changes: 8 additions & 0 deletions .gitignore
@@ -2,3 +2,11 @@
*.pyc
pypcd.egg-info
build
dist
bin
lib
include
.Python
.idea
pip-selfcheck.json
.pytest_cache/
6 changes: 3 additions & 3 deletions pypcd/pdutil.py
@@ -1,5 +1,5 @@
import pypcd

from pypcd import pypcd
from pypcd import numpy_pc2
def data_frame_to_point_cloud(df):
""" create a PointCloud object from a dataframe.
"""
@@ -28,4 +28,4 @@ def data_frame_to_point_cloud(df):

def data_frame_to_message(df, stamp=None, frame_id=None):
    pc_data = df.to_records(index=False)
    return pypcd.numpy_pc2.array_to_pointcloud2(pc_data, stamp=stamp, frame_id=frame_id)
    return numpy_pc2.array_to_pointcloud2(pc_data, stamp=stamp, frame_id=frame_id)
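The pdutil change above switches to explicit `from pypcd import ...` submodule imports so the module resolves the same way under Python 3. A minimal usage sketch of the fixed module, assuming pandas is installed and the frame's columns map directly onto point fields (the column names here are only an example):

    import pandas as pd
    from pypcd import pdutil

    # each column becomes a PCD field via df.to_records(index=False)
    df = pd.DataFrame({'x': [0.0, 1.0],
                       'y': [0.0, 2.0],
                       'z': [0.0, 3.0]}, dtype='float32')

    pc = pdutil.data_frame_to_point_cloud(df)
    print(pc.get_metadata())  # expect fields ['x', 'y', 'z'] and 2 points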
34 changes: 19 additions & 15 deletions pypcd/pypcd.py
@@ -10,15 +10,15 @@
import re
import struct
import copy
import cStringIO as sio
from io import StringIO as sio
import numpy as np
import warnings
import lzf

HAS_SENSOR_MSGS = True
try:
    from sensor_msgs.msg import PointField
    import numpy_pc2 # needs sensor_msgs
    from . import numpy_pc2 # needs sensor_msgs
except ImportError:
    HAS_SENSOR_MSGS = False

@@ -77,6 +77,8 @@ def parse_header(lines):
    for ln in lines:
        if ln.startswith('#') or len(ln) < 2:
            continue
        ln = ln.replace(' _ ',' s ',1)
        ln = ln.replace(' _ ',' m ',1)
        match = re.match('(\w+)\s+([\w\s\.]+)', ln)
        if not match:
            warnings.warn("warning: can't understand line: %s" % ln)
@@ -87,11 +89,11 @@
        elif key in ('fields', 'type'):
            metadata[key] = value.split()
        elif key in ('size', 'count'):
            metadata[key] = map(int, value.split())
            metadata[key] = list(map(int, value.split()))
        elif key in ('width', 'height', 'points'):
            metadata[key] = int(value)
        elif key == 'viewpoint':
            metadata[key] = map(float, value.split())
            metadata[key] = list(map(float, value.split()))
        elif key == 'data':
            metadata[key] = value.strip().lower()
        # TODO apparently count is not required?
@@ -199,9 +201,9 @@ def _build_dtype(metadata):
            fieldnames.append(f)
            typenames.append(np_type)
        else:
            fieldnames.extend(['%s_%04d' % (f, i) for i in xrange(c)])
            fieldnames.extend(['%s_%04d' % (f, i) for i in range(c)])
            typenames.extend([np_type]*c)
    dtype = np.dtype(zip(fieldnames, typenames))
    dtype = np.dtype(list(zip(fieldnames, typenames)))
    return dtype


@@ -267,6 +269,8 @@ def point_cloud_from_fileobj(f):
    header = []
    while True:
        ln = f.readline().strip()
        if not isinstance(ln, str):
            ln = ln.decode('utf-8')
        header.append(ln)
        if ln.startswith('DATA'):
            metadata = parse_header(header)
@@ -309,13 +313,13 @@ def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
        assert(data_compression in ('ascii', 'binary', 'binary_compressed'))
        metadata['data'] = data_compression

    header = write_header(metadata)
    header = write_header(metadata).encode('utf-8')
    fileobj.write(header)
    if metadata['data'].lower() == 'ascii':
        fmtstr = build_ascii_fmtstr(pc)
        np.savetxt(fileobj, pc.pc_data, fmt=fmtstr)
    elif metadata['data'].lower() == 'binary':
        fileobj.write(pc.pc_data.tostring('C'))
        fileobj.write(pc.pc_data.tostring())
    elif metadata['data'].lower() == 'binary_compressed':
        # TODO
        # a '_' field is ignored by pcl and breaks compressed point clouds.
@@ -324,9 +328,9 @@ def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
        # reorder to column-by-column
        uncompressed_lst = []
        for fieldname in pc.pc_data.dtype.names:
            column = np.ascontiguousarray(pc.pc_data[fieldname]).tostring('C')
            column = np.ascontiguousarray(pc.pc_data[fieldname]).tostring()
            uncompressed_lst.append(column)
        uncompressed = ''.join(uncompressed_lst)
        uncompressed = b''.join(uncompressed_lst)
        uncompressed_size = len(uncompressed)
        # print("uncompressed_size = %r"%(uncompressed_size))
        buf = lzf.compress(uncompressed)
@@ -359,19 +363,19 @@ def point_cloud_to_buffer(pc, data_compression=None):
def save_point_cloud(pc, fname):
""" save pointcloud to fname in ascii format
"""
with open(fname, 'w') as f:
with open(fname, 'wb') as f:
point_cloud_to_fileobj(pc, f, 'ascii')


def save_point_cloud_bin(pc, fname):
""" save pointcloud to fname in binary format
"""
with open(fname, 'w') as f:
with open(fname, 'wb') as f:
point_cloud_to_fileobj(pc, f, 'binary')


def save_point_cloud_bin_compressed(pc, fname):
with open(fname, 'w') as f:
with open(fname, 'wb') as f:
point_cloud_to_fileobj(pc, f, 'binary_compressed')


@@ -461,7 +465,7 @@ def add_fields(pc, metadata, pc_data):
        else:
            fieldnames.extend(['%s_%04d' % (f, i) for i in xrange(c)])
            typenames.extend([np_type]*c)
    dtype = zip(fieldnames, typenames)
    dtype = list(zip(fieldnames, typenames))
    # new dtype. could be inferred?
    new_dtype = [(f, pc.pc_data.dtype[f])
                 for f in pc.pc_data.dtype.names] + dtype
@@ -631,7 +635,7 @@ def save_pcd(self, fname, compression=None, **kwargs):
            warnings.warn('data_compression keyword is deprecated for'
                          ' compression')
            compression = kwargs['data_compression']
        with open(fname, 'w') as f:
        with open(fname, 'wb') as f:
            point_cloud_to_fileobj(self, f, compression)

    def save_pcd_to_fileobj(self, fileobj, compression=None, **kwargs):
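Taken together, the pypcd.py changes adopt a bytes-oriented convention for Python 3: PCD files are opened in binary mode ('wb'), the header is UTF-8-encoded on write and decoded line by line on read, and point data is joined and written as bytes. A standalone sketch of the read-side pattern (not pypcd's public API; io.BytesIO stands in for a real file object):

    import io

    def read_header_lines(fileobj):
        # collect header lines up to and including the DATA line,
        # decoding bytes from a binary file object into str
        header = []
        while True:
            ln = fileobj.readline().strip()
            if not isinstance(ln, str):
                ln = ln.decode('utf-8')
            header.append(ln)
            if ln.startswith('DATA'):
                return header

    buf = io.BytesIO(b"VERSION .7\nFIELDS x y z\nDATA ascii\n")
    print(read_header_lines(buf))
    # ['VERSION .7', 'FIELDS x y z', 'DATA ascii']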
18 changes: 9 additions & 9 deletions pypcd/tests/test_pypcd.py
@@ -77,7 +77,7 @@ def test_parse_header():


def test_from_path(pcd_fname):
    import pypcd
    from pypcd import pypcd
    pc = pypcd.PointCloud.from_path(pcd_fname)

    fields = 'x y z normal_x normal_y normal_z curvature boundary k vp_x vp_y vp_z principal_curvature_x principal_curvature_y principal_curvature_z pc1 pc2'.split()
@@ -88,7 +88,7 @@ def test_from_path(pcd_fname):


def test_add_fields(pcd_fname):
    import pypcd
    from pypcd import pypcd
    pc = pypcd.PointCloud.from_path(pcd_fname)

    old_md = pc.get_metadata()
@@ -107,7 +107,7 @@ def test_add_fields(pcd_fname):


def test_path_roundtrip_ascii(pcd_fname):
    import pypcd
    from pypcd import pypcd
    pc = pypcd.PointCloud.from_path(pcd_fname)
    md = pc.get_metadata()

@@ -131,7 +131,7 @@ def test_path_roundtrip_ascii(pcd_fname):


def test_path_roundtrip_binary(pcd_fname):
    import pypcd
    from pypcd import pypcd
    pc = pypcd.PointCloud.from_path(pcd_fname)
    md = pc.get_metadata()

@@ -145,7 +145,7 @@ def test_path_roundtrip_binary(pcd_fname):

    pc2 = pypcd.PointCloud.from_path(tmp_fname)
    md2 = pc2.get_metadata()
    for k, v in md2.iteritems():
    for k, v in md2.items():
        if k == 'data':
            assert v == 'binary'
        else:
@@ -159,7 +159,7 @@ def test_path_roundtrip_binary(pcd_fname):


def test_path_roundtrip_binary_compressed(pcd_fname):
    import pypcd
    from pypcd import pypcd
    pc = pypcd.PointCloud.from_path(pcd_fname)
    md = pc.get_metadata()

@@ -173,7 +173,7 @@ def test_path_roundtrip_binary_compressed(pcd_fname):

    pc2 = pypcd.PointCloud.from_path(tmp_fname)
    md2 = pc2.get_metadata()
    for k, v in md2.iteritems():
    for k, v in md2.items():
        if k == 'data':
            assert v == 'binary_compressed'
        else:
@@ -186,7 +186,7 @@ def test_path_roundtrip_binary_compressed(pcd_fname):


def test_cat_pointclouds(pcd_fname):
    import pypcd
    from pypcd import pypcd
    pc = pypcd.PointCloud.from_path(pcd_fname)
    pc2 = pc.copy()
    pc2.pc_data['x'] += 0.1
@@ -196,7 +196,7 @@ def test_cat_pointclouds(pcd_fname):
    assert(pc3.width == pc.width+pc2.width)

def test_ascii_bin1(ascii_pcd_fname, bin_pcd_fname):
    import pypcd
    from pypcd import pypcd
    apc1 = pypcd.point_cloud_from_path(ascii_pcd_fname)
    bpc1 = pypcd.point_cloud_from_path(bin_pcd_fname)
    am = cloud_centroid(apc1)
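A round-trip sketch mirroring the updated tests, assuming some_cloud.pcd is a valid PCD file on disk (the file names are illustrative); note dict.items() replacing the removed iteritems():

    from pypcd import pypcd

    pc = pypcd.PointCloud.from_path('some_cloud.pcd')
    pypcd.save_point_cloud_bin(pc, 'some_cloud_bin.pcd')

    pc2 = pypcd.PointCloud.from_path('some_cloud_bin.pcd')
    for k, v in pc2.get_metadata().items():
        if k == 'data':
            assert v == 'binary'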
2 changes: 1 addition & 1 deletion setup.py
@@ -1,5 +1,5 @@
import os
from distutils.core import setup
from setuptools import setup

# Get version and release info, which is all stored in pypcd/version.py
ver_file = os.path.join('pypcd', 'version.py')
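The setup.py hunk swaps distutils for setuptools. For context, a hypothetical minimal setuptools equivalent of the resulting file (the real setup.py pulls name, version, and the rest of its metadata from pypcd/version.py as noted above, and the dependency names below are assumptions, not taken from this diff):

    from setuptools import setup

    setup(
        name='pypcd',
        packages=['pypcd'],
        # assumed runtime dependencies; the lzf module is published as python-lzf
        install_requires=['numpy', 'python-lzf'],
    )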