python · numpy · hdf5 · ply-file-format

How to convert a .pts or .npy file into a .ply or .h5 file?


I have 3D point cloud data as .npy and .pts files.

To use this data in a 3D classification neural net, I need to convert it to .h5 files.

So, first I am trying to convert the .npy or .pts files to .ply files using Python.

Could you point me to example code, or help me with converting between these file formats?

I would also really appreciate any way to convert .ply to .h5 format.

Sorry for my poor English skills.


Solution

  • I hope this code will get you started; it shows how to create an .h5 file from a .npy file (or from randomly generated points). Note that the group and dataset names used here are arbitrary (it's just an example).

    import os
    import h5py
    import numpy as np

    # read an existing numpy array of points, or create a random one and save it
    def create_or_load_random_points_npy(file_radix, size, low, high):
        if os.path.exists(file_radix + '.npy'):
            arr = np.load(file_radix + '.npy')
        else:
            arr = np.random.uniform(low, high, (size, 3))
            np.save(file_radix, arr)
        return arr


    # convert a numpy array of shape (size, 3) to an h5 file with two groups
    # representing two ways of serializing points
    def convert_array_2_shades_of_grey(file_radix, arr):
        file = h5py.File(file_radix + '.h5', 'w')
        # first way: all points in a single (N, 3) dataset inside one group
        group = file.create_group("single_dataset")
        group.attrs["desc"] = "random points in a single dataset"
        dset = group.create_dataset("points", (len(arr), len(arr[0])), dtype='float64')
        dset[...] = arr
        # second way: one dataset per coordinate
        group = file.create_group("several_datasets")
        group.attrs["desc"] = "random points in several datasets (one for each coord)"
        dset = group.create_dataset("x", (len(arr),), dtype='float64')
        dset[...] = arr[:, 0]
        dset = group.create_dataset("y", (len(arr),), dtype='float64')
        dset[...] = arr[:, 1]
        dset = group.create_dataset("z", (len(arr),), dtype='float64')
        dset[...] = arr[:, 2]
        file.close()

    # load the h5 file; choose which of the two layouts you want to deserialize
    def load_random_points_h5(file_radix, single=True):
        file = h5py.File(file_radix + '.h5', 'r')
        if single:
            group = file["single_dataset"]
            print('reading ->', group.attrs["desc"])
            dset = group["points"]
            arr = dset[...]
        else:
            group = file["several_datasets"]
            print('reading ->', group.attrs["desc"])
            dset = group["x"]
            arr = np.zeros((dset.size, 3))
            arr[:, 0] = dset[...]
            dset = group["y"]
            arr[:, 1] = dset[...]
            dset = group["z"]
            arr[:, 2] = dset[...]
        file.close()
        return arr

    # And now we test!
    file_radix = 'test'
    # create or load the npy file
    arr = create_or_load_random_points_npy(file_radix, 10000, -100.0, 100.0)
    # Well, well, what is in the box?
    print(arr)

    # converting the numpy array to h5
    convert_array_2_shades_of_grey(file_radix, arr)

    # loading, single-dataset style
    arr = load_random_points_h5(file_radix, True)
    print(arr)
    # loading, several-datasets style
    arr = load_random_points_h5(file_radix, False)
    print(arr)
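
    The question also mentions .pts input. The exact layout of .pts files varies, but assuming yours is plain text with one whitespace-separated "x y z" triple per line (adjust if it has a header or extra columns), it can be loaded into the same kind of (N, 3) array and fed to the converter above:

    # hypothetical .pts reader: assumes whitespace-separated "x y z" per line, no header
    def load_pts(path):
        return np.loadtxt(path)

    # arr = load_pts('cloud.pts')   # then: convert_array_2_shades_of_grey('cloud', arr)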
    

    To view the content of the h5 file, download HDFview.

    (screenshot: the h5 file's contents viewed in HDFview)
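
    If you prefer to stay in Python instead of installing HDFview, here is a small sketch that walks the file and prints what it finds (the file name comes from the example above):

    import h5py

    # print every group/dataset in the file: shapes for datasets, attributes for groups
    with h5py.File('test.h5', 'r') as f:
        f.visititems(lambda name, obj:
            print(name, obj.shape if isinstance(obj, h5py.Dataset) else dict(obj.attrs)))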

    Also don't hesitate to look at the h5py doc.

    And last but not least, you can always ask questions of the HDF5 community on the HDFgroup forum (they hand out shiny badges just like SO, wow!!!)
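
    Regarding the .ply part of the question (not covered above): assuming you only need the vertices, an ASCII PLY file is simple enough to write by hand from the same (N, 3) array; libraries such as plyfile or Open3D can also do it. A minimal sketch:

    import numpy as np

    # write an (N, 3) float array as a minimal ASCII PLY file (vertices only)
    def write_ply(path, points):
        with open(path, 'w') as f:
            f.write('ply\n')
            f.write('format ascii 1.0\n')
            f.write('element vertex %d\n' % len(points))
            f.write('property float x\n')
            f.write('property float y\n')
            f.write('property float z\n')
            f.write('end_header\n')
            for x, y, z in points:
                f.write('%f %f %f\n' % (x, y, z))

    # write_ply('test.ply', np.load('test.npy'))  # hypothetical usage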