forked from philwilkes/rxp-pipeline
-
Notifications
You must be signed in to change notification settings - Fork 0
/
rxp2ply.py
185 lines (150 loc) · 7.93 KB
/
rxp2ply.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
import sys
import os
import glob
import multiprocessing
import json
import argparse
import pandas as pd
import numpy as np
import ply_io
import pdal
def tile_data(scan_pos, args):
    """Read one scan position's .rxp, filter, transform and tile the points.

    Builds a pdal pipeline that reads the raw scan, filters by Deviation and
    Reflectance, applies the per-scan rotation matrix (composed with the
    global matrix) and splits the cloud into square tiles. Each tile's points
    are appended, as raw binary records, to the matching .xyz file in
    args.odir (guarded by args.Lock so parallel workers don't interleave).

    Silently returns if the rxp file or its rotation matrix cannot be found.
    """
    base, scan = os.path.split(scan_pos)
    try:
        if args.test:
            # test mode uses the low-rate monitor stream
            rxp = glob.glob(os.path.join(base, scan, '??????_??????.mon.rxp'))[0]
        else:
            # .PROJ projects keep rxp files one level down in 'scans'
            rxp = glob.glob(os.path.join(base, scan, 'scans' if base.endswith('.PROJ') else '', '??????_??????.rxp'))[0]
    except IndexError:
        # glob found nothing -> [0] raised; skip this scan position
        if args.verbose: print(f"!!! Can't find {os.path.join(base, scan, '??????_??????.rxp')} !!!")
        return
    # scan position number, e.g. ScanPos001.SCNPOS -> 1
    sp = int(scan.replace(args.prefix, '').replace('.SCNPOS', ''))
    if args.verbose:
        with args.Lock:
            print('rxp -> xyz:', rxp)
    fn_matrix = glob.glob(os.path.join(args.matrix_dir, f'{scan.replace(".SCNPOS", "")}.*'))
    if len(fn_matrix) == 0:
        if args.verbose: print('!!! Can not find rotation matrix', os.path.join(args.matrix_dir, scan.replace(".SCNPOS", "") + ".*"), '!!!')
        return
    # compose the optional global registration with the per-scan matrix
    matrix = np.dot(args.global_matrix, np.loadtxt(fn_matrix[0]))
    st_matrix = ' '.join(matrix.flatten().astype(str))
    # pdal stages as dictionaries
    cmds = []
    read_in = {"type": "readers.rxp",
               "filename": rxp,
               "sync_to_pps": "false",
               "reflectance_as_intensity": "false"}
    cmds.append(read_in)
    dev_filter = {"type": "filters.range",
                  "limits": "Deviation[0:{}]".format(args.deviation)}
    cmds.append(dev_filter)
    refl_filter = {"type": "filters.range",
                   "limits": "Reflectance[{}:{}]".format(*args.reflectance)}
    cmds.append(refl_filter)
    transform = {"type": "filters.transformation",
                 "matrix": st_matrix}
    cmds.append(transform)
    tile = {"type": "filters.splitter",
            "length": f"{args.tile}",
            "origin_x": "0",
            "origin_y": "0"}
    cmds.append(tile)
    # link commands and pass to pdal
    JSON = json.dumps(cmds)
    pipeline = pdal.Pipeline(JSON)
    pipeline.execute()
    # iterate over tiled arrays
    for arr in pipeline.arrays:
        arr = pd.DataFrame(arr)
        arr = arr.rename(columns={'X': 'x', 'Y': 'y', 'Z': 'z'})
        arr.loc[:, 'sp'] = sp
        arr = arr[['x', 'y', 'z', 'Reflectance', 'Deviation', 'ReturnNumber', 'NumberOfReturns', 'sp']]  # save only relevant fields
        # remove points outside bbox
        arr = arr.loc[(arr.x.between(args.bbox[0], args.bbox[1])) &
                      (arr.y.between(args.bbox[2], args.bbox[3]))]
        if len(arr) == 0: continue
        # identify tile number from the tile's lower-left corner
        X, Y = (arr[['x', 'y']].min() // args.tile * args.tile).astype(int)
        tile = args.tiles.loc[(args.tiles.x == X) & (args.tiles.y == Y)]
        # append binary records to the tile's .xyz file (lock: many workers
        # may write to the same tile file concurrently)
        with args.Lock:
            with open(os.path.join(args.odir, f'{args.plot_code}{tile.tile.item():03}.xyz'), 'ab') as fh:
                fh.write(arr.to_records(index=False).tobytes())
def xyz2ply(xyz_path, args):
    """Convert a binary .xyz tile file to .ply and delete the original.

    The .xyz file holds raw records written by tile_data; the dtype string
    below must match the field order written there
    (x, y, z, Reflectance, Deviation, ReturnNumber, NumberOfReturns, sp).
    """
    if args.verbose:
        with args.Lock:
            print('xyz -> ply:', xyz_path)
    # context manager ensures the handle is closed before os.unlink
    # (the original left the file open, leaking the descriptor)
    with open(xyz_path, encoding='ISO-8859-1') as open_file:
        tmp = pd.DataFrame(np.fromfile(open_file, dtype='float64,float64,float64,float32,float32,uint8,uint8,int64'))
    tmp.columns = ['x', 'y', 'z', 'refl', 'dev', 'ReturnNumber', 'NumberOfReturns', 'sp']
    ply_io.write_ply(xyz_path.replace('.xyz', '.ply'), tmp)
    os.unlink(xyz_path)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--project', '-p', required=True, type=str, help='path to point cloud')
    parser.add_argument('--plot-code', type=str, default='', help='plot suffix')
    parser.add_argument('--odir', type=str, default='.', help='output directory')
    parser.add_argument('--deviation', type=float, default=15, help='deviation filter')
    parser.add_argument('--reflectance', type=float, nargs=2, default=[-999, 999], help='reflectance filter')
    parser.add_argument('--tile', type=float, default=10, help='length of tile')
    parser.add_argument('--num-prcs', type=int, default=10, help='number of cores to use')
    parser.add_argument('--prefix', type=str, default='ScanPos', help='file name prefix, default: ScanPos')
    parser.add_argument('--bbox', type=int, nargs=4, default=[], help='bounding box format xmin xmax ymin ymax')
    parser.add_argument('--matrix-dir', '-m', type=str, default='', help='path to rotation matrices')
    parser.add_argument('--global-matrix', type=str, default=False, help='path to global rotation matrix')
    parser.add_argument('--pos', default=[], nargs='*', help='process using specific scan positions')
    parser.add_argument('--test', action='store_true', help='test using the .mon.rxp')
    parser.add_argument('--verbose', action='store_true', help='print something')
    parser.add_argument('--print-bbox-only', action='store_true', help='print bounding box only')
    args = parser.parse_args()

    # locate the rotation-matrix directory (default: <project>/matrix)
    if len(args.matrix_dir) == 0: args.matrix_dir = os.path.join(args.project, 'matrix')
    if not os.path.isdir(args.matrix_dir): raise Exception(f'no such directory: {args.matrix_dir}')

    args.ScanPos = sorted(glob.glob(os.path.join(args.project, f'{args.prefix}*')))
    if args.plot_code != '': args.plot_code += '_'

    # derive bounding box from the translation column of each scan matrix
    M = glob.glob(os.path.join(args.matrix_dir, f'{args.prefix}*.*'))
    if len(M) == 0:
        raise Exception('no matrix files found, ensure they are named correctly')
    matrix_arr = np.zeros((len(M), 3))
    for i, m in enumerate(M):
        matrix_arr[i, :] = np.loadtxt(m)[:3, 3]  # scanner position (x, y, z)

    # bbox [xmin, xmax, ymin, ymax], padded and snapped to the tile grid
    if len(args.bbox) == 0:
        args.bbox = np.array([matrix_arr.min(axis=0)[:2] - args.tile,
                              matrix_arr.max(axis=0)[:2] + (args.tile * 2)]).T.flatten() // args.tile * args.tile
    if args.verbose: print('bounding box:', args.bbox)
    if args.print_bbox_only: sys.exit()

    # global rotation matrix (identity if not supplied)
    if args.global_matrix:
        args.global_matrix = np.loadtxt(args.global_matrix)
    else:
        args.global_matrix = np.identity(4)

    # create tile db: one row per grid cell, indexed by lower-left corner
    X, Y = np.meshgrid(np.arange(args.bbox[0], args.bbox[1] + args.tile, args.tile),
                       np.arange(args.bbox[2], args.bbox[3] + args.tile, args.tile))
    args.tiles = pd.DataFrame(data=np.vstack([X.flatten(), Y.flatten()]).T.astype(int), columns=['x', 'y'])
    args.tiles.loc[:, 'tile'] = range(len(args.tiles))

    if len(args.pos) > 0:
        if args.verbose: print('processing only:', args.pos)
        args.ScanPos = [os.path.join(args.project, p) for p in args.pos]

    # read in and tile scans in parallel
    Pool = multiprocessing.Pool(args.num_prcs)
    m = multiprocessing.Manager()
    args.Lock = m.Lock()  # shared lock serialises per-tile file appends
    Pool.starmap(tile_data, [(sp, args) for sp in np.sort(args.ScanPos)])

    # write to ply - reusing Pool. Blocking starmap (not starmap_async) so
    # worker exceptions propagate instead of being silently discarded.
    xyz = glob.glob(os.path.join(args.odir, '*.xyz'))
    Pool.starmap(xyz2ply, [(fn, args) for fn in np.sort(xyz)])
    Pool.close()
    Pool.join()