Commit f90f00ef authored by luroth

.

parent b0990b2f
/voronoi.pyc
PROJCS["CH1903+/LV95",GEOGCS["CH1903+",DATUM["CH1903",SPHEROID["Bessel1841",6377397.155,299.1528128,AUTHORITY["EPSG","7004"]],TOWGS84[674.374,15.056,405.346,0,0,0,0],AUTHORITY["EPSG","6150"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4150"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Hotine_Oblique_Mercator"],PARAMETER["latitude_of_center",46.95240555555556],PARAMETER["longitude_of_center",7.439583333333333],PARAMETER["azimuth",90],PARAMETER["rectified_grid_angle",90],PARAMETER["scale_factor",1],PARAMETER["false_easting",2600000],PARAMETER["false_northing",1200000],AUTHORITY["EPSG","2056"],AXIS["Y",EAST],AXIS["X",NORTH]]
\ No newline at end of file
"""
Convert an LAS LIDAR file to a shapefile
by creating a 3D triangle mesh using
Delaunay Triangulation.
"""
# cPickle is used to store
# tessellated triangles
# to save time writing
# future shapefiles
import cPickle
import os
import time
import math
# Third-party Python modules:
import numpy as np
import shapefile
from laspy.file import File
import voronoi
# Source LAS file
#project_dir = '/home/luroth/git_repos/MSc_Lukas_Roth/'
project_dir = os.path.dirname(os.path.realpath(__file__))
source = project_dir + "/Test_Data/eschi.las"
# Output shapefile
target = project_dir + "/Test_Data/eschi_TIN"
# Triangles archive
archive = project_dir + "/Test_Data/triangles.p"
# Pyshp archive
pyshp = project_dir + "/Test_Data/mesh_pyshp.p"
# Point class required by
# the voronoi module
class Point:
    def __init__(self, x, y):
        self.px = x
        self.py = y
    def x(self):
        return self.px
    def y(self):
        return self.py
# This will be the triangle
# array. Load it from a pickle
# file or build it with a
# Delaunay triangulation.
triangles = None
# Open LIDAR LAS file and pull out the
# ground-classified points (class 2)
las = File(source, mode="r")
ground = las.classification == 2
ground_x = las.x[ground]
ground_y = las.y[ground]
ground_z = las.z[ground]
if os.path.exists(archive):
    print "Loading triangle archive..."
    f = open(archive, "rb")
    triangles = cPickle.load(f)
    f.close()
else:
    print "Assembling points..."
    points = np.column_stack((ground_x, ground_y))
    print "Composing triangles..."
    # Delaunay Triangulation; each simplex holds three
    # indices into the filtered ground point arrays
    from scipy.spatial import Delaunay
    triangles = Delaunay(points).simplices
    #triangles = voronoi.computeDelaunayTriangulation(
    #    [Point(x, y) for x, y in points])
    # Save the triangles to save time if we write more than
    # one shapefile.
    f = open(archive, "wb")
    cPickle.dump(triangles, f, protocol=2)
    f.close()
print "Creating shapefile..."
w = None
if os.path.exists(pyshp):
    f = open(pyshp, "rb")
    w = cPickle.load(f)
    f.close()
else:
    # PolygonZ shapefile (x,y,z,m)
    w = shapefile.Writer(shapefile.POLYGONZ)
    w.field("X1", "C", "40")
    w.field("X2", "C", "40")
    w.field("X3", "C", "40")
    w.field("Y1", "C", "40")
    w.field("Y2", "C", "40")
    w.field("Y3", "C", "40")
    w.field("Z1", "C", "40")
    w.field("Z2", "C", "40")
    w.field("Z3", "C", "40")
tris = len(triangles)
# Loop through the triangles and
# track progress every 10 percent
last_percent = 0
for i in range(tris):
    t = triangles[i]
    percent = int((i / (tris * 1.0)) * 100.0)
    if percent % 10.0 == 0 and percent > last_percent:
        last_percent = percent
        print "%s %% done - Shape %s/%s at %s" % (percent, i, tris, time.asctime())
    part = []
    x1 = ground_x[t[0]]
    y1 = ground_y[t[0]]
    z1 = ground_z[t[0]]
    x2 = ground_x[t[1]]
    y2 = ground_y[t[1]]
    z2 = ground_z[t[1]]
    x3 = ground_x[t[2]]
    y3 = ground_y[t[2]]
    z3 = ground_z[t[2]]
    # Skip overly long segments to filter the large
    # triangles along the convex hull, a common
    # artifact of Delaunay triangulation
    max_len = 3
    if math.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) > max_len: continue
    if math.sqrt((x3 - x2) ** 2 + (y3 - y2) ** 2) > max_len: continue
    if math.sqrt((x3 - x1) ** 2 + (y3 - y1) ** 2) > max_len: continue
    part.append([x1, y1, z1, 0])
    part.append([x2, y2, z2, 0])
    part.append([x3, y3, z3, 0])
    w.poly(parts=[part])
    w.record(x1, x2, x3, y1, y2, y3, z1, z2, z3)
print "Saving shapefile..."
# Pickle the Writer in case something
# goes wrong. Be sure to delete this
# file to recreate the shapefile.
f = open(pyshp, "wb")
cPickle.dump(w, f, protocol=2)
f.close()
w.save(target)
print "Done."
@@ -9,35 +9,15 @@ import multiprocessing
 from itertools import repeat
 import csv
 from urllib import request
-import urllib3
 import os
 import warnings
 import matplotlib.pyplot as plt
 import shapefile
 
-def frange(start, stop, step):
-    i = start
-    while i < stop:
-        yield i
-        i += step
-
-def cartesian(arrays, out=None):
-    with warnings.catch_warnings():
-        warnings.simplefilter("ignore")
-        arrays = [np.asarray(x) for x in arrays]
-        dtype = arrays[0].dtype
-        n = np.prod([x.size for x in arrays])
-        if out is None:
-            out = np.zeros([n, len(arrays)], dtype=dtype)
-        m = n / arrays[0].size
-        out[:,0] = np.repeat(arrays[0], m)
-        if arrays[1:]:
-            cartesian(arrays[1:], out=out[0:m,1:])
-            for j in range(1, arrays[0].size):
-                out[j*m:(j+1)*m,1:] = out[0:m,1:]
-        return out
+project_dir = '/home/luroth/git_repos/MSc_Lukas_Roth/'
+project_dir = os.path.realpath(__file__)
 
 def rotation_matrix(axis, theta):
     """
@@ -56,10 +36,11 @@ def rotation_matrix(axis, theta):
 
 def getWKT_PRJ (epsg_code):
-    wkt = request.urlopen('http://spatialreference.org/ref/epsg/{0}/prettywkt/'.format(epsg_code))
-    remove_spaces = wkt.read().decode("utf-8")
-    remove_spaces = remove_spaces.replace(' ', '')
-    output = remove_spaces.replace("\n", "")
+    with open(project_dir + '/Test_Data/EPSG' + str(epsg_code) + '.txt' , 'r') as file:
+        remove_spaces = file.read()
+    remove_spaces = remove_spaces.replace(' ', '')
+    output = remove_spaces.replace("\n", "")
     return output
 
 def point_in_poly(x,y,poly):
@@ -80,8 +61,10 @@ def point_in_poly(x,y,poly):
 
     return inside
 
-def intersect_point_with_planes(plane_normales, plane_points, line_vector, line_point):
+def intersect_point_with_planes(plane_normales, plane_points, line_vector, line_point, corners):
+    # Based on http://geomalgorithms.com/a05-_intersect-1.html
     epsilon=1e-6
 
     for plane_index in range(plane_normales.shape[0]):
         plane_normal = plane_normales[plane_index]
@@ -107,11 +90,13 @@ def intersect_point_with_planes(plane_normales, plane_points, line_vector, line_point, corners):
 
 def main():
-    project_dir = r'E:\UAV\_Workspace_\ethz_eschikon_FIP_50m_soja_only_20160526\output\\'
+    #project_dir = r'E:\UAV\_Workspace_\ethz_eschikon_FIP_50m_soja_only_20160526\output\\'
 
-    DEM_file = 'DEM_superrough.stl'
+    DEM_file = 'Test_Data/DEM_superrough.stl'
+    camera_position_file = 'Test_Data/camera_position.txt'
 
-    interpolation_size_x = 64
-    interpolation_size_y = 64
+    no_of_interpolation_points_x = 33
+    no_of_interpolation_points_y = 65
 
     epsg = getWKT_PRJ("2056")
@@ -139,25 +124,22 @@ def main():
     sensor_X_C3u4 = (sensor_dimension_x / 2000)
     sensor_Y_C1u4 = (sensor_dimension_y / 2000)
    sensor_Y_C2u3 = -(sensor_dimension_y / 2000)
-    sensor_corner_xy = np.array([[0,0],[sensor_resolution_x,0],[sensor_resolution_x,sensor_dimension_y],[0, sensor_resolution_y]])
-    sensor_corner_coords = np.array([[-(sensor_dimension_x / 2000),  (sensor_dimension_y / 2000), focal_lengh/1000],
-                                     [-(sensor_dimension_x / 2000), -(sensor_dimension_y / 2000), focal_lengh/1000],
-                                     [ (sensor_dimension_x / 2000), -(sensor_dimension_y / 2000), focal_lengh/1000],
-                                     [ (sensor_dimension_x / 2000),  (sensor_dimension_y / 2000), focal_lengh/1000]])
 
     # Coordinates of sample pixels of sensor (later used for interpolation), relative to center
-    sensor_pixels_x = [x for x in range(0,sensor_resolution_x,interpolation_size_x)]
-    sensor_pixels_y = [y for y in range(0,sensor_resolution_y,interpolation_size_y)]
-    sensor_pixels_xy = cartesian([sensor_pixels_x, sensor_pixels_y])
-
-    sensor_pixel_coordx = [x/1000 for x in frange((-(sensor_dimension_x / 2)), (sensor_dimension_x /2), sensor_dimension_x/(sensor_resolution_x/interpolation_size_x))]
-    sensor_pixel_coordy = [y/1000 for y in frange((-(sensor_dimension_y / 2)), (sensor_dimension_y /2), sensor_dimension_y/(sensor_resolution_y/interpolation_size_y))]
-
-    sensor_pixel_coords = cartesian([sensor_pixel_coordx, sensor_pixel_coordy, focal_lengh/1000])
+    sensor_pixels_x = np.linspace(0, sensor_resolution_x, no_of_interpolation_points_x, dtype='int')
+    sensor_pixels_y = np.linspace(0, sensor_resolution_y, no_of_interpolation_points_y, dtype='int')
+    sensor_pixels_xy = np.transpose([np.tile(sensor_pixels_x, len(sensor_pixels_y)), np.repeat(sensor_pixels_y, len(sensor_pixels_x))])
+
+    corner_index = [0, (no_of_interpolation_points_x-1), (no_of_interpolation_points_x*no_of_interpolation_points_y-1), (no_of_interpolation_points_x*no_of_interpolation_points_y-no_of_interpolation_points_x)]
+
+    sensor_pixels_coords_x = np.linspace(-(sensor_dimension_x / 2000), (sensor_dimension_x / 2000), no_of_interpolation_points_x, dtype=np.float64)
+    sensor_pixels_coords_y = np.linspace(-(sensor_dimension_y / 2000), (sensor_dimension_y / 2000), no_of_interpolation_points_y, dtype=np.float64)
+    sensor_pixels_coords_xy = np.transpose([np.tile(sensor_pixels_coords_x, len(sensor_pixels_coords_y)), np.repeat(sensor_pixels_coords_y, len(sensor_pixels_coords_x))])
+    sensor_pixel_coords_z = np.full((sensor_pixels_coords_xy.shape[0], 1), sensor_Z, dtype=np.float64)
+    sensor_pixels_coords_xyz = np.concatenate((sensor_pixels_coords_xy, sensor_pixel_coords_z), axis=1)
 
     # Calculate coordinates on ground for all cameras
-    with open(project_dir + 'camera_position_RGB.txt', 'r') as csvfile:
+    with open(project_dir + camera_position_file, 'r') as csvfile:
         spamreader = csv.reader(csvfile, delimiter='\t', quotechar='|')
         next(spamreader, None)
         next(spamreader, None)
@@ -165,12 +147,11 @@ def main():
             PhotoID = os.path.splitext(row[0])[0]
             print('PROCESSING', PhotoID)
 
-            X = float(row[1])
-            Y = float(row[2])
-            Z = float(row[3])
-            Omega = float(row[4])
-            Phi = float(row[5])
-            Kappa = float(row[6])
+            X, Y, Z = (float(row[index]) for index in [1,2,3])
+            Omega, Phi, Kappa = (float(row[index]) for index in [4,5,6])
+            # in radian
+            omega_rad, phi_rad, kappa_rad = (math.radians(x) for x in [Omega, Phi, Kappa])
 
             #PhotoID = 'DSC01520'
             #X = 2693786.2141234782000000
@@ -183,49 +164,35 @@ def main():
             # Coordinates of focal point
             focal_point = np.array([X, Y, Z])
 
-            # Rotation (order omega phi kappa correct?)
-            omega_rad = math.radians(Omega)
-            phi_rad = math.radians(Phi)
-            kappa_rad = math.radians(Kappa)
-
             print("rotate camera")
-            sensor_corner_coords_rot = sensor_corner_coords
-            sensor_corner_coords_rot = np.array([np.dot(rotation_matrix(z_axis, kappa_rad), sensor_corner) for sensor_corner in sensor_corner_coords_rot])
-            sensor_corner_coords_rot = np.array([np.dot(rotation_matrix(y_axis, phi_rad), sensor_corner) for sensor_corner in sensor_corner_coords_rot])
-            sensor_corner_coords_rot = np.array([np.dot(rotation_matrix(x_axis, omega_rad), sensor_corner) for sensor_corner in sensor_corner_coords_rot])
-
-            sensor_pixel_coords_rot = sensor_pixel_coords
-            sensor_pixel_coords_rot = np.array([np.dot(rotation_matrix(z_axis, kappa_rad), sensor_corner) for sensor_corner in sensor_pixel_coords_rot])
-            sensor_pixel_coords_rot = np.array([np.dot(rotation_matrix(y_axis, phi_rad), sensor_corner) for sensor_corner in sensor_pixel_coords_rot])
-            sensor_pixel_coords_rot = np.array([np.dot(rotation_matrix(x_axis, omega_rad), sensor_corner) for sensor_corner in sensor_pixel_coords_rot])
-
-            print("calculate ground points for corners")
-            #ground_corner_coords = np.array([intersect_point_with_planes(DEM_normales, DEM_points, corner_vector, focal_point) for corner_vector in sensor_corner_coords_rot])
-            ground_corner_coords = np.array(Parallel(n_jobs=4)(delayed(intersect_point_with_planes)(DEM_normales, DEM_points, corner_vector, focal_point) for corner_vector in sensor_corner_coords_rot))
-            # add start point as end point to close polygon
-            ground_corner_coords = np.append(ground_corner_coords, [ground_corner_coords[0]], axis=0)
+            rot_matrix = rotation_matrix(z_axis, kappa_rad).dot(rotation_matrix(y_axis, phi_rad)).dot(rotation_matrix(x_axis, omega_rad))
+            sensor_pixels_vectors = np.dot(sensor_pixels_coords_xyz, rot_matrix)
+
+            print("calculate ground points for corners")
+            sensor_corner_vectors = sensor_pixels_vectors[corner_index]
+            ground_corner_coords = np.array([intersect_point_with_planes(DEM_normales, DEM_points, sensor_corner_vector, focal_point) for sensor_corner_vector in sensor_corner_vectors])
+            #ground_corner_coords = np.array(Parallel(n_jobs=4)(delayed(intersect_point_with_planes)(DEM_normales, DEM_points, sensor_corner_vector, focal_point) for sensor_corner_vector in sensor_corner_vectors))
 
             print("\ncorners found:", ground_corner_coords)
             if(np.isnan(ground_corner_coords).any()):
                 print("corners missing, skipping Photo")
                 continue
 
-            # write shapefile:
+            # add start point as end point to close polygon
+            corner_polygon = np.append(ground_corner_coords, [ground_corner_coords[0]], axis=0)
+
             print("save corner polygon to shapefile")
             shp = shapefile.Writer(shapeType=shapefile.POLYGON)
             shp.field('label')
-            shp.poly(parts=[ground_corner_coords.tolist()], shapeType=shapefile.POLYGON)
-            shp.record('a')
+            shp.poly(parts=[corner_polygon.tolist()], shapeType=shapefile.POLYGON)
+            shp.record('field_of_view')
             shp.save(project_dir + PhotoID + '_corners')
             prj = open(project_dir + PhotoID + '_corners.prj', "w")
             prj.write(epsg)
             prj.close()
 
-            print("calculate ground points for sample pixels")
+            print("calculate ground points for all sample pixels")
 
             # prepare DEM
             print("reducing DEM to corner polygon")
             DEM_normales_red = []
...
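For context on the diff above, here is a hedged, standalone sketch of the two geometric steps it touches: an axis-angle rotation composed in kappa/phi/omega order, and the parametric line-plane intersection that intersect_point_with_planes references (geomalgorithms.com, article a05). The helper names and numeric values below are illustrative assumptions; the script's own rotation_matrix() and DEM handling sit in the collapsed parts of the diff and are not reproduced here.

import numpy as np

def axis_angle_matrix(axis, theta):
    # A common axis-angle (quaternion-based) rotation matrix; assumed to be
    # equivalent to the script's rotation_matrix(), which is not shown here.
    axis = np.asarray(axis, dtype=float)
    axis = axis / np.linalg.norm(axis)
    a = np.cos(theta / 2.0)
    b, c, d = -axis * np.sin(theta / 2.0)
    return np.array([[a*a + b*b - c*c - d*d, 2*(b*c + a*d),         2*(b*d - a*c)],
                     [2*(b*c - a*d),         a*a + c*c - b*b - d*d, 2*(c*d + a*b)],
                     [2*(b*d + a*c),         2*(c*d - a*b),         a*a + d*d - b*b - c*c]])

def line_plane_intersection(plane_normal, plane_point, line_vector, line_point, epsilon=1e-6):
    # Parametric line P(s) = line_point + s * line_vector meets the plane
    # through plane_point with normal plane_normal where
    # dot(plane_normal, P(s) - plane_point) == 0.
    denom = np.dot(plane_normal, line_vector)
    if abs(denom) < epsilon:
        return None  # line is (nearly) parallel to the plane
    s = np.dot(plane_normal, np.asarray(plane_point) - np.asarray(line_point)) / denom
    return np.asarray(line_point) + s * np.asarray(line_vector)

# Illustrative use: rotate one sensor-pixel vector and intersect the resulting
# ray from an assumed focal point with a horizontal plane at 503 m.
x_axis, y_axis, z_axis = np.eye(3)
omega, phi, kappa = np.radians([2.0, -3.0, 45.0])
rot = axis_angle_matrix(z_axis, kappa).dot(axis_angle_matrix(y_axis, phi)).dot(axis_angle_matrix(x_axis, omega))
pixel_vector = rot.dot(np.array([0.0, 0.0, -0.02]))    # assumed 20 mm focal length, pointing down
focal_point = np.array([2683900.0, 1246300.0, 550.0])  # assumed camera position (LV95-like)
ground = line_plane_intersection(np.array([0.0, 0.0, 1.0]), np.array([0.0, 0.0, 503.0]),
                                 pixel_vector, focal_point)
print(ground)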
solid Test
facet normal 0 0 1
outer loop
vertex 2683846 1246290 503
vertex 2793846 1246290 503
vertex 2693846 1296290 503
endloop
endfacet
endsolid Test
\ No newline at end of file
PROJCS["CH1903+ / LV95",
GEOGCS["CH1903+",
DATUM["CH1903",
SPHEROID["Bessel 1841",6377397.155,299.1528128,
AUTHORITY["EPSG","7004"]],
TOWGS84[674.374,15.056,405.346,0,0,0,0],
AUTHORITY["EPSG","6150"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.01745329251994328,
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4150"]],
UNIT["metre",1,
AUTHORITY["EPSG","9001"]],
PROJECTION["Hotine_Oblique_Mercator"],
PARAMETER["latitude_of_center",46.95240555555556],
PARAMETER["longitude_of_center",7.439583333333333],
PARAMETER["azimuth",90],
PARAMETER["rectified_grid_angle",90],
PARAMETER["scale_factor",1],
PARAMETER["false_easting",2600000],
PARAMETER["false_northing",1200000],
AUTHORITY["EPSG","2056"],
AXIS["Y",EAST],
AXIS["X",NORTH]]
\ No newline at end of file