Commit 198053a8 authored by luroth

.

parent 1c4ba36f
'''
Created on 15.03.2017
@author: luroth
'''
#######################################
import math as math
import numpy as np
import geojson
from osgeo import ogr, gdal
from joblib import Parallel, delayed
import multiprocessing
from multiprocessing import Process, Queue, Manager
from itertools import repeat
import csv
import os
import sys
import getopt
import matplotlib.pyplot as plt
from matplotlib import path
from osgeo import gdalnumeric
import time
from progressbar import ProgressBar
from Common import *
def mask_worker(work_queue, result_queue, metadata_folder, output_directory, lot, soils):
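    """Worker process: pull PhotoIDs from work_queue until the 'STOP' sentinel,
    create the masks for each photo and report the finished PhotoID on result_queue."""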
for job in iter(work_queue.get, 'STOP'):
PhotoID = job['PhotoID']
create_mask(metadata_folder, output_directory, lot, soils, PhotoID)
result_queue.put(PhotoID)
def create_mask(metadata_folder, output_directory, lot, soils, PhotoID):
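    """Load the per-pixel ground coordinate rasters of one photo, build boolean masks
    for the lot polygon and the soil training polygons, and save them as GeoTIFFs."""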
photo_meta_x_ds = gdal.Open(os.path.join(metadata_folder, PhotoID + '_metadata_coord_x.tif'))
photo_meta_x = np.array(photo_meta_x_ds.GetRasterBand(1).ReadAsArray(), dtype=np.float64)
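    # DELTA_x / DELTA_y come from Common; the coordinate rasters are written with this
    # offset removed (see the script that writes them), so it is added back here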
photo_meta_x += DELTA_x
photo_meta_y_ds = gdal.Open(os.path.join(metadata_folder, PhotoID + '_metadata_coord_y.tif'))
photo_meta_y = np.array(photo_meta_y_ds.GetRasterBand(1).ReadAsArray(), dtype=np.float64)
photo_meta_y += DELTA_y
photo_meta_xy = np.stack((photo_meta_x, photo_meta_y), axis=2)
photo_meta_xy = photo_meta_xy.reshape(-1,2)
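    # test every pixel's ground coordinate against the lot polygon; the small positive
    # radius also keeps points lying exactly on the polygon border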
lot_path = path.Path(lot)
mask_lot = lot_path.contains_points(photo_meta_xy, radius=0.02)
if not np.any(mask_lot):
return
mask_lot = mask_lot.reshape(photo_meta_y.shape)
gdalnumeric.SaveArray(1*mask_lot, os.path.join(output_directory, PhotoID + '_mask_lot.tif'), format='GTiff')
    # start from an all-False mask; np.empty would leave uninitialized values in the OR below
    mask_soils = np.zeros(photo_meta_xy.shape[0:1], dtype=bool)
for soil in soils:
soil_path = path.Path(soil)
mask_soil = soil_path.contains_points(photo_meta_xy, radius=0.02)
mask_soils = mask_soils + mask_soil
mask_soils = mask_soils.reshape(photo_meta_y.shape)
output_mask = np.array(1*mask_soils, dtype=int)
gdalnumeric.SaveArray(output_mask, os.path.join(output_directory, PhotoID + '_mask_soil.tif'), format='GTiff')
def main(argv):
camera_position_file = ''
metadata_folder = ''
    lot_dim = ''
training_soil = ''
output_directory = ''
try:
opts, args = getopt.getopt(argv,"hc:m:p:s:o:")
except getopt.GetoptError:
print('test.py -c <camera position file> -m <metadata folder> -p <lot> -s <training file soil> -o <Output directory>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('test.py -c <camera position file> -m <metadata folder> -p <lot> -s <training file soil> -o <Output directory>')
sys.exit()
elif opt == "-c":
camera_position_file = arg
elif opt == "-m":
metadata_folder = arg
elif opt == "-p":
lot_dim = arg
elif opt == "-s":
training_soil = arg
elif opt == "-o":
output_directory = arg
    print('Metadata folder is', metadata_folder)
# graph output off
plt.ioff()
epsg = getWKT_PRJ("2056")
with open(lot_dim, 'r') as f:
lot_polygons = geojson.load(f)
lot = lot_polygons['features'][0]['geometry']['coordinates'][0]
soils = []
with open(training_soil, 'r') as f:
training_soil_polygons = geojson.load(f)
for feature in training_soil_polygons['features']:
soils.append(feature['geometry']['coordinates'][0])
with open(camera_position_file, 'r') as csvfile:
spamreader = csv.reader(csvfile, delimiter='\t', quotechar='|')
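        # skip the first two rows of the camera position file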
next(spamreader, None)
next(spamreader, None)
m = Manager()
jobs = m.Queue()
results = m.Queue()
processes = []
max_jobs = 0
count = 0
for row in spamreader:
PhotoID = os.path.splitext(row[0])[0]
if os.path.isfile(os.path.join(metadata_folder, PhotoID + '_metadata_coord_x.tif')):
job = dict()
job['PhotoID'] = PhotoID
jobs.put(job)
max_jobs += 1
progress = ProgressBar(min_value=0, max_value=max_jobs)
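    # start one worker per CPU core, leaving two cores free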
for w in range(multiprocessing.cpu_count()-2):
p = Process(target=mask_worker, args=(jobs, results, metadata_folder, output_directory, lot, soils))
p.daemon = True
p.start()
processes.append(p)
    # one 'STOP' sentinel per worker so every process leaves its job loop before join()
    for _ in processes:
        jobs.put('STOP')
print("jobs all started")
progress.update(0)
while count < (max_jobs):
results.get()
progress.update(count)
count+=1
progress.finish()
for p in processes:
p.join()
if __name__ == "__main__":
main(sys.argv[1:])
print("\n============\nAll masks prepared, END\n")
@@ -2,7 +2,6 @@
import math as math
import numpy as np
import geojson
from osgeo import ogr
from stl import mesh
import stl.base as stl
from joblib import Parallel, delayed
@@ -17,84 +16,13 @@ import warnings
import matplotlib.pyplot as plt
import shapefile
from scipy.interpolate import griddata
from osgeo import gdalnumeric
import gdal
import gdalnumeric
import time
import ctypes
#from shapely.iterops import intersects
#queue = Queue()
from Common import *
def rotation_matrix(axis, theta):
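    """Return the 3x3 matrix for a rotation of theta radians about the given axis
    (Euler-Rodrigues formula)."""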
axis = np.asarray(axis)
axis = axis/math.sqrt(np.dot(axis, axis))
a = math.cos(theta/2.0)
b, c, d = -axis*math.sin(theta/2.0)
aa, bb, cc, dd = a*a, b*b, c*c, d*d
bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d
return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],
[2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],
[2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])
def find_mins_maxs(mesh):
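    """Return the bounding box (minx, maxx, miny, maxy, minz, maxz) of an STL mesh."""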
minx = maxx = miny = maxy = minz = maxz = None
for p in mesh.points:
# p contains (x, y, z)
if minx is None:
minx = p[stl.Dimension.X]
maxx = p[stl.Dimension.X]
miny = p[stl.Dimension.Y]
maxy = p[stl.Dimension.Y]
minz = p[stl.Dimension.Z]
maxz = p[stl.Dimension.Z]
else:
maxx = max(p[stl.Dimension.X], maxx)
minx = min(p[stl.Dimension.X], minx)
maxy = max(p[stl.Dimension.Y], maxy)
miny = min(p[stl.Dimension.Y], miny)
maxz = max(p[stl.Dimension.Z], maxz)
minz = min(p[stl.Dimension.Z], minz)
return minx, maxx, miny, maxy, minz, maxz
def get_polygon_normale(points):
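    """Return the normal vector of the plane spanned by the first three polygon points."""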
a = np.array(points[0])
b = np.array(points[1])
c = np.array(points[2])
v1 = a-b
v2 = a-c
normale = np.cross(v1, v2)
return normale
def getWKT_PRJ (epsg_code):
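    """Read the WKT projection string for an EPSG code from the local EPSG_files
    folder and strip all spaces and line breaks."""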
try:
file_dir = os.path.dirname(os.path.realpath(__file__))
except:
file_dir = '/home/luroth/git_repos/MSc_Lukas_Roth/'
with open(os.path.join(file_dir, 'EPSG_files', 'EPSG' + str(epsg_code) + '.txt'), 'r') as file:
remove_spaces = file.read()
remove_spaces = remove_spaces.replace(' ', '')
output = remove_spaces.replace("\n", "")
return output
def point_in_poly(x,y,poly):
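    """Ray-casting (even-odd rule) test whether point (x, y) lies inside the polygon."""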
n = len(poly)
inside = False
p1x,p1y = poly[0]
for i in range(n+1):
p2x,p2y = poly[i % n]
if y > min(p1y,p2y):
if y <= max(p1y,p2y):
if x <= max(p1x,p2x):
if p1y != p2y:
xints = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xints:
inside = not inside
p1x,p1y = p2x,p2y
return inside
def ray_trace(line_vector, line_point, plane_point, plane_normal):
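    """Return the intersection point of the line (line_point + t * line_vector) with the
    plane defined by plane_point and plane_normal, or None if the line is (nearly) parallel."""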
epsilon=1e-64
@@ -136,8 +64,8 @@ def intersect_line_with_planes(pixel_nr, plane_normales, plane_points, line_vect
# estimate location with corner polygons
intersect_point1 = ray_trace(line_vector, line_point, point_poly1, corners_poly1_normale)
intersect_point2 = ray_trace(line_vector, line_point, point_poly2, corners_poly2_normale)
hit_1 = point_in_poly(intersect_point1[0], intersect_point1[1], [(corners[0][0],corners[0][1]), (corners[1][0],corners[1][1]), (corners[2][0],corners[2][1])])
hit_2 = point_in_poly(intersect_point2[0], intersect_point2[1], [(corners[2][0],corners[0][1]), (corners[3][0],corners[1][1]), (corners[0][0],corners[2][1])])
hit_1 = point_in_polygon2(intersect_point1[0], intersect_point1[1], [(corners[0][0],corners[0][1]), (corners[1][0],corners[1][1]), (corners[2][0],corners[2][1])])
hit_2 = point_in_polygon2(intersect_point2[0], intersect_point2[1], [(corners[2][0],corners[0][1]), (corners[3][0],corners[1][1]), (corners[0][0],corners[2][1])])
approximation_point = np.array((intersect_point1 if hit_1 else intersect_point2)*100, dtype='int')
if verbose: t_approximation = time.time() - t_start
@@ -186,7 +114,7 @@ def process_planes(index_list, plane_normales, plane_points, line_vector, line_p
if(intersect_point is not None):
# test if in polygon
p1_x, p1_y, p2_x, p2_y, p3_x, p3_y = plane_points[plane_index, [0,1,3,4,6,7]].tolist()
hit = point_in_poly(intersect_point[0], intersect_point[1], [(p1_x, p1_y), (p2_x, p2_y), (p3_x, p3_y)] )
hit = point_in_polygon2(intersect_point[0], intersect_point[1], [(p1_x, p1_y), (p2_x, p2_y), (p3_x, p3_y)] )
if hit:
return intersect_point.tolist()
else:
@@ -344,8 +272,8 @@ def main(argv):
print("\ncorners found:\n", ground_corner_coords)
if(np.isnan(ground_corner_coords.flat).any()):
print("corners missing, skipping Photo")
continue
print("corners missing, have to calculate with global corners :(")
ground_corner_coords = corners_global
# write shapefile:
# add start point as end point to close polygon
@@ -365,7 +293,7 @@ def main(argv):
print("\n------\ncalculate now ground points for all sample pixels:")
print('Points to calculate:', sensor_pixels_vectors.shape[0])
print('Planes to intersect with:', DEM_normales.shape[0])
print('calculate intersections, running on', 10, 'cores\n')
print('calculate intersections, running on', multiprocessing.cpu_count()-2, 'cores\n')
m = Manager()
jobs = m.Queue()
@@ -466,8 +394,8 @@ def main(argv):
print("\nwrite pixel matrix to TIFF")
gdalnumeric.SaveArray(np.array(np.fliplr(ground_pixel_coords_x_interpol.T), dtype=np.float32), os.path.join(output_directory, PhotoID + '_metadata_coord_x.tif'), format='GTiff', )
gdalnumeric.SaveArray(np.array(np.fliplr(ground_pixel_coords_y_interpol.T), dtype=np.float32), os.path.join(output_directory, PhotoID + '_metadata_coord_y.tif'), format='GTiff' )
gdalnumeric.SaveArray(np.array(np.fliplr(ground_pixel_coords_x_interpol.T)-DELTA_x, dtype=np.float32), os.path.join(output_directory, PhotoID + '_metadata_coord_x.tif'), format='GTiff', )
gdalnumeric.SaveArray(np.array(np.fliplr(ground_pixel_coords_y_interpol.T)-DELTA_y, dtype=np.float32), os.path.join(output_directory, PhotoID + '_metadata_coord_y.tif'), format='GTiff' )
gdalnumeric.SaveArray(np.array(np.fliplr(ground_pixel_coords_z_interpol.T), dtype=np.float32), os.path.join(output_directory, PhotoID + '_metadata_coord_z.tif'), format='GTiff' )
gdalnumeric.SaveArray(np.array(np.fliplr(ground_pixel_angle_xy_interpol.T), dtype=np.float32), os.path.join(output_directory, PhotoID + '_metadata_angle_xy.tif'), format='GTiff' )
gdalnumeric.SaveArray(np.array(np.fliplr(ground_pixel_angle_z_interpol.T), dtype=np.float32), os.path.join(output_directory, PhotoID + '_metadata_angle_z.tif'), format='GTiff' )
'''
Created on 16.03.2017
@author: luroth
'''
from pylab import plot,show
from numpy import vstack,array
from numpy.random import rand
import numpy.ma as ma
import numpy as np
from scipy.cluster.vq import kmeans,vq
from scipy import misc
import sklearn.cluster
import matplotlib.pyplot as plt  # plt.imshow / plt.show are used below
image = misc.imread(r'E:\UAV\_Workspace_\2016_ethz_eschikon_FIP_50m_20160607\RGB\DSC02219.JPG')
image_mask_all = misc.imread(r'E:\UAV\_Workspace_\2016_ethz_eschikon_FIP_50m_20160607\output\meta\DSC02219_mask_lot.tif')
image_mask_soil = misc.imread(r'E:\UAV\_Workspace_\2016_ethz_eschikon_FIP_50m_20160607\output\meta\DSC02219_mask_soil.tif')
origin_shape = image.shape
image = image.reshape(-1,3)
image_mask_all = image_mask_all.reshape(-1)
image_mask_soil = image_mask_soil.reshape(-1)
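# zero out all pixels that lie outside the lot mask before clustering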
image[image_mask_all==0,:] = 0
plt.imshow(image.reshape(origin_shape))
plt.show()
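# k-means clustering of all pixel RGB values into 6 classes (runs on all cores)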
clf = sklearn.cluster.KMeans(n_clusters=6, n_jobs=-1)
clusters = clf.fit_predict(image)
clusters = clusters.reshape(origin_shape[0], origin_shape[1])
plt.imshow(clusters)
plt.show()
# the flattened soil mask has to match the 2D cluster image before it can be used as a mask
res_soil = ma.array(clusters, mask=image_mask_soil.reshape(clusters.shape))
print(np.unique(res_soil))
import gdalnumeric
gdalnumeric.SaveArray(clusters, r'E:\UAV\_Workspace_\2016_ethz_eschikon_FIP_50m_general/test.tif', format='GTiff')