Merge pull request #36 from PermafrostDiscoveryGateway/feature-35-insar-processing

Adding processing for InSAR datasets in ASCII xyds (lon, lat, displacement, stdev) format, which converts displacement to RGB using the viridis color scale, then converts to LAZ and creates a tileset of the resulting data.
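For reference, the expected input is a headerless ASCII file with one point per line in lon, lat, displacement, stdev order (comma-separated, since the code below uses pandas' default CSV reader). A purely hypothetical example, with illustrative values and file name only:

# hypothetical input, e.g. site-displacement.csv (lon, lat, disp, std)
-148.851,69.432,0.012,0.003
-148.849,69.432,0.015,0.004
-148.849,69.433,-0.007,0.002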
Showing 7 changed files with 756 additions and 13 deletions.
@@ -0,0 +1,61 @@
import pandas as pd
from pathlib import Path
from .lastools_iface import las2las
from . import viridis


def load_llvs(f: Path, std: bool=False):
    """
    Load an ASCII xyds (lon, lat, displacement, stdev) file into a DataFrame.
    If std is False, only the first three columns are read.
    """
    cols = ['lon', 'lat', 'disp', 'std']
    usecols = [0, 1, 2, 3] if std else [0, 1, 2]
    return pd.read_csv(f, names=cols, header=None, usecols=usecols)


def add_z(llvs: pd.DataFrame, std: bool=False):
    """
    Add a zero-valued z column and reorder the columns to lon, lat, z, disp(, std).
    """
    llvs['z'] = 0
    if std:
        return llvs[['lon', 'lat', 'z', 'disp', 'std']]
    else:
        return llvs[['lon', 'lat', 'z', 'disp']]


def get_rgb(llvs: pd.DataFrame, quantile: bool=False, std: bool=False):
    """
    Map displacement values to viridis RGB values, then drop the displacement
    (and stdev) columns. Binning is equal-width by default, or by quantile.
    """
    # get the color scale
    viridis_data = viridis()
    # put displacement in a number of bins equal to the length of the color scale array
    if not quantile:
        viridis_bin = pd.cut(llvs['disp'], bins=len(viridis_data), labels=False).to_list()
    else:
        viridis_bin = pd.qcut(llvs['disp'], q=len(viridis_data), labels=False).to_list()
    # look up the color values of each data point and put them in new columns
    llvs['r'] = [viridis_data[x][0] for x in viridis_bin]
    llvs['g'] = [viridis_data[x][1] for x in viridis_bin]
    llvs['b'] = [viridis_data[x][2] for x in viridis_bin]
    del llvs['disp']
    if std:
        del llvs['std']
    return llvs


def write(llvs: pd.DataFrame, o: Path):
    """
    Write the lon, lat, z, r, g, b columns to a headerless CSV file.
    """
    llvs[['lon', 'lat', 'z', 'r', 'g', 'b']].to_csv(o, index=False, header=False)


def llzrgb2las(f: Path, o: Path):
    """
    Convert a lon, lat, z, r, g, b CSV to LAZ using the las2las wrapper.
    """
    las2las(f=f, output_file=o, llvrgb=True)


def insar_pipeline(f: Path, quantile: bool):
    """
    Run the full InSAR processing chain: load the ASCII file, add a z column,
    convert displacement to viridis RGB, write an intermediate CSV, and convert
    it to LAZ. Returns the path to the LAZ file.
    """
    llvs = load_llvs(f=f)
    llvs = add_z(llvs=llvs)
    llvs = get_rgb(llvs=llvs, quantile=quantile)
    llzrgb = f.parent.absolute() / f"{f.stem}-llzrgb.csv"
    write(llvs, o=llzrgb)
    olaz = f.parent.absolute() / f"{f.stem}.laz"
    llzrgb2las(f=llzrgb, o=olaz)
    return olaz
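A minimal usage sketch of the new pipeline; the import path and input file name below are hypothetical, not taken from the repository:

from pathlib import Path
from insar import insar_pipeline  # hypothetical import path

# bin displacement by quantile rather than equal-width intervals
laz_path = insar_pipeline(f=Path("site-displacement.csv"), quantile=True)
print(f"LAZ written to {laz_path}")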