# *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
# ** Copyright UCAR (c) 2018
# ** University Corporation for Atmospheric Research(UCAR)
# ** National Center for Atmospheric Research(NCAR)
# ** Research Applications Laboratory(RAL)
# ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA
# ** 2016/4/27 10:00:00
# *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
# Written by Kevin Sampson, NCAR
# Modified 2019/05/29
# --- Import Modules --- #
import sys
import os
import csv # For reading input forecast points in CSV format
import time
import math
import zipfile
from zipfile import ZipFile, ZipInfo
import shutil
from collections import defaultdict # Added 09/03/2015 Needed for topological sorting algorithm
from itertools import takewhile, count # Added 09/03/2015 Needed for topological sorting algorithm # Added 07/10/2015 for ExamineOutputs
import netCDF4
import numpy
from arcpy.sa import *
try:
from itertools import izip as zip # Added 1/6/2020 for Python3 compatibility
except ImportError: # will be 3.x series
pass
# Pure python method of getting unique sums - from http://stackoverflow.com/questions/4373631/sum-array-by-number-in-numpy
from operator import itemgetter # Used in the group_min function
import collections # Used in the group_min function
try:
    from packaging.version import parse as LooseVersion # To avoid deprecation warnings
except ImportError:
    from distutils.version import LooseVersion
# --- End Import Modules --- #
# --- Module Configurations --- #
sys.dont_write_bytecode = True # Do not write compiled (.pyc) files
# --- End Module Configurations --- #
# --- Globals --- #
###################################################
# Output options
# Output file types
outNCType = 'NETCDF4_CLASSIC' # Define the output netCDF version for RouteLink.nc and LAKEPARM.nc
# Default output file names
FullDom = 'Fulldom_hires.nc' # Default Full Domain routing grid nc file
LDASFile = 'GEOGRID_LDASOUT_Spatial_Metadata.nc' # Default LDASOUT domain grid nc file
LK_nc = 'LAKEPARM.nc' # Default Lake parameter table name [.nc]
LK_tbl = 'LAKEPARM.TBL' # Default Lake parameter table name [.TBL]
RT_nc = 'Route_Link.nc' # Default Route Link parameter table name
GW_nc = 'GWBUCKPARM.nc' # Default groundwater bucket parameter table name
GWGRID_nc = 'GWBASINS.nc'
GW_ASCII = 'gw_basns_geogrid.txt' # Default Groundwater Basins ASCII grid output
GW_TBL = 'GWBUCKPARM.TBL'
StreamSHP = 'streams.shp' # Default streams shapefile name
TempLakeFile = os.path.join("in_memory", "in_lakes_clip") # Temporary output lake file (clipped to domain). Was 'in_lakes_clip.shp'
LakesShp = 'lakes.shp' # Default lake shapefile name
###################################################
###################################################
# Other Options
WRFH_Version = 5.2 # WRF-Hydro version to be executed using the outputs of this tool
PpVersion = 'v5.2 (04/2021)' # WRF-Hydro ArcGIS Pre-processor version to add to FullDom metadata
CFConv = 'CF-1.5' # CF-Conventions version to place in the 'Conventions' attribute of RouteLink files
NoDataVal = -9999 # Default NoData value for gridded variables
walker = 3 # Number of cells to walk downstream before gaged catchment delineation
LK_walker = 3 # Number of cells to walk downstream to get minimum lake elevation
z_limit = 1000.0 # Maximum fill depth (z-limit) between a sink and its pour point
lksatfac_val = 1000.0 # Default LKSATFAC value (unitless coefficient)
minDepth = 1.0 # Minimum active lake depth for lakes with no elevation variation
# Elevation resampling method. Options: ["NEAREST", "BILINEAR", "CUBIC", "MAJORITY"]
ElevResampleMethod = "BILINEAR" # Method to be used for resampling high-resolution elevation to the model grid
###################################################
###################################################
# Geospatial Parameters
# Initiate dictionaries of GEOGRID projections and parameters
# See http://www.mmm.ucar.edu/wrf/users/docs/user_guide_V3/users_guide_chap3.htm#_Description_of_the_1
projdict = {1: 'Lambert Conformal Conic',
2: 'Polar Stereographic',
3: 'Mercator',
6: 'Cylindrical Equidistant'}
CF_projdict = {1: "lambert_conformal_conic",
2: "polar_stereographic",
3: "mercator",
6: "latitude_longitude",
0: "crs"}
# Point time-series CF-netCDF file coordinate system
'''Note that the point netCDF files are handled using a separate coordinate system from the grids.
This is because input data are usually in WGS84 or some other global spheroidal datum. We treat
these coordinates as though there is no difference between a sphere and a spheroid with respect
to latitude. Thus, we can choose an output coordinate system for the points, although no
transformation is performed. Properly transforming the points back and forth between sphere and
spheroid dramatically increases the runtime of the tools, with no clear benefit.'''
pointCF = True # Switch to turn on CF-netCDF point time-series metadata attributes
pointSR = 4326 # The spatial reference system of the point time-series netCDF files (RouteLink, LAKEPARM). NAD83=4269, WGS84=4326
# Global attributes for altering the sphere radius used in computations. Do not alter sphere_radius for standard WRF-Hydro simulations
sphere_radius = 6370000.0 # Radius of sphere to use (WRF Default = 6370000.0m)
wkt_text = "GEOGCS['GCS_Sphere_CUSTOM',DATUM['D_Sphere',SPHEROID['Sphere',%s,0.0]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.99462786704589E-09;0.001;0.001;IsHighPrecision" %sphere_radius
#same_sphere = arcpy.SpatialReference(104128) # EMEP sphere (same radius as WRF sphere)
# Unify all coordinate system variables to have the same name ("crs"). This makes it easier for WRF-Hydro output routines to identify the variable and transpose it to output files
crsVarname = True # Switch to make all coordinate system variables = "crs" instead of related to the coordinate system name
crsVar = CF_projdict[0] # Expose this as a global for other functions in other scripts to use
#crsVar = 'ProjectionCoordinateSystem'
#Custom Geotransformations for spheroid-to-sphere translation
geoTransfmName = "GeoTransform_Null_WRFHydro" # Custom Geotransformation Name
customGeoTransfm = "GEOGTRAN[METHOD['Null']]" # Null Transformation
#customGeoTransfm = "GEOGTRAN[METHOD['Geocentric_Translation'],PARAMETER['X_Axis_Translation',''],PARAMETER['Y_Axis_Translation',''],PARAMETER['Z_Axis_Translation','']]" # Zero-parameter Geocentric Translation
#customGeoTransfm = "GEOGTRAN[METHOD['Geocentric_Translation'],PARAMETER['X_Axis_Translation',0.0],PARAMETER['Y_Axis_Translation',0.0],PARAMETER['Z_Axis_Translation',0.0]]"
###################################################
###################################################
# Channel Routing default parameters
Qi = 0 # Initial Flow in link (cms)
MusK = 3600 # Muskingum routing time (s)
MusX = 0.2 # Muskingum weighting coefficient
n = 0.035 # Manning's roughness
ChSlp = 0.05 # Channel Side Slope (%; drop/length)
BtmWdth = 5 # Bottom Width of Channel (m)
Kc = 0 # channel conductivity (mm/hour)
minSo = 0.001 # Minimum slope allowed in RouteLink file
# Order-based Mannings N values for Strahler orders 1-10
ManningsOrd = True # Switch to activate order-based Mannings N values
Mannings_Order = {1:0.096,
2:0.076,
3:0.060,
4:0.047,
5:0.037,
6:0.030,
7:0.025,
8:0.021,
9:0.018,
10:0.022} # Values based on CONUS JTTI research, originally from LR 7/01/2020, confirmed by JMC 6/18/21
# Order-based Channel Side-Slope values for Strahler orders 1-10
ChSSlpOrd = True # Switch to activate order-based Channel Side-Slope values
Mannings_ChSSlp = {1:0.03,
2:0.03,
3:0.03,
4:0.04,
5:0.04,
6:0.04,
7:0.04,
8:0.04,
9:0.05,
10:0.10} # Values from LR 7/01/2020
# Order-based Bottom-width values for Strahler orders 1-10
BwOrd = True # Switch to activate order-based Bottom-width values
Mannings_Bw = {1:1.6,
2:2.4,
3:3.5,
4:5.3,
5:7.4,
6:11.,
7:14.,
8:16.,
9:26.,
10:110.} # Values from LR 7/01/2020
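# A minimal illustration (not used elsewhere in this module) of how these order-based lookup
# tables could be applied to an array of Strahler stream orders; 'order_arr' is a hypothetical
# 1D integer array of orders in the range 1-10:
#   order_arr = numpy.array([1, 3, 5, 10])
#   n_arr = numpy.array([Mannings_Order[item] for item in order_arr], dtype='f4')        # Manning's n per link
#   ChSlp_arr = numpy.array([Mannings_ChSSlp[item] for item in order_arr], dtype='f4')   # Channel side slope per link
#   BtmWdth_arr = numpy.array([Mannings_Bw[item] for item in order_arr], dtype='f4')     # Bottom width per link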
# Other channel options
maskRL = False # Allow masking of channels in RouteLink file. May cause WRF-Hydro to crash if True. This will also mask GWBASINS grid.
Streams_addFields = ['link', 'to', 'gages', 'Lake'] # Variables from RouteLink file to add to the output stream shapefile (for convenience, plotting in GIS)
###################################################
###################################################
#Default Lake Routing parameters
OrificeC = 0.1
OrificA = 1.0
WeirC = 0.4
WeirL = 10.0 # New default prescribed by D. Yates 5/11/2017 (10m default weir length). Old default weir length (0.0m).
ifd_Val = 0.90 # Default initial fraction water depth (90%)
dam_length = 10.0 # Default length of the dam
out_LKtype = ['nc'] # Default output lake parameter file format ['nc', 'ascii']
defaultLakeID = "NEWID" # Default new LakeID field for lakes, numbered 1..n
Lakes_addFields = ['lake_id'] # Variables from LAKEPARM file to add to the output lake shapefile (for convenience, plotting in GIS)
lake_id_64 = False # 6/26/2020: Allows for 64-bit integer lake ID types. Supporting NHDPlus HR.
subsetLakes = True # Option to eliminate lakes that do not intersect gridded channel network (gridded runs only)
###################################################
###################################################
# Default groundwater bucket (GWBUCKPARM) parameters
coeff = 1.0000 # Bucket model coefficient
expon = 3.000 # Bucket model exponent
zmax = 50.00 # Conceptual maximum depth of the bucket
zinit = 10.0000 # Initial depth of water in the bucket model
Loss = 0 # Not intended for Community WRF-Hydro
out_2Dtype = ['nc'] # Default output 2D groundwater bucket grid format: ['nc', 'ascii']
out_1Dtype = '.nc' # Default output 1D groundwater parameter (GWBUCKPARM.nc) format: ['.nc', '.nc and .TBL', '.TBL']
maskGW_Basins = False # Option to mask the GWBASINS.nc grid to only active channels
addLoss = False # Option to add loss function parameter to groundwater buckets. Not intended for Community WRF-Hydro use.
###################################################
###################################################
# Globals that support lake pre-processing
datestr = time.strftime("%Y_%m_%d") # Date string to append to output files
FLID = "ARCID" # Field name for the flowline IDs
LakeAssoc = 'WBAREACOMI' # Field name containing link-to-lake association
NoDownstream = [None, -1, 0] # A list of possible values indicating that the downstream segment is invalid (ocean, network endpoint, etc.)
LkNodata = 0 # Set the nodata value for the link-to-lake association
hydroSeq = 'HydroSeq' # Fieldname that stores a hydrologic sequence (ascending from downstream to upstream)
save_Lake_Link_Type_arr = True # Switch for saving the Lake_Link_Type_arr array to CSV
###################################################
###################################################
# Global dictionary used in Add_Param_data_to_FC(). Specify the mapping between
# netCDF variables and the field name, ArcGIS field type, and numpy dtype to use.
# Field information. FC field name: [NC variable name, ArcGIS field type, numpy dtype]
fields = {'link': ['link', 'LONG', 'i8'],
'from': ['from', 'LONG', 'i8'],
'to': ['to', 'LONG', 'i8'],
'lon': ['lon', 'FLOAT', 'f4'],
'lat': ['lat', 'FLOAT', 'f4'],
'alt': ['alt', 'FLOAT', 'f4'],
'order_': ['order', 'SHORT', 'i4'],
'Qi': ['Qi', 'FLOAT', 'f4'],
'MusK': ['MusK', 'FLOAT', 'f4'],
'MusX': ['MusX', 'FLOAT', 'f4'],
'Length': ['Length', 'FLOAT', 'f4'],
'n': ['n', 'FLOAT', 'f4'],
'So': ['So', 'FLOAT', 'f4'],
'ChSlp': ['ChSlp', 'FLOAT', 'f4'],
'BtmWdth': ['BtmWdth', 'FLOAT', 'f4'],
'x': ['x', 'FLOAT', 'f4'],
'y': ['y', 'FLOAT', 'f4'],
'Kchan': ['Kchan', 'SHORT', 'i4'],
'Lake': ['NHDWaterbodyComID', 'LONG', 'i4'],
'gages': ['gages', 'TEXT', '|S15'],
'lake_id': ['lake_id', 'LONG', 'i8'],
'LkArea': ['LkArea', 'FLOAT', 'f4'],
'LkMxE': ['LkMxE', 'FLOAT', 'f4'],
'WeirC': ['WeirC', 'FLOAT', 'f4'],
'WeirL': ['WeirL', 'FLOAT', 'f4'],
'OrificeC': ['OrificeC', 'FLOAT', 'f4'],
'OrificeA': ['OrificeA', 'FLOAT', 'f4'],
'OrificeE': ['OrificeE', 'FLOAT', 'f4'],
'WeirE': ['WeirE', 'FLOAT', 'f4'],
'ascendingIndex': ['ascendingIndex', 'SHORT', 'i4'],
'ifd': ['ifd', 'FLOAT', 'f4']}
if WRFH_Version >= 5.2:
fields.update({'Dam_Length': ['Dam_Length', 'FLOAT', 'f4']})
if lake_id_64:
# Alter the field type information for writing out a feature class later
fields['lake_id'] = ['lake_id', 'TEXT', 'f8']
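# A minimal sketch (hypothetical; the authoritative logic lives in Add_Param_data_to_FC) of how
# the mapping above could drive field creation on a feature class 'in_fc':
#   for FCfield, (ncVarname, FieldType, dtype) in fields.items():
#       arcpy.AddField_management(in_fc, FCfield, FieldType)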
###################################################
###################################################
# Globals that support wrfinput file generation
# Soil type to use as a fill value in case of conflicts between soil water and land cover water cells:
# If the script encounters a cell that is classified as land in the land use field (LU_INDEX)
# but is classified as a water soil type, it will replace the soil type with the value you
# specify below. Ideally there are not very many of these, so you can simply choose the most
# common soil type in your domain. Alternatively, you can set it to a "bad" value (e.g., -8888)
# to see how many of these conflicts there are. If you do this, DO NOT RUN THE MODEL WITH THESE
# BAD VALUES. Instead, fix them manually with a neighbor fill or similar fill algorithm.
fillsoiltyp = 3
# The script will attempt to find the dominant SOILCTOP value. If LU_INDEX is water,
# this will be water, otherwise it will be the dominant fractional non-water SOILCTOP
# category. If no dominant land class can be determined, set the soil category to
# use as a fill below.
dom_lc_fill = 8
# User-specified soil layer thickness (dzs). Also used to calculate depth of the center of each layer (zs)
dzs = [0.1, 0.3, 0.6, 1.0] # Soil layer thickness top layer to bottom (m)
nsoil = 4 # Number of soil layers (e.g., 4)
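# A minimal sketch of how the depth to the center of each soil layer (zs) could be derived from
# the layer thicknesses above; illustrative only, not necessarily the exact formulation used
# elsewhere in this script:
#   zs = [sum(dzs[:i]) + (dz / 2.0) for i, dz in enumerate(dzs)]    # -> [0.05, 0.25, 0.7, 1.5]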
# Switch for fixing SoilT values of 0 over water areas
fix_zero_over_water = True
###################################################
###################################################
# Dimension names to be used to identify certain known dimensions
yDims = ['south_north', 'y']
xDims = ['west_east', 'x']
timeDim = ['Time', 'time']
###################################################
# --- End Globals --- #
# --- Classes --- #
class ZipCompat(ZipFile):
def __init__(self, *args, **kwargs):
ZipFile.__init__(self, *args, **kwargs)
def extract(self, member, path=None):
if not isinstance(member, ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return self._extract_member(member, path)
def extractall(self, path=None, members=None, pwd=None):
if members is None:
members = self.namelist()
for zipinfo in members:
self.extract(zipinfo, path)
def _extract_member(self, member, targetpath):
if (targetpath[-1:] in (os.path.sep, os.path.altsep) and len(os.path.splitdrive(targetpath)[1]) > 1):
targetpath = targetpath[:-1]
if member.filename[0] == '/':
targetpath = os.path.join(targetpath, member.filename[1:])
else:
targetpath = os.path.join(targetpath, member.filename)
targetpath = os.path.normpath(targetpath)
upperdirs = os.path.dirname(targetpath)
if upperdirs and not os.path.exists(upperdirs):
os.makedirs(upperdirs)
if member.filename[-1] == '/':
if not os.path.isdir(targetpath):
os.mkdir(targetpath)
return targetpath
target = open(targetpath, "wb") # 5/31/2019: Supporting Python3
try:
target.write(self.read(member.filename))
finally:
target.close()
return targetpath
class TeeNoFile(object):
'''
Send print statements to a log file:
http://web.archive.org/web/20141016185743/https://mail.python.org/pipermail/python-list/2007-May/460639.html
https://stackoverflow.com/questions/11124093/redirect-python-print-output-to-logger/11124247
'''
def __init__(self, name, mode):
self.file = open(name, mode)
self.stdout = sys.stdout
sys.stdout = self
def close(self):
if self.stdout is not None:
sys.stdout = self.stdout
self.stdout = None
if self.file is not None:
self.file.close()
self.file = None
def write(self, data):
self.file.write(data)
self.stdout.write(data)
def flush(self):
self.file.flush()
self.stdout.flush()
def __del__(self):
self.close()
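# A minimal usage sketch for TeeNoFile (hypothetical log file path); everything printed to
# stdout is mirrored into the log file until close() is called:
#   tee = TeeNoFile(os.path.join(projdir, 'processing_log.txt'), 'w')   # 'projdir' is assumed to exist
#   print('This line goes to the console and to the log file')
#   tee.close()                                                         # Restore sys.stdout and close the file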
class WRF_Hydro_Grid:
'''
Class with which to create the WRF-Hydro grid representation. Provide grid
information to initiate the class, and use getgrid() to generate a grid mesh
and index information about the intersecting cells.
Note: The i,j index begins with (1,1) in the upper-left corner.
'''
def __init__(self, arcpy, rootgrp):
"""
The first step of the process chain, in which the input NetCDF file gets
georeferenced and projection files are created
9/27/2017: This function was altered to use netCDF4-python library instead
of arcpy.NetCDFFileProperties. Also changed the corner_lat and corner_lon
index to reflect the lower left corner point rather than lower left center as
the known point.
        10/6/2017: Proj4 string generation was added, with definitions adapted from
        https://github.com/NCAR/wrf-python/blob/develop/src/wrf/projection.py
5/29/2019: Removed MakeNetCDFRasterLayer_md function call to avoid bugs in ArcGIS
10.7 (BUG-000122122, BUG-000122125).
"""
corner_index = 12 # 12 = Lower Left of the Unstaggered grid
# First step: Import and georeference NetCDF file
printMessages(arcpy, ['Step 1: NetCDF Conversion initiated...']) # Initiate log list for this process
# Read input WPS GEOGRID file
# Loop through global variables in NetCDF file to gather projection information
dimensions = rootgrp.dimensions
globalAtts = rootgrp.__dict__ # Read all global attributes into a dictionary
map_pro = globalAtts['MAP_PROJ'] # Find out which projection this GEOGRID file is in
printMessages(arcpy, [' Map Projection: %s' %projdict[map_pro]])
# Collect grid corner XY and DX DY for creating ascii raster later
if 'corner_lats' in globalAtts:
corner_lat = float(globalAtts['corner_lats'][corner_index]) # Note: The values returned are corner points of the mass grid
if 'corner_lons' in globalAtts:
corner_lon = float(globalAtts['corner_lons'][corner_index]) # Note: The values returned are corner points of the mass grid
if 'DX' in globalAtts:
self.DX = float(globalAtts['DX'])
if 'DY' in globalAtts:
self.DY = float(globalAtts['DY'])
# Collect necessary information to put together the projection file
if 'TRUELAT1' in globalAtts:
standard_parallel_1 = globalAtts['TRUELAT1']
if 'TRUELAT2' in globalAtts:
standard_parallel_2 = globalAtts['TRUELAT2']
if 'STAND_LON' in globalAtts:
central_meridian = globalAtts['STAND_LON']
if 'POLE_LAT' in globalAtts:
pole_latitude = globalAtts['POLE_LAT']
if 'POLE_LON' in globalAtts:
pole_longitude = globalAtts['POLE_LON']
if 'CEN_LON' in globalAtts:
central_longitude = globalAtts['CEN_LON']
if 'MOAD_CEN_LAT' in globalAtts:
printMessages(arcpy, [' Using MOAD_CEN_LAT for latitude of origin.'])
latitude_of_origin = globalAtts['MOAD_CEN_LAT'] # Added 2/26/2017 by KMS
elif 'CEN_LAT' in globalAtts:
printMessages(arcpy, [' Using CEN_LAT for latitude of origin.'])
latitude_of_origin = globalAtts['CEN_LAT']
# Check to see if the input netCDF is a WPS-Generated Geogrid file.
if 'TITLE' in globalAtts and 'GEOGRID' in globalAtts['TITLE']:
self.isGeogrid = True
else:
self.isGeogrid = False
del globalAtts
# Handle expected dimension names from either Geogrid or Fulldom
if 'south_north' in dimensions:
self.nrows = len(dimensions['south_north'])
elif 'y' in dimensions:
self.nrows = len(dimensions['y'])
if 'west_east' in dimensions:
self.ncols = len(dimensions['west_east'])
elif 'x' in dimensions:
self.ncols = len(dimensions['x'])
del dimensions
# Create Projection file with information from NetCDF global attributes
sr2 = arcpy.SpatialReference()
if map_pro == 1:
# Lambert Conformal Conic
if 'standard_parallel_2' in locals():
#projname = 'Lambert_Conformal_Conic_2SP'
printMessages(arcpy, [' Using Standard Parallel 2 in Lambert Conformal Conic map projection.'])
else:
# According to http://webhelp.esri.com/arcgisdesktop/9.2/index.cfm?TopicName=Lambert_Conformal_Conic
#projname = 'Lambert_Conformal_Conic_1SP'
standard_parallel_2 = standard_parallel_1
projname = 'Lambert_Conformal_Conic'
Projection_String = ('PROJCS["Sphere_Lambert_Conformal_Conic",'
'GEOGCS["GCS_Sphere",'
'DATUM["D_Sphere",'
'SPHEROID["Sphere",' + str(sphere_radius) + ',0.0]],'
'PRIMEM["Greenwich",0.0],'
'UNIT["Degree",0.0174532925199433]],'
'PROJECTION["' + projname + '"],'
'PARAMETER["false_easting",0.0],'
'PARAMETER["false_northing",0.0],'
'PARAMETER["central_meridian",' + str(central_meridian) + '],'
'PARAMETER["standard_parallel_1",' + str(standard_parallel_1) + '],'
'PARAMETER["standard_parallel_2",' + str(standard_parallel_2) + '],'
'PARAMETER["latitude_of_origin",' + str(latitude_of_origin) + '],'
'UNIT["Meter",1.0]]')
proj4 = ("+proj=lcc +units=m +a={} +b={} +lat_1={} +lat_2={} +lat_0={} +lon_0={} +x_0=0 +y_0=0 +k_0=1.0 +nadgrids=@null +wktext +no_defs ".format(
str(sphere_radius),
str(sphere_radius),
str(standard_parallel_1),
str(standard_parallel_2),
str(latitude_of_origin),
str(central_meridian)))
elif map_pro == 2:
# Polar Stereographic
## # Set up pole latitude
## phi1 = float(standard_parallel_1)
##
## ### Back out the central_scale_factor (minimum scale factor?) using formula below using Snyder 1987 p.157 (USGS Paper 1395)
## ##phi = math.copysign(float(pole_latitude), float(latitude_of_origin)) # Get the sign right for the pole using sign of CEN_LAT (latitude_of_origin)
## ##central_scale_factor = (1 + (math.sin(math.radians(phi1))*math.sin(math.radians(phi))) + (math.cos(math.radians(float(phi1)))*math.cos(math.radians(phi))))/2
##
## # Method where central scale factor is k0, Derivation from C. Rollins 2011, equation 1: http://earth-info.nga.mil/GandG/coordsys/polar_stereographic/Polar_Stereo_phi1_from_k0_memo.pdf
## # Using Rollins 2011 to perform central scale factor calculations. For a sphere, the equation collapses to be much more compact (e=0, k90=1)
## central_scale_factor = (1 + math.sin(math.radians(abs(phi1))))/2 # Equation for k0, assumes k90 = 1, e=0. This is a sphere, so no flattening
##
## # Hardcode in the pole as the latitude of origin (correct assumption?) Added 4/4/2017 by KMS
## standard_parallel_1 = pole_latitude
##
## printMessages(arcpy, [' Central Scale Factor: %s' %central_scale_factor])
## Projection_String = ('PROJCS["Sphere_Stereographic",'
## 'GEOGCS["GCS_Sphere",'
## 'DATUM["D_Sphere",'
## 'SPHEROID["Sphere",' + str(sphere_radius) + ',0.0]],'
## 'PRIMEM["Greenwich",0.0],'
## 'UNIT["Degree",0.0174532925199433]],'
## 'PROJECTION["Stereographic"],'
## 'PARAMETER["False_Easting",0.0],'
## 'PARAMETER["False_Northing",0.0],'
## 'PARAMETER["Central_Meridian",' + str(central_meridian) + '],'
## 'PARAMETER["Scale_Factor",' + str(central_scale_factor) + '],'
## 'PARAMETER["Latitude_Of_Origin",' + str(standard_parallel_1) + '],'
## 'UNIT["Meter",1.0]]')
# 3/30/2020: Testing out utility of providing "Stereographic_North_Pole" or "Stereographic_South_Pole"
# definitions. However, this may not work for all WRF-supported aspects of stereographic CRS.
#if pole_latitude > 0:
if latitude_of_origin > 0:
pole_orientation = 'North'
else:
pole_orientation = 'South'
Projection_String = ('PROJCS["Sphere_Stereographic",'
'GEOGCS["GCS_Sphere",'
'DATUM["D_Sphere",'
'SPHEROID["Sphere",' + str(sphere_radius) + ',0.0]],'
'PRIMEM["Greenwich",0.0],'
'UNIT["Degree",0.0174532925199433]],'
'PROJECTION["Stereographic_' + pole_orientation + '_Pole"],'
'PARAMETER["False_Easting",0.0],'
'PARAMETER["False_Northing",0.0],'
'PARAMETER["Central_Meridian",' + str(central_meridian) + '],'
'PARAMETER["standard_parallel_1",' + str(standard_parallel_1) + '],'
'UNIT["Meter",1.0]]')
            proj4 = ("+proj=stere +units=m +a={} +b={} +lat_0={} +lon_0={} +lat_ts={}".format(
str(sphere_radius),
str(sphere_radius),
str(pole_latitude),
str(central_meridian),
str(standard_parallel_1)))
elif map_pro == 3:
# Mercator Projection
            # Trap nonsense values produced by WPS
if central_meridian >= 1e20:
central_meridian_val = central_longitude
else:
central_meridian_val = central_meridian
Projection_String = ('PROJCS["Sphere_Mercator",'
'GEOGCS["GCS_Sphere",'
'DATUM["D_Sphere",'
'SPHEROID["Sphere",' + str(sphere_radius) + ',0.0]],'
'PRIMEM["Greenwich",0.0],'
'UNIT["Degree",0.0174532925199433]],'
'PROJECTION["Mercator"],'
'PARAMETER["False_Easting",0.0],'
'PARAMETER["False_Northing",0.0],'
'PARAMETER["Central_Meridian",' + str(central_meridian_val) + '],'
'PARAMETER["Standard_Parallel_1",' + str(standard_parallel_1) + '],'
'UNIT["Meter",1.0]]')
proj4 = ("+proj=merc +units=m +a={} +b={} +lon_0={} +lat_ts={}".format(
str(sphere_radius),
str(sphere_radius),
str(central_meridian_val),
str(standard_parallel_1)))
elif map_pro == 6:
# Cylindrical Equidistant (or Rotated Pole)
if pole_latitude != float(90) or pole_longitude != float(0):
                # if pole_latitude, pole_longitude, or stand_lon are changed from their default values, the pole is 'rotated'.
printMessages(arcpy, [' Cylindrical Equidistant projection with a rotated pole is not currently supported.'])
##proj4 = ("+proj=ob_tran +o_proj=latlon +a={} +b={} +to_meter={} +o_lon_p={} +o_lat_p={} +lon_0={}".format(
## sphere_radius,
## sphere_radius,
## math.radians(1),
## 180.0 - pole_longitude,
## pole_latitude,
## 180.0 + pole_longitude))
sys.exit(1)
else:
# Check units (linear unit not used in this projection). GCS?
Projection_String = ('PROJCS["Sphere_Equidistant_Cylindrical",'
'GEOGCS["GCS_Sphere",'
'DATUM["D_Sphere",'
'SPHEROID["Sphere",' + str(sphere_radius) + ',0.0]],'
'PRIMEM["Greenwich",0.0],'
'UNIT["Degree",0.0174532925199433]],'
'PROJECTION["Equidistant_Cylindrical"],'
'PARAMETER["False_Easting",0.0],'
'PARAMETER["False_Northing",0.0],'
'PARAMETER["Central_Meridian",' + str(central_meridian) + '],'
'PARAMETER["Standard_Parallel_1",0.0],'
'UNIT["Meter",1.0]]') # 'UNIT["Degree", 1.0]]') # ?? For lat-lon grid?
proj4 = ("+proj=eqc +units=m +a={} +b={} +lon_0={}".format(str(sphere_radius), str(sphere_radius), str(central_meridian)))
# Create a point geometry object from gathered corner point data
sr2.loadFromString(Projection_String)
sr1 = arcpy.SpatialReference()
sr1.loadFromString(wkt_text) # Load the Sphere datum CRS using WKT
point = arcpy.Point()
point.X = corner_lon
point.Y = corner_lat
pointGeometry = arcpy.PointGeometry(point, sr1)
projpoint = pointGeometry.projectAs(sr2) # Optionally add transformation method:
# Get projected X and Y of the point geometry
point2 = arcpy.Point(projpoint.firstPoint.X, projpoint.firstPoint.Y) # Lower Left Corner Point
# Populate class attributes
self.x00 = point2.X # X value doesn't change from LLcorner to UL corner
self.y00 = point2.Y + float(abs(self.DY)*self.nrows) # Adjust Y from LL to UL
self.proj = sr2
#self.WKT = Projection_String.replace("'", '"') # Replace ' with " so Esri can read the PE String properly when running NetCDFtoRaster
self.WKT = sr2.exportToString()
self.proj4 = proj4
self.point = point2
self.map_pro = map_pro
printMessages(arcpy, [' Georeferencing step completed without error.'])
del point, sr1, projpoint, pointGeometry
def regrid(self, regrid_factor):
'''
Change the grid cell spacing while keeping all other grid parameters
the same.
'''
self.DX = float(self.DX)/float(regrid_factor)
self.DY = float(self.DY)/float(regrid_factor)
self.nrows = int(self.nrows*regrid_factor)
self.ncols = int(self.ncols*regrid_factor)
print(' New grid spacing: dx={0}, dy={1}'.format(self.DX, self.DY))
print(' New dimensions: rows={0}, cols={1}'.format(self.nrows, self.ncols))
return self
def GeoTransform(self):
'''
Return the affine transformation for this grid. Assumes a 0 rotation grid.
        (top left x, w-e pixel resolution, 0 (rotation), top left y, 0 (rotation), n-s pixel resolution (negative for north-up))
'''
return (self.x00, self.DX, 0, self.y00, 0, -self.DY)
def GeoTransformStr(self):
return ' '.join([str(item) for item in self.GeoTransform()])
def getxy(self):
"""
This function will use the affine transformation (GeoTransform) to produce an
array of X and Y 1D arrays. Note that the GDAL affine transformation provides
the grid cell coordinates from the upper left corner. This is typical in GIS
applications. However, WRF uses a south_north ordering, where the arrays are
written from the bottom to the top.
The input raster object will be used as a template for the output rasters.
"""
print(' Starting Process: Building to XMap/YMap')
# Build i,j arrays
j = numpy.arange(self.nrows) + float(0.5) # Add 0.5 to estimate coordinate of grid cell centers
i = numpy.arange(self.ncols) + float(0.5) # Add 0.5 to estimate coordinate of grid cell centers
# col, row to x, y From https://www.perrygeo.com/python-affine-transforms.html
x = (i * self.DX) + self.x00
y = (j * -self.DY) + self.y00
del i, j
# Create 2D arrays from 1D
xmap = numpy.repeat(x[numpy.newaxis, :], y.shape, 0)
ymap = numpy.repeat(y[:, numpy.newaxis], x.shape, 1)
del x, y
print(' Conversion of input raster to XMap/YMap completed without error.')
return xmap, ymap
def grid_extent(self, arcpy):
'''
Return the grid bounding extent [xMin, yMin, xMax, yMax]
'''
xMax = self.x00 + (float(self.ncols)*self.DX)
yMin = self.y00 + (float(self.nrows)*-self.DY)
extent_obj = arcpy.Extent(self.x00, yMin, xMax, self.y00)
return extent_obj
def numpy_to_Raster(self, arcpy, in_arr, value_to_nodata=NoDataVal):
        '''This function converts an input 2D numpy array to an arcpy Raster object,
        georeferenced using this grid object's corner point, cell size, and projection
        definition. This is useful, for example, if you have a FullDom netCDF file and
the GEOGRID that defines the domain. You can output any of the FullDom variables
to raster.
Only use 2D arrays as input.'''
# 7/16/2020: Check to make sure input is not a masked array. Happens in ArcGIS Desktop (python 2.7).
if numpy.ma.isMA(in_arr):
in_arr = in_arr.data
# Process: Numpy Array to Raster
nc_raster = arcpy.NumPyArrayToRaster(in_arr, self.point, self.DX, self.DY, value_to_nodata)
# Process: Define Projection
arcpy.DefineProjection_management(nc_raster, self.proj)
return nc_raster
def domain_shapefile(self, arcpy, in_raster, outputFile):
"""This process creates a shapefile that bounds the GEOGRID file domain. This
requires the Spatial Analyst extension."""
printMessages(arcpy, ['Step 2: Build constant raster and convert to polygon...'])
# Set environments
arcpy.env.overwriteOutput = True
arcpy.env.outputCoordinateSystem = self.proj
# Build constant raster
RasterLayer = "RasterLayer"
arcpy.MakeRasterLayer_management(in_raster, RasterLayer)
outConstRaster = Con(IsNull(RasterLayer) == 1, 0, 0) # New method 10/29/2018: Use Con to eliminate NoData cells if there are any.
# Raster to Polygon conversion
arcpy.RasterToPolygon_conversion(outConstRaster, outputFile, "NO_SIMPLIFY") #, "VALUE")
# Clean up
arcpy.Delete_management(outConstRaster)
printMessages(arcpy, [' Finished building GEOGRID domain boundary shapefile: {0}.'.format(outputFile)])
def xy_to_grid_ij(self, x, y):
'''
This function converts a coordinate in (x,y) to the correct row and column
on a grid. Code from: https://www.perrygeo.com/python-affine-transforms.html
Grid indices are 0-based.
'''
# x,y to col,row.
col = int((x - self.x00) / self.DX)
row = abs(int((y - self.y00) / self.DY))
return row, col
def grid_ij_to_xy(self, col, row):
'''
        This function converts a 2D grid index (i,j) to the grid cell center coordinate
(x,y) in the grid coordinate system.
Code from: https://www.perrygeo.com/python-affine-transforms.html
Grid indices are 0-based.
'''
# col, row to x, y
x = (col * self.DX) + self.x00 + self.DX/2.0
y = (row * -self.DY) + self.y00 - self.DY/2.0
return x, y
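    # A minimal sketch (hypothetical values) of the two conversions above, assuming a grid
    # object 'grid_obj' with x00=-1000000.0, y00=500000.0, and DX=DY=1000.0:
    #   row, col = grid_obj.xy_to_grid_ij(-999500.0, 499500.0)    # -> (0, 0), the upper-left cell
    #   x, y = grid_obj.grid_ij_to_xy(0, 0)                       # -> (-999500.0, 499500.0), that cell's center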
def project_to_model_grid(self, arcpy, in_raster, out_raster, resampling=ElevResampleMethod):
'''
Project an input raster to the output cellsize and extent of the grid object.
'''
# Set environments
arcpy.ResetEnvironments() # Added 2016-02-11
arcpy.env.outputCoordinateSystem = self.proj
arcpy.env.extent = self.grid_extent(arcpy)
arcpy.env.cellSize = self.DX
# Project the input CRS to the output CRS
arcpy.ProjectRaster_management(in_raster, out_raster, self.proj, resampling, self.DX, "#", "#", in_raster.spatialReference)
def getgrid(self, arcpy, x, y):
'''
There are nine positions relative to a grid cell center (inclusive).
[center, left center, right center, bottom center, top center, LL, UL, UR, LR]
Method with which to create the coordinates relative to a grid cell center
coordinate in unprojected cartesian space. Use getgrid() to generate a
set of points on the staggered grid around the center point. Used to
re-generate corner_lats and corner_lons in a geogrid file.
'''
# Store all points in a list
points = [] # Initiate list to store point geometries
# Use multipliers of DX/DY to get the relative coordinates based on the center
multipliersX = [0, -0.5, 0.5, 0, 0, -0.5, -0.5, 0.5, 0.5] # DX multipliers
multipliersY = [0, 0, 0, -0.5, 0.5, -0.5, 0.5, 0.5, -0.5] # DY multipliers
for multiX, multiY in zip(multipliersX, multipliersY):
point = arcpy.Point(float(x+(abs(self.DX)*float(multiX))), float(y+(abs(self.DY)*float(multiY))))
points.append(arcpy.PointGeometry(point, self.proj))
del point, multipliersX, multipliersY, multiX, multiY
return points
# Function to determine a set of corner lats and lons from a single available value
def find_latlon(self, arcpy, inpoint):
'''
        Take a grid cell center point and produce a list of all of the corner
coordinates for that point.
'''
# Setup the lat/lon point CRS
insrs = arcpy.SpatialReference(pointSR)
# Create coordinate transform for this point and transform
pointGeometry = arcpy.PointGeometry(inpoint, insrs)
        projpoint = pointGeometry.projectAs(self.proj) # Project from latitude/longitude to the grid coordinate system
# Gather all corner coordinates for this point in projected space
points = self.getgrid(arcpy, projpoint.firstPoint.X, projpoint.firstPoint.Y)
        # Convert back to the original coordinate system
        points_proj = []
        for num, pt in enumerate(points):
            pt2 = pt.projectAs(insrs) # projectAs returns a new, re-projected geometry
            points_proj.append(pt2)
del num, pt, pt2, inpoint, points, pointGeometry
return points_proj # Return list of out-points
# --- End Classes --- #
# --- Functions --- #
def printMessages(arcpy, messages):
    '''Provide a list of messages and they will be printed and returned as ArcGIS tool messages.'''
for i in messages:
print(i)
arcpy.AddMessage(i)
def makeoutncfile(arcpy, raster, outname, outvar, projdir):
outfile = os.path.join(projdir, outname)
arcpy.RasterToNetCDF_md(raster, outfile, outvar, "", "x", "y")
printMessages(arcpy, [' Process: {0} completed without error'.format(outname)])
printMessages(arcpy, [' Output File: {0}'.format(outfile)])
def ReprojectCoords(arcpy, xcoords, ycoords, src_srs, tgt_srs):
'''
Adapted from:
https://gis.stackexchange.com/questions/57834/how-to-get-raster-corner-coordinates-using-python-gdal-bindings
Reproject a list of x,y coordinates.
'''
tic1 = time.time()
ravel_x = numpy.ravel(xcoords)
ravel_y = numpy.ravel(ycoords)
trans_x = numpy.zeros(ravel_x.shape, ravel_x.dtype)
trans_y = numpy.zeros(ravel_y.shape, ravel_y.dtype)
point = arcpy.Point()
for num, (x, y) in enumerate(zip(ravel_x, ravel_y)):
point.X = float(x)
point.Y = float(y)
pointGeometry = arcpy.PointGeometry(point, src_srs)
projpoint = pointGeometry.projectAs(tgt_srs) # Optionally add transformation method:
x1 = projpoint.firstPoint.X
y1 = projpoint.firstPoint.Y
trans_x[num] = x1
trans_y[num] = y1
# reshape transformed coordinate arrays of the same shape as input coordinate arrays
trans_x = trans_x.reshape(*xcoords.shape)
trans_y = trans_y.reshape(*ycoords.shape)
printMessages(arcpy, ['Completed transforming coordinate pairs [{0}] in {1: 3.2f} seconds.'.format(num, time.time()-tic1)])
return trans_x, trans_y
def zipws(arcpy, path, zip, keep, nclist):
path = os.path.normpath(path)
for dirpath, dirnames, filenames in os.walk(path):
for file in filenames:
if file in nclist:
try:
if keep:
zip.write(os.path.join(dirpath, file), os.path.join(os.sep + os.path.join(dirpath, file)[len(path) + len(os.sep):]))
except Exception as e:
printMessages(arcpy, ['Exception: {0}'.format(e)])
arcpy.AddWarning((file, e))
def zipUpFolder(arcpy, folder, outZipFile, nclist):
try:
zip = zipfile.ZipFile(outZipFile, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
zipws(arcpy, str(folder), zip, 'CONTENTS_ONLY', nclist)
zip.close()
except RuntimeError:
pass
def flip_grid(array):
'''This function takes a two dimensional array and flips it up-down to
    correct for the netCDF storage of these grids. The y-dimension must be the
    first dimension (first axis) of the array.'''
return array[::-1] # Flip 2+D grid up-down
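# A minimal illustration of flip_grid() on a small 2D array ordered (y, x):
#   arr = numpy.array([[1, 2], [3, 4]])
#   flip_grid(arr)                     # -> array([[3, 4], [1, 2]]) (row order reversed, top-to-bottom)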
def Examine_Outputs(arcpy, in_zip, out_folder, skipfiles=[]):
"""This tool takes the output zip file from the ProcessGeogrid script and creates a raster
from each output NetCDF file. The Input should be a .zip file that was created using the
WRF Hydro pre-processing tools. The tool will create the folder which will contain the
results (out_folder), if that folder does not already exist."""
# Set environments and create scratch workspace
arcpy.env.overwriteOutput = True
out_sfolder = arcpy.CreateScratchName("temp", data_type="Folder", workspace=arcpy.env.scratchFolder)
os.mkdir(out_sfolder)
dellist = [] # Keep a directory of files to delete
# Unzip to a known location (make sure no other nc files live here)
ZipCompat(in_zip).extractall(out_sfolder)
# Iterate through unzipped files and copy to output directory as necessary
for dirpath, dirnames, filenames in os.walk(out_sfolder):
for file in filenames:
infile = os.path.join(dirpath, file)
dellist.append(infile)
# Copy skipped files over to new directory
if file in skipfiles:
shutil.copy2(infile, out_folder)
printMessages(arcpy, [' File Copied: {0}'.format(file)])
# Ignore the GEOGRID LDASOUT spatial metadata file
if file == LDASFile:
shutil.copy2(infile, out_folder)
printMessages(arcpy, [' File Copied: {0}'.format(file)])
continue
if file.endswith('.nc'):
# Trap to eliminate Parameter tables in NC format from this extraction
if file.endswith(LK_nc) or file.endswith(RT_nc) or file.endswith(GW_nc):
shutil.copy2(infile, out_folder)
printMessages(arcpy, [' File Created: {0}'.format(file)])
del file
continue
# Establish an object for reading the input NetCDF file
rootgrp = netCDF4.Dataset(infile, 'r')
if LooseVersion(netCDF4.__version__) > LooseVersion('1.4.0'):
rootgrp.set_auto_mask(False) # Change masked arrays to old default (numpy arrays always returned)
# Using netCDF4 library - still need point2, DX, DY
GT = rootgrp.variables[crsVar].GeoTransform.split(" ")
if 'esri_pe_string' in rootgrp.variables[crsVar].__dict__:
PE_string = rootgrp.variables[crsVar].esri_pe_string
elif 'spatial_ref' in rootgrp.variables[crsVar].__dict__:
PE_string = rootgrp.variables[crsVar].spatial_ref
printMessages(arcpy, [' GeoTransform: {0}'.format(GT)])
DX = float(GT[1])
DY = abs(float(GT[5])) # In GeoTransform, Y is usually negative
printMessages(arcpy, [' DX: {0}'.format(DX)])
printMessages(arcpy, [' DY: {0}'.format(DY)])
sr = arcpy.SpatialReference()
sr.loadFromString(PE_string.replace('"', "'"))
point = arcpy.Point(float(GT[0]), float(GT[3]) - float(DY*len(rootgrp.dimensions['y']))) # Calculate LLCorner value from GeoTransform (ULCorner)
arcpy.env.outputCoordinateSystem = sr
for variablename, ncvar in rootgrp.variables.items():
if ncvar.dimensions == ('y', 'x'):
outRasterLayer = variablename
outArr = numpy.array(ncvar[:])
nc_raster = arcpy.NumPyArrayToRaster(outArr, point, DX, DY)
outRasterFile = os.path.join(out_folder, outRasterLayer)
nc_raster.save(outRasterFile)
arcpy.CalculateStatistics_management(outRasterFile)
arcpy.DefineProjection_management(outRasterFile, sr)
printMessages(arcpy, [' File Created: {0}'.format(outRasterLayer)])
del nc_raster, variablename, outRasterLayer
rootgrp.close()
del file, infile, rootgrp, ncvar
continue
if file.endswith('.shp'):
newshp = os.path.join(out_folder, file)
arcpy.CopyFeatures_management(infile, newshp)
printMessages(arcpy, [' File Created: {0}'.format(str(file))])
del file, infile, newshp
continue
# These file formats are legacy output files, but allow the tool to work with older routing stacks
if file.endswith('.csv') or file.endswith('.TBL') or file.endswith('.txt') or file.endswith('.prj'):
                # Change was made around 09/2017 to remove the ASCII raster header from the gw_basns_geogrid.txt ASCII raster. Thus, ASCIIToRaster is no longer usable
shutil.copy2(infile, out_folder)
printMessages(arcpy, [' File Created: {0}'.format(file)])
del file, infile
continue
del dirpath, dirnames, filenames
# Remove each file from the temporary extraction directory
for infile in dellist:
os.remove(infile)
printMessages(arcpy, ['Extraction of WRF routing grids completed.'])
return out_sfolder
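# A minimal usage sketch for Examine_Outputs (hypothetical paths); extracts the rasters and
# tables from a routing stack zip file produced by the pre-processing tools:
#   import arcpy
#   scratch_dir = Examine_Outputs(arcpy, r'C:\data\routing_stack.zip', r'C:\data\extracted')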
def add_CRS_var(rootgrp, sr, map_pro, CoordSysVarName, grid_mapping, PE_string, GeoTransformStr=None):
'''
10/13/2017 (KMS):
    This function was added to generalize the creation of a CF-compliant
coordinate reference system variable. This was modularized in order to
create CRS variables for both gridded and point time-series CF-netCDF
files.
'''
# Scalar projection variable - http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/reference/StandardCoordinateTransforms.html
proj_var = rootgrp.createVariable(CoordSysVarName, 'S1') # (Scalar Char variable)
proj_var.transform_name = grid_mapping # grid_mapping. grid_mapping_name is an alias for this
proj_var.grid_mapping_name = grid_mapping # for CF compatibility
proj_var.esri_pe_string = PE_string # For ArcGIS. Not required if esri_pe_string exists in the 2D variable attributes
    proj_var.spatial_ref = PE_string # For GDAL
proj_var.long_name = "CRS definition" # Added 10/13/2017 by KMS to match GDAL format
proj_var.longitude_of_prime_meridian = 0.0 # Added 10/13/2017 by KMS to match GDAL format
if GeoTransformStr is not None:
        proj_var.GeoTransform = GeoTransformStr # For GDAL - GeoTransform array