diff --git a/ControlTable_StreamCat.csv b/ControlTable_StreamCat.csv index 1333f81..e95dd42 100644 --- a/ControlTable_StreamCat.csv +++ b/ControlTable_StreamCat.csv @@ -1,56 +1,95 @@ FullTableName,accum_type,MetricName,AppendMetric,LandscapeLayer,summaryfield,Final_Table_Name,MetricType,Conversion,by_RPU,use_mask,run,notes +AgDrain,Categorical,lookup/AgDrain_lookup.csv,none,AgDrain_stlvl_FINAL.tif,,AgDrain,Percent,1,0,0,0, Ag2006HiSlp,Categorical,lookup/Ag2006HiSlp_lookup.csv,none,Ag2006HiSlp.tif,,AgMidHiSlopes,Percent,1,0,0,0, Ag2006MidSlp,Categorical,lookup/Ag2006MidSlp_lookup.csv,none,Ag2006MidSlp.tif,,AgMidHiSlopes,Percent,1,0,0,0, -AgDrain,Categorical,lookup/AgDrain_lookup.csv,none,AgDrain_stlvl_FINAL.tif,,AgDrain,Percent,1,0,0,0, -AgKffact,Continuous,AgKffact,none,AgKffact.tif,,Kffact,Mean,0.01,0,0,0, -Al2O3,Continuous,Al2O3,none,al20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +CBNF,Continuous,CBNF,none,cbnf.tif,,AgriculturalNitrogen,Mean,1,0,0,0, +Fert,Continuous,Fert,none,fert.tif,,AgriculturalNitrogen,Mean,1,0,0,0, +Manure,Continuous,Manure,none,manure.tif,,AgriculturalNitrogen,Mean,1,0,0,0, Aquifers,Categorical,lookup/Aquifer_Table_lookup.csv,none,us_aquifers_two.tif,,Aquifers,Percent,1,0,0,0, -AveSN_2008,Continuous,SN_2008,none,dep_splusn_2008.tif,,NADP,Mean,1,0,0,0, bedrock_perm,Continuous,lookup/bedrock_perm_lookup.csv,none,bedrock_perm.tif,,bedrock_perm,Mean,1,0,0,0, BFI,Continuous,BFI,none,bfi48grd.tif,,BFI,Mean,1,0,0,0, CanalDensity,Continuous,CanalDens,none,CanalsDitches.tif,,CanalDensity,Density,0.03,0,0,0, -CaO,Continuous,CaO,none,cao20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, -CBNF,Continuous,CBNF,none,cbnf.tif,,AgriculturalNitrogen,Mean,1,0,0,0, -Clay,Continuous,Clay,none,clay.tif,,STATSGO_Set1,Mean,0.01,0,0,0, CoalMines,Point,CoalMineDens,none,USTRAT.shp,,CoalMines,Density,1,0,0,0, -CompStrgth,Continuous,CompStrgth,none,ucs20mar14.tif,,GeoChemPhys4,Mean,1,0,0,0, 
+Phos_Ag_Balance,Continuous,Phos_Ag_Balance,none,Phos_Ag_Balance.tif,,Compton_Phos_Inputs,Mean,1,0,0,0, +Phos_Crop_Uptake,Continuous,Phos_Crop_Uptake,none,Phos_Crop_Uptake.tif,,Compton_Phos_Inputs,Mean,1,0,0,0, +Phos_Fert,Continuous,Phos_Fert,none,Phos_Fert.tif,,Compton_Phos_Inputs,Mean,1,0,0,0, +Phos_Manure,Continuous,Phos_Manure,none,Phos_Manure.tif,,Compton_Phos_Inputs,Mean,1,0,0,0, Dams,Point,DamDens,none,dams.shp,NIDStorM3;NrmStorM3,Dams,Density,1,0,0,0, Elev,Continuous,Elev,none,elev_cm,,Elevation,Mean,0.01,1,0,0,Elevation takes an extremely long time to run..by_RPU -Fe2O3,Continuous,Fe2O3,none,fe20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, -Fert,Continuous,Fert,none,fert.tif,,AgriculturalNitrogen,Mean,1,0,0,0, +NPDES,Point,NPDESDens,none,NPDES_Major.shp,,EPA_FRS,Density,1,0,0,0, +Superfund,Point,SuperfundDens,none,Superfund.shp,,EPA_FRS,Density,1,0,0,0, +TRI,Point,TRIDens,none,TRI.shp,,EPA_FRS,Density,1,0,0,0, +NPDES_RipBuf100,Point,NPDESDens,Rp100,NPDES_RpBuf100.shp,,EPA_FRSRipBuf100,Density,1,0,1,0, +Superfund_RipBuf100,Point,SuperfundDens,Rp100,Superfund_RpBuf100.shp,,EPA_FRSRipBuf100,Density,1,0,1,0, +TRI_RipBuf100,Point,TRIDens,Rp100,TRI_RpBuf100.shp,,EPA_FRSRipBuf100,Density,1,0,1,0, Fire2000,Continuous,PctFire2000,none,fire2000.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2000_RipBuf100,Continuous,PctFire2000,Rp100,fire2000.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2001,Continuous,PctFire2001,none,fire2001.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2001_RipBuf100,Continuous,PctFire2001,Rp100,fire2001.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2002,Continuous,PctFire2002,none,fire2002.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2002_RipBuf100,Continuous,PctFire2002,Rp100,fire2002.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2003,Continuous,PctFire2003,none,fire2003.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2003_RipBuf100,Continuous,PctFire2003,Rp100,fire2003.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, 
Fire2004,Continuous,PctFire2004,none,fire2004.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2004_RipBuf100,Continuous,PctFire2004,Rp100,fire2004.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2005,Continuous,PctFire2005,none,fire2005.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2005_RipBuf100,Continuous,PctFire2005,Rp100,fire2005.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2006,Continuous,PctFire2006,none,fire2006.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2006_RipBuf100,Continuous,PctFire2006,Rp100,fire2006.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2007,Continuous,PctFire2007,none,fire2007.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2007_RipBuf100,Continuous,PctFire2007,Rp100,fire2007.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2008,Continuous,PctFire2008,none,fire2008.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2008_RipBuf100,Continuous,PctFire2008,Rp100,fire2008.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2009,Continuous,PctFire2009,none,fire2009.tif,,FirePerimeters,Mean,100,0,0,0, -Fire2009_RipBuf100,Continuous,PctFire2009,Rp100,fire2009.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2010,Continuous,PctFire2010,none,fire2010.tif,,FirePerimeters,Mean,100,0,0,0, +Fire2000_RipBuf100,Continuous,PctFire2000,Rp100,fire2000.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +Fire2001_RipBuf100,Continuous,PctFire2001,Rp100,fire2001.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +Fire2002_RipBuf100,Continuous,PctFire2002,Rp100,fire2002.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +Fire2003_RipBuf100,Continuous,PctFire2003,Rp100,fire2003.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +Fire2004_RipBuf100,Continuous,PctFire2004,Rp100,fire2004.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +Fire2005_RipBuf100,Continuous,PctFire2005,Rp100,fire2005.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +Fire2006_RipBuf100,Continuous,PctFire2006,Rp100,fire2006.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, 
+Fire2007_RipBuf100,Continuous,PctFire2007,Rp100,fire2007.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +Fire2008_RipBuf100,Continuous,PctFire2008,Rp100,fire2008.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +Fire2009_RipBuf100,Continuous,PctFire2009,Rp100,fire2009.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, Fire2010_RipBuf100,Continuous,PctFire2010,Rp100,fire2010.tif,,FirePerimetersRipBuf100,Mean,100,0,1,0, +PerDun,Continuous,PerDun,none,perdun.tif,,FlowMetrics,Mean,1,0,0,0, +PerHor,Continuous,PerHor,none,perhor.tif,,FlowMetrics,Mean,1,0,0,0, +PERMH,Continuous,PERMH,none,PERMH.tif,,FlowMetrics,Mean,1,0,0,0, +TMEAN_S_2013,Continuous,TMEAN_S_2013,none,TMEAN_S_2013.tif,,FlowMetrics,Mean,1,0,0,0, +TMEAN_S_2014,Continuous,TMEAN_S_2014,none,TMEAN_S_2014.tif,,FlowMetrics,Mean,1,0,0,0, +TMEAN_W_2013,Continuous,TMEAN_W_2013,none,TMEAN_W_2013.tif,,FlowMetrics,Mean,1,0,0,0, +TMEAN_W_2014,Continuous,TMEAN_W_2014,none,TMEAN_W_2014.tif,,FlowMetrics,Mean,1,0,0,0, +TMEANPW_2013,Continuous,TMEANPW_2013,none,TMEANPW_2013.tif,,FlowMetrics,Mean,1,0,0,0, +TMEANPW_2014,Continuous,TMEANPW_2014,none,TMEANPW_2014.tif,,FlowMetrics,Mean,1,0,0,0, +PctFrstLoss,Continuous,PctFrstLoss,none,loss.tif,,ForestLoss,Mean,1,0,0,0, +PctFrstLossByYear,Categorical,lookup/ForestLossByYear_lookup.csv,none,lossyr.tif,,ForestLossByYear0013,Percent,1,0,0,0, +PctFrstLossByYear_RipBuf100,Categorical,lookup/ForestLossByYear_lookup.csv,Rp100,lossyr.tif,,ForestLossByYear0013RipBuf100,Percent,1,0,1,0, +PctFrstLoss_RipBuf100,Continuous,PctFrstLoss_RipBuf100,Rp100,loss.tif,,ForestLossRipBuf100,Mean,1,0,1,0, fstfrz,Continuous,FST32F,none,fstfrz.tif,,fstfrz,Mean,1,0,0,0, -HUDen2010,Continuous,HUDen2010,none,HU_SQKM.tif,,USCensus2010,Mean,1,0,0,0, -HUDen2010_RipBuf100,Continuous,HUDen2010,Rp100,HU_SQKM.tif,,USCensus2010RipBuf100,Mean,1,0,1,0, -HydrlCond,Continuous,HydrlCond,none,perm20mar14.tif,,GeoChemPhys3,Mean,1,0,0,0, -InorgNWetDep_2008,Continuous,InorgNWetDep_2008,none,dep_totalN_2008.tif,,NADP,Mean,1,0,0,0, 
+Al2O3,Continuous,Al2O3,none,al20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +CaO,Continuous,CaO,none,cao20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +Fe2O3,Continuous,Fe2O3,none,fe20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, K2O,Continuous,K2O,none,k20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +MgO,Continuous,MgO,none,mgo20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +Na2O,Continuous,Na2O,none,na20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +P2O5,Continuous,P2O5,none,p20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +S,Continuous,S,none,s20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +SiO2,Continuous,SiO2,none,si20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +N,Continuous,N,none,n20mar14.tif,,GeoChemPhys2,Mean,1,0,0,0, +HydrlCond,Continuous,HydrlCond,none,perm20mar14.tif,,GeoChemPhys3,Mean,1,0,0,0, +CompStrgth,Continuous,CompStrgth,none,ucs20mar14.tif,,GeoChemPhys4,Mean,1,0,0,0, +PctImp2001,Continuous,PctImp2001,none,NLCD_2001_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2001,Mean,1,0,0,0, +PctImp2001_HiSlp,Continuous,PctImp2001,Slp20,NLCD_2001_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2001HiSlope,Mean,1,0,2,0, +PctImp2001_MidSlp,Continuous,PctImp2001,Slp10,NLCD_2001_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2001MidSlope,Mean,1,0,3,0, +PctImp2001_RipBuf100,Continuous,PctImp2001,Rp100,NLCD_2001_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2001RipBuf100,Mean,1,0,1,0, +PctImp2006,Continuous,PctImp2006,none,NLCD_2006_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2006,Mean,1,0,0,0, +PctImp2006_HiSlp,Continuous,PctImp2006,Slp20,NLCD_2006_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2006HiSlope,Mean,1,0,2,0, +PctImp2006_MidSlp,Continuous,PctImp2006,Slp10,NLCD_2006_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2006MidSlope,Mean,1,0,3,0, +PctImp2006_RipBuf100,Continuous,PctImp2006,Rp100,NLCD_2006_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2006RipBuf100,Mean,1,0,1,0, 
+PctImp2011,Continuous,PctImp2011,none,NLCD_2011_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2011,Mean,1,0,0,0, +PctImp2011_HiSlp,Continuous,PctImp2011,Slp20,NLCD_2011_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2011HiSlope,Mean,1,0,2,0, +PctImp2011_MidSlp,Continuous,PctImp2011,Slp10,NLCD_2011_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2011MidSlope,Mean,1,0,3,0, +PctImp2011_RipBuf100,Continuous,PctImp2011,Rp100,NLCD_2011_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2011RipBuf100,Mean,1,0,1,0, +PctImp2016,Continuous,PctImp2016,none,NLCD_2016_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2016,Mean,1,0,0,0, +PctImp2016_HiSlp,Continuous,PctImp2016,Slp20,NLCD_2016_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2016HiSlope,Mean,1,0,2,0, +PctImp2016_MidSlp,Continuous,PctImp2016,Slp10,NLCD_2016_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2016MidSlope,Mean,1,0,3,0, +PctImp2016_RipBuf100,Continuous,PctImp2016,Rp100,NLCD_2016_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2016RipBuf100,Mean,1,0,1,0, +AgKffact,Continuous,AgKffact,none,AgKffact.tif,,Kffact,Mean,0.01,0,0,0, Kffact,Continuous,Kffact,none,kffact.tif,,Kffact,Mean,0.01,0,0,0,used to create the pctfull on the rp100 rasters lith,Categorical,lookup/Lithology_lookup.csv,none,us_lithology_1km_dd83.tif,,Lithology,Percent,1,0,0,0, lstfrz,Continuous,LST32F,none,lstfrz.tif,,lstfrz,Mean,1,0,0,0, -Manure,Continuous,Manure,none,manure.tif,,AgriculturalNitrogen,Mean,1,0,0,0, -MgO,Continuous,MgO,none,mgo20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, Mine,Point,MineDens,none,mines.shp,,Mines,Density,1,0,0,0, Mine_RipBuf100,Point,MineDens,Rp100,mines_RpBuf100.shp,,MinesRipBuf100,Density,1,0,1,0, MODIS_Ir_Ag_2002,Categorical,lookup/MODIS_IrAg_lookup.csv,none,mirad250_02.tif,,MODIS_IrrigAg,Percent,1,0,0,0, @@ -89,115 +128,62 @@ MTBS_Severity_2013,Categorical,lookup/MTBS_severity_lookup.csv,none,MTBS_Severit 
MTBS_Severity_2014,Categorical,lookup/MTBS_severity_lookup.csv,none,MTBS_Severity_2014.tif,,MTBS_Severity_2014,Percent,1,0,0,0, MTBS_Severity_2015,Categorical,lookup/MTBS_severity_lookup.csv,none,MTBS_Severity_2015.tif,,MTBS_Severity_2015,Percent,1,0,0,0, MTBS_Severity_2016,Categorical,lookup/MTBS_severity_lookup.csv,none,MTBS_Severity_2016.tif,,MTBS_Severity_2016,Percent,1,0,0,0, -N,Continuous,N,none,n20mar14.tif,,GeoChemPhys2,Mean,1,0,0,0, -N_TW2005,Continuous,N_TW2005,none,n_tw2005.tif,,TDEP,Mean,1,0,0,0, -N_TW2006,Continuous,N_TW2006,none,n_tw2006.tif,,TDEP,Mean,1,0,0,0, -N_TW2007,Continuous,N_TW2007,none,n_tw2007.tif,,TDEP,Mean,1,0,0,0, -N_TW2008,Continuous,N_TW2008,none,n_tw2008.tif,,TDEP,Mean,1,0,0,0, -N_TW2009,Continuous,N_TW2009,none,n_tw2009.tif,,TDEP,Mean,1,0,0,0, -N_TW2010,Continuous,N_TW2010,none,n_tw2010.tif,,TDEP,Mean,1,0,0,0, -N_TW2011,Continuous,N_TW2011,none,n_tw2011.tif,,TDEP,Mean,1,0,0,0, -N_TW2012,Continuous,N_TW2012,none,n_tw2012.tif,,TDEP,Mean,1,0,0,0, -Na2O,Continuous,Na2O,none,na20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, NABD,Point,NABD_Dens,none,NABD.shp,NIDStorM3;NrmStorM3,NABD,Density,1,0,0,0, +AveSN_2008,Continuous,SN_2008,none,dep_splusn_2008.tif,,NADP,Mean,1,0,0,0, +InorgNWetDep_2008,Continuous,InorgNWetDep_2008,none,dep_totalN_2008.tif,,NADP,Mean,1,0,0,0, NH4_2008,Continuous,NH4_2008,none,dep_nh4_2008.tif,,NADP,Mean,1,0,0,0, +NO3_2008,Continuous,NO3_2008,none,dep_no3_2008.tif,,NADP,Mean,1,0,0,0, +SN_2008,Continuous,SN_2008,none,dep_splusn_2008.tif,,NADP,Mean,1,0,0,0, +NH4_2014,Continuous,NH4_2014,none,NH4_dep_2014.tif,,NADP_2014_2018,Mean,1,0,0,0, +NH4_2015,Continuous,NH4_2015,none,NH4_dep_2015.tif,,NADP_2014_2018,Mean,1,0,0,0, +NH4_2016,Continuous,NH4_2016,none,NH4_dep_2016.tif,,NADP_2014_2018,Mean,1,0,0,0, +NH4_2017,Continuous,NH4_2017,none,NH4_dep_2017.tif,,NADP_2014_2018,Mean,1,0,0,0, +NH4_2018,Continuous,NH4_2018,none,NH4_dep_2018.tif,,NADP_2014_2018,Mean,1,0,0,0, 
+NO3_2014,Continuous,NO3_2014,none,NO3_dep_2014.tif,,NADP_2014_2018,Mean,1,0,0,0, +NO3_2015,Continuous,NO3_2015,none,NO3_dep_2015.tif,,NADP_2014_2018,Mean,1,0,0,0, +NO3_2016,Continuous,NO3_2016,none,NO3_dep_2016.tif,,NADP_2014_2018,Mean,1,0,0,0, +NO3_2017,Continuous,NO3_2017,none,NO3_dep_2017.tif,,NADP_2014_2018,Mean,1,0,0,0, +NO3_2018,Continuous,NO3_2018,none,NO3_dep_2018.tif,,NADP_2014_2018,Mean,1,0,0,0, nlcd2001,Categorical,lookup/NLCD2001_lookup.csv,none,NLCD_2001_Land_Cover_L48_20190424.tif,,NLCD2001,Percent,1,0,0,0, -nlcd2001_RipBuf100,Categorical,lookup/NLCD2001_lookup.csv,Rp100,NLCD_2001_Land_Cover_L48_20190424.tif,,NLCD2001RipBuf100,Percent,1,0,1,0, nlcd2001_HiSlp,Categorical,lookup/NLCD2001_lookup.csv,Slp20,NLCD_2001_Land_Cover_L48_20190424.tif,,NLCD2001HiSlope,Percent,1,0,3,0, nlcd2001_MidSlp,Categorical,lookup/NLCD2001_lookup.csv,Slp10,NLCD_2001_Land_Cover_L48_20190424.tif,,NLCD2001MidSlope,Percent,1,0,2,0, +nlcd2001_RipBuf100,Categorical,lookup/NLCD2001_lookup.csv,Rp100,NLCD_2001_Land_Cover_L48_20190424.tif,,NLCD2001RipBuf100,Percent,1,0,1,0, nlcd2004,Categorical,lookup/NLCD2004_lookup.csv,none,NLCD_2004_Land_Cover_L48_20190424.tif,,NLCD2004,Percent,1,0,0,0, -nlcd2004_RipBuf100,Categorical,lookup/NLCD2004_lookup.csv,Rp100,NLCD_2004_Land_Cover_L48_20190424.tif,,NLCD2004RipBuf100,Percent,1,0,1,0, nlcd2004_HiSlp,Categorical,lookup/NLCD2004_lookup.csv,Slp20,NLCD_2004_Land_Cover_L48_20190424.tif,,NLCD2004HiSlope,Percent,1,0,2,0, nlcd2004_MidSlp,Categorical,lookup/NLCD2004_lookup.csv,Slp10,NLCD_2004_Land_Cover_L48_20190424.tif,,NLCD2004MidSlope,Percent,1,0,3,0, +nlcd2004_RipBuf100,Categorical,lookup/NLCD2004_lookup.csv,Rp100,NLCD_2004_Land_Cover_L48_20190424.tif,,NLCD2004RipBuf100,Percent,1,0,1,0, nlcd2006,Categorical,lookup/NLCD2006_lookup.csv,none,NLCD_2006_Land_Cover_L48_20190424.tif,,NLCD2006,Percent,1,0,0,0, -nlcd2006_RipBuf100,Categorical,lookup/NLCD2006_lookup.csv,Rp100,NLCD_2006_Land_Cover_L48_20190424.tif,,NLCD2006RipBuf100,Percent,1,0,1,0, 
nlcd2006_HiSlp,Categorical,lookup/NLCD2006_lookup.csv,Slp20,NLCD_2006_Land_Cover_L48_20190424.tif,,NLCD2006HiSlope,Percent,1,0,2,0, nlcd2006_MidSlp,Categorical,lookup/NLCD2006_lookup.csv,Slp10,NLCD_2006_Land_Cover_L48_20190424.tif,,NLCD2006MidSlope,Percent,1,0,3,0, +nlcd2006_RipBuf100,Categorical,lookup/NLCD2006_lookup.csv,Rp100,NLCD_2006_Land_Cover_L48_20190424.tif,,NLCD2006RipBuf100,Percent,1,0,1,0, nlcd2008,Categorical,lookup/NLCD2008_lookup.csv,none,NLCD_2008_Land_Cover_L48_20190424.tif,,NLCD2008,Percent,1,0,0,0, -nlcd2008_RipBuf100,Categorical,lookup/NLCD2008_lookup.csv,Rp100,NLCD_2008_Land_Cover_L48_20190424.tif,,NLCD2008RipBuf100,Percent,1,0,1,0, nlcd2008_HiSlp,Categorical,lookup/NLCD2008_lookup.csv,Slp20,NLCD_2008_Land_Cover_L48_20190424.tif,,NLCD2008HiSlope,Percent,1,0,2,0, nlcd2008_MidSlp,Categorical,lookup/NLCD2008_lookup.csv,Slp10,NLCD_2008_Land_Cover_L48_20190424.tif,,NLCD2008MidSlope,Percent,1,0,3,0, +nlcd2008_RipBuf100,Categorical,lookup/NLCD2008_lookup.csv,Rp100,NLCD_2008_Land_Cover_L48_20190424.tif,,NLCD2008RipBuf100,Percent,1,0,1,0, nlcd2011,Categorical,lookup/NLCD2011_lookup.csv,none,NLCD_2011_Land_Cover_L48_20190424.tif,,NLCD2011,Percent,1,0,0,0, -nlcd2011_RipBuf100,Categorical,lookup/NLCD2011_lookup.csv,Rp100,NLCD_2011_Land_Cover_L48_20190424.tif,,NLCD2011RipBuf100,Percent,1,0,1,0, nlcd2011_HiSlp,Categorical,lookup/NLCD2011_lookup.csv,Slp20,NLCD_2011_Land_Cover_L48_20190424.tif,,NLCD2011HiSlope,Percent,1,0,2,0, nlcd2011_MidSlp,Categorical,lookup/NLCD2011_lookup.csv,Slp10,NLCD_2011_Land_Cover_L48_20190424.tif,,NLCD2011MidSlope,Percent,1,0,3,0, +nlcd2011_RipBuf100,Categorical,lookup/NLCD2011_lookup.csv,Rp100,NLCD_2011_Land_Cover_L48_20190424.tif,,NLCD2011RipBuf100,Percent,1,0,1,0, nlcd2013,Categorical,lookup/NLCD2013_lookup.csv,none,NLCD_2013_Land_Cover_L48_20190424.tif,,NLCD2013,Percent,1,0,0,0, -nlcd2013_RipBuf100,Categorical,lookup/NLCD2013_lookup.csv,Rp100,NLCD_2013_Land_Cover_L48_20190424.tif,,NLCD2013RipBuf100,Percent,1,0,1,0, 
nlcd2013_HiSlp,Categorical,lookup/NLCD2013_lookup.csv,Slp20,NLCD_2013_Land_Cover_L48_20190424.tif,,NLCD2013HiSlope,Percent,1,0,2,0, nlcd2013_MidSlp,Categorical,lookup/NLCD2013_lookup.csv,Slp10,NLCD_2013_Land_Cover_L48_20190424.tif,,NLCD2013MidSlope,Percent,1,0,3,0, +nlcd2013_RipBuf100,Categorical,lookup/NLCD2013_lookup.csv,Rp100,NLCD_2013_Land_Cover_L48_20190424.tif,,NLCD2013RipBuf100,Percent,1,0,1,0, nlcd2016,Categorical,lookup/NLCD2016_lookup.csv,none,NLCD_2016_Land_Cover_L48_20190424.tif,,NLCD2016,Percent,1,0,0,0, -nlcd2016_RipBuf100,Categorical,lookup/NLCD2016_lookup.csv,Rp100,NLCD_2016_Land_Cover_L48_20190424.tif,,NLCD2016RipBuf100,Percent,1,0,1,0, nlcd2016_HiSlp,Categorical,lookup/NLCD2016_lookup.csv,Slp20,NLCD_2016_Land_Cover_L48_20190424.tif,,NLCD2016HiSlope,Percent,1,0,2,0, nlcd2016_MidSlp,Categorical,lookup/NLCD2016_lookup.csv,Slp10,NLCD_2016_Land_Cover_L48_20190424.tif,,NLCD2016MidSlope,Percent,1,0,3,0, -NH4_2014,Continuous,NH4_2014,none,NH4_dep_2014.tif,,NADP_2014_2018,Mean,1,0,0,0, -NH4_2015,Continuous,NH4_2015,none,NH4_dep_2015.tif,,NADP_2014_2018,Mean,1,0,0,0, -NH4_2016,Continuous,NH4_2016,none,NH4_dep_2016.tif,,NADP_2014_2018,Mean,1,0,0,0, -NH4_2017,Continuous,NH4_2017,none,NH4_dep_2017.tif,,NADP_2014_2018,Mean,1,0,0,0, -NH4_2018,Continuous,NH4_2018,none,NH4_dep_2018.tif,,NADP_2014_2018,Mean,1,0,0,0, -NO3_2014,Continuous,NO3_2014,none,NO3_dep_2014.tif,,NADP_2014_2018,Mean,1,0,0,0, -NO3_2015,Continuous,NO3_2015,none,NO3_dep_2015.tif,,NADP_2014_2018,Mean,1,0,0,0, -NO3_2016,Continuous,NO3_2016,none,NO3_dep_2016.tif,,NADP_2014_2018,Mean,1,0,0,0, -NO3_2017,Continuous,NO3_2017,none,NO3_dep_2017.tif,,NADP_2014_2018,Mean,1,0,0,0, -NO3_2018,Continuous,NO3_2018,none,NO3_dep_2018.tif,,NADP_2014_2018,Mean,1,0,0,0, -NO3_2008,Continuous,NO3_2008,none,dep_no3_2008.tif,,NADP,Mean,1,0,0,0, -NOXI_TW2005,Continuous,NOXI_TW2005,none,noxi_tw2005.tif,,TDEP,Mean,1,0,0,0, -NOXI_TW2006,Continuous,NOXI_TW2006,none,noxi_tw2006.tif,,TDEP,Mean,1,0,0,0, 
-NOXI_TW2007,Continuous,NOXI_TW2007,none,noxi_tw2007.tif,,TDEP,Mean,1,0,0,0, -NOXI_TW2008,Continuous,NOXI_TW2008,none,noxi_tw2008.tif,,TDEP,Mean,1,0,0,0, -NOXI_TW2009,Continuous,NOXI_TW2009,none,noxi_tw2009.tif,,TDEP,Mean,1,0,0,0, -NOXI_TW2010,Continuous,NOXI_TW2010,none,noxi_tw2010.tif,,TDEP,Mean,1,0,0,0, -NOXI_TW2011,Continuous,NOXI_TW2011,none,noxi_tw2011.tif,,TDEP,Mean,1,0,0,0, -NOXI_TW2012,Continuous,NOXI_TW2012,none,noxi_tw2012.tif,,TDEP,Mean,1,0,0,0, -NPDES,Point,NPDESDens,none,NPDES_Major.shp,,EPA_FRS,Density,1,0,0,0, -NPDES_RipBuf100,Point,NPDESDens,Rp100,NPDES_RpBuf100.shp,,EPA_FRSRipBuf100,Density,1,0,1,0, -NRED_TW2005,Continuous,NRED_TW2005,none,nred_tw2005.tif,,TDEP,Mean,1,0,0,0, -NRED_TW2006,Continuous,NRED_TW2006,none,nred_tw2006.tif,,TDEP,Mean,1,0,0,0, -NRED_TW2007,Continuous,NRED_TW2007,none,nred_tw2007.tif,,TDEP,Mean,1,0,0,0, -NRED_TW2008,Continuous,NRED_TW2008,none,nred_tw2008.tif,,TDEP,Mean,1,0,0,0, -NRED_TW2009,Continuous,NRED_TW2009,none,nred_tw2009.tif,,TDEP,Mean,1,0,0,0, -NRED_TW2010,Continuous,NRED_TW2010,none,nred_tw2010.tif,,TDEP,Mean,1,0,0,0, -NRED_TW2011,Continuous,NRED_TW2011,none,nred_tw2011.tif,,TDEP,Mean,1,0,0,0, -NRED_TW2012,Continuous,NRED_TW2012,none,nred_tw2012.tif,,TDEP,Mean,1,0,0,0, -Om,Continuous,Om,none,om.tif,,STATSGO_Set2,Mean,0.01,0,0,0, -P2O5,Continuous,P2O5,none,p20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, -PADUS,Categorical,lookup/GAP_Status_lookup.csv,none,gap_sts.tif,,PADUS,Percent,1,0,0,0, -PctFrstLoss,Continuous,PctFrstLoss,none,loss.tif,,ForestLoss,Mean,1,0,0,0, -PctFrstLoss_RipBuf100,Continuous,PctFrstLoss_RipBuf100,Rp100,loss.tif,,ForestLossRipBuf100,Mean,1,0,1,0, -PctFrstLossByYear,Categorical,lookup/ForestLossByYear_lookup.csv,none,lossyr.tif,,ForestLossByYear0013,Percent,1,0,0,0, -PctFrstLossByYear_RipBuf100,Categorical,lookup/ForestLossByYear_lookup.csv,Rp100,lossyr.tif,,ForestLossByYear0013RipBuf100,Percent,1,0,1,0, 
-PctImp2001,Continuous,PctImp2001,none,NLCD_2001_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2001,Mean,1,0,0,0, -PctImp2001_HiSlp,Continuous,PctImp2001,Slp20,NLCD_2001_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2001HiSlope,Mean,1,0,2,0, -PctImp2001_MidSlp,Continuous,PctImp2001,Slp10,NLCD_2001_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2001MidSlope,Mean,1,0,3,0, -PctImp2001_RipBuf100,Continuous,PctImp2001,Rp100,NLCD_2001_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2001RipBuf100,Mean,1,0,1,0, -PctImp2006,Continuous,PctImp2006,none,NLCD_2006_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2006,Mean,1,0,0,0, -PctImp2006_HiSlp,Continuous,PctImp2006,Slp20,NLCD_2006_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2006HiSlope,Mean,1,0,2,0, -PctImp2006_MidSlp,Continuous,PctImp2006,Slp10,NLCD_2006_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2006MidSlope,Mean,1,0,3,0, -PctImp2006_RipBuf100,Continuous,PctImp2006,Rp100,NLCD_2006_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2006RipBuf100,Mean,1,0,1,0, -PctImp2011,Continuous,PctImp2011,none,NLCD_2011_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2011,Mean,1,0,0,0, -PctImp2011_HiSlp,Continuous,PctImp2011,Slp20,NLCD_2011_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2011HiSlope,Mean,1,0,2,0, -PctImp2011_MidSlp,Continuous,PctImp2011,Slp10,NLCD_2011_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2011MidSlope,Mean,1,0,3,0, -PctImp2011_RipBuf100,Continuous,PctImp2011,Rp100,NLCD_2011_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2011RipBuf100,Mean,1,0,1,0, -PctImp2016,Continuous,PctImp2016,none,NLCD_2016_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2016,Mean,1,0,0,0, -PctImp2016_HiSlp,Continuous,PctImp2016,Slp20,NLCD_2016_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2016HiSlope,Mean,1,0,2,0, 
-PctImp2016_MidSlp,Continuous,PctImp2016,Slp10,NLCD_2016_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2016MidSlope,Mean,1,0,3,0, -PctImp2016_RipBuf100,Continuous,PctImp2016,Rp100,NLCD_2016_Impervious_descriptor_L48_20190405.tif,,ImperviousSurfaces2016RipBuf100,Mean,1,0,1,0, +nlcd2016_RipBuf100,Categorical,lookup/NLCD2016_lookup.csv,Rp100,NLCD_2016_Land_Cover_L48_20190424.tif,,NLCD2016RipBuf100,Percent,1,0,1,0, PctNonAgIntrodManagVeg,Continuous,PctNonAgIntrodManagVeg,none,IntrodManagVeg.tif,,NonAgIntrodManagVeg,Mean,100,0,0,0, PctNonAgIntrodManagVeg_RipBuf100,Continuous,PctNonAgIntrodManagVeg,Rp100,IntrodManagVegRipBuf100.tif,,NonAgIntrodManagVegRipBuf100,Mean,100,0,1,0, -PerDun,Continuous,PerDun,none,perdun.tif,,FlowMetrics,Mean,1,0,0,0, -PerHor,Continuous,PerHor,none,perhor.tif,,FlowMetrics,Mean,1,0,0,0, -Perm,Continuous,Perm,none,perm.tif,,STATSGO_Set2,Mean,0.01,0,0,0, -PERMH,Continuous,PERMH,none,PERMH.tif,,FlowMetrics,Mean,1,0,0,0, +PADUS,Categorical,lookup/GAP_Status_lookup.csv,none,gap_sts.tif,,PADUS,Percent,1,0,0,0, Pestic97,Continuous,Pestic97,none,pestic.tif,,Pestic97,Mean,1,0,0,0, -Phos_Ag_Balance,Continuous,Phos_Ag_Balance,none,Phos_Ag_Balance.tif,,Compton_Phos_Inputs,Mean,1,0,0,0, -Phos_Crop_Uptake,Continuous,Phos_Crop_Uptake,none,Phos_Crop_Uptake.tif,,Compton_Phos_Inputs,Mean,1,0,0,0, -Phos_Fert,Continuous,Phos_Fert,none,Phos_Fert.tif,,Compton_Phos_Inputs,Mean,1,0,0,0, -Phos_Manure,Continuous,Phos_Manure,none,Phos_Manure.tif,,Compton_Phos_Inputs,Mean,1,0,0,0, pmax,Continuous,PMAX,none,pmax_usgs.tif,,pmax,Mean,1,0,0,0, pmin,Continuous,PMIN,none,pmin_usgs.tif,,pmin,Mean,1,0,0,0, PointN,Point,PointNSum,none,PointN_usgs.shp,kgn_new,PointN,Density,1,0,0,0, -PopDen2010,Continuous,PopDen2010,none,POP_SQKM.tif,,USCensus2010,Mean,1,0,0,0, -PopDen2010_RipBuf100,Continuous,PopDen2010,Rp100,POP_SQKM.tif,,USCensus2010RipBuf100,Mean,1,0,1,0, 
+Precip_Minus_EVT,Continuous,Precip_Minus_EVT,none,USAavgPeriod_pptSurp_1994to2016.tif,,Precip_Minus_EVT,Mean,1,0,0,0, precip,Continuous,Precip,none,precip.tif,,PRISM,Mean,1,0,0,0, +tmax,Continuous,Tmax,none,tmax.tif,,PRISM,Mean,1,0,0,0, +tmean,Continuous,Tmean,none,tmean.tif,,PRISM,Mean,1,0,0,0, +tmin,Continuous,Tmin,none,tmin.tif,,PRISM,Mean,1,0,0,0, PRISMppt_2008,Continuous,Precip08,none,PRISMppt_2008.tif,,PRISM_0809,Mean,1,0,0,0, PRISMppt_2009,Continuous,Precip09,none,PRISMppt_2009.tif,,PRISM_0809,Mean,1,0,0,0, PRISMtmean_2008,Continuous,Tmean08,none,PRISMtmean_2008.tif,,PRISM_0809,Mean,1,0,0,0, @@ -207,12 +193,42 @@ PSUMPY_2009,Continuous,PSUMPY_2009,none,PSUMPY_2009.tif,,PSUMPY_2009,Mean,1,0,0, PSUMPY_2013,Continuous,PSUMPY_2013,none,PSUMPY_2013.tif,,PSUMPY_2013,Mean,1,0,0,0, PSUMPY_2014,Continuous,PSUMPY_2014,none,PSUMPY_2014.tif,,PSUMPY_2014,Mean,1,0,0,0, PsumWs,Continuous,PsumWs,none,psum.tif,,PsumWs,Mean,1,0,0,0, -RckDep,Continuous,RckDep,none,rckdep.tif,,STATSGO_Set2,Mean,0.01,0,0,0, -RdCrs,Continuous,RdCrs,none,rdstcrs.tif,SlpWtd,RoadStreamCrossings,Density,0.03,0,0,0, RdDens,Continuous,RdDens,none,roadden.tif,,RoadDensity,Mean,1,0,0,0, RdDens_RipBuf100,Continuous,RdDens,Rp100,roadden.tif,,RoadDensityRipBuf100,Mean,1,0,1,0, +RdCrs,Continuous,RdCrs,none,rdstcrs.tif,SlpWtd,RoadStreamCrossings,Density,0.03,0,0,0, +RockN,Continuous,RockN,none,RockN_USA_USGSproj_1km_kgkm2.tif,,RockN,Mean,1,0,0,0, Runoff,Continuous,Runoff,none,runoff.tif,,Runoff,Mean,1,0,0,0, -S,Continuous,S,none,s20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, +Clay,Continuous,Clay,none,clay.tif,,STATSGO_Set1,Mean,0.01,0,0,0, +Sand,Continuous,Sand,none,sand.tif,,STATSGO_Set1,Mean,0.01,0,0,0, +Om,Continuous,Om,none,om.tif,,STATSGO_Set2,Mean,0.01,0,0,0, +Perm,Continuous,Perm,none,perm.tif,,STATSGO_Set2,Mean,0.01,0,0,0, +RckDep,Continuous,RckDep,none,rckdep.tif,,STATSGO_Set2,Mean,0.01,0,0,0, +WtDep,Continuous,WtDep,none,wtdep.tif,,STATSGO_Set2,Mean,0.01,0,0,0, 
+sw_flux,Continuous,sw_flux,none,TN_load_px.tif,,sw_flux,Mean,1,0,0,0, +N_TW2005,Continuous,N_TW2005,none,n_tw2005.tif,,TDEP,Mean,1,0,0,0, +N_TW2006,Continuous,N_TW2006,none,n_tw2006.tif,,TDEP,Mean,1,0,0,0, +N_TW2007,Continuous,N_TW2007,none,n_tw2007.tif,,TDEP,Mean,1,0,0,0, +N_TW2008,Continuous,N_TW2008,none,n_tw2008.tif,,TDEP,Mean,1,0,0,0, +N_TW2009,Continuous,N_TW2009,none,n_tw2009.tif,,TDEP,Mean,1,0,0,0, +N_TW2010,Continuous,N_TW2010,none,n_tw2010.tif,,TDEP,Mean,1,0,0,0, +N_TW2011,Continuous,N_TW2011,none,n_tw2011.tif,,TDEP,Mean,1,0,0,0, +N_TW2012,Continuous,N_TW2012,none,n_tw2012.tif,,TDEP,Mean,1,0,0,0, +NOXI_TW2005,Continuous,NOXI_TW2005,none,noxi_tw2005.tif,,TDEP,Mean,1,0,0,0, +NOXI_TW2006,Continuous,NOXI_TW2006,none,noxi_tw2006.tif,,TDEP,Mean,1,0,0,0, +NOXI_TW2007,Continuous,NOXI_TW2007,none,noxi_tw2007.tif,,TDEP,Mean,1,0,0,0, +NOXI_TW2008,Continuous,NOXI_TW2008,none,noxi_tw2008.tif,,TDEP,Mean,1,0,0,0, +NOXI_TW2009,Continuous,NOXI_TW2009,none,noxi_tw2009.tif,,TDEP,Mean,1,0,0,0, +NOXI_TW2010,Continuous,NOXI_TW2010,none,noxi_tw2010.tif,,TDEP,Mean,1,0,0,0, +NOXI_TW2011,Continuous,NOXI_TW2011,none,noxi_tw2011.tif,,TDEP,Mean,1,0,0,0, +NOXI_TW2012,Continuous,NOXI_TW2012,none,noxi_tw2012.tif,,TDEP,Mean,1,0,0,0, +NRED_TW2005,Continuous,NRED_TW2005,none,nred_tw2005.tif,,TDEP,Mean,1,0,0,0, +NRED_TW2006,Continuous,NRED_TW2006,none,nred_tw2006.tif,,TDEP,Mean,1,0,0,0, +NRED_TW2007,Continuous,NRED_TW2007,none,nred_tw2007.tif,,TDEP,Mean,1,0,0,0, +NRED_TW2008,Continuous,NRED_TW2008,none,nred_tw2008.tif,,TDEP,Mean,1,0,0,0, +NRED_TW2009,Continuous,NRED_TW2009,none,nred_tw2009.tif,,TDEP,Mean,1,0,0,0, +NRED_TW2010,Continuous,NRED_TW2010,none,nred_tw2010.tif,,TDEP,Mean,1,0,0,0, +NRED_TW2011,Continuous,NRED_TW2011,none,nred_tw2011.tif,,TDEP,Mean,1,0,0,0, +NRED_TW2012,Continuous,NRED_TW2012,none,nred_tw2012.tif,,TDEP,Mean,1,0,0,0, S_TW2005,Continuous,S_TW2005,none,s_tw2005.tif,,TDEP,Mean,1,0,0,0, S_TW2006,Continuous,S_TW2006,none,s_tw2006.tif,,TDEP,Mean,1,0,0,0, 
S_TW2007,Continuous,S_TW2007,none,s_tw2007.tif,,TDEP,Mean,1,0,0,0, @@ -221,34 +237,18 @@ S_TW2009,Continuous,S_TW2009,none,s_tw2009.tif,,TDEP,Mean,1,0,0,0, S_TW2010,Continuous,S_TW2010,none,s_tw2010.tif,,TDEP,Mean,1,0,0,0, S_TW2011,Continuous,S_TW2011,none,s_tw2011.tif,,TDEP,Mean,1,0,0,0, S_TW2012,Continuous,S_TW2012,none,s_tw2012.tif,,TDEP,Mean,1,0,0,0, -Sand,Continuous,Sand,none,sand.tif,,STATSGO_Set1,Mean,0.01,0,0,0, -SiO2,Continuous,SiO2,none,si20mar14.tif,,GeoChemPhys1,Mean,1,0,0,0, -SN_2008,Continuous,SN_2008,none,dep_splusn_2008.tif,,NADP,Mean,1,0,0,0, -Superfund,Point,SuperfundDens,none,Superfund.shp,,EPA_FRS,Density,1,0,0,0, -Superfund_RipBuf100,Point,SuperfundDens,Rp100,Superfund_RpBuf100.shp,,EPA_FRSRipBuf100,Density,1,0,1,0, -tmax,Continuous,Tmax,none,tmax.tif,,PRISM,Mean,1,0,0,0, -tmean,Continuous,Tmean,none,tmean.tif,,PRISM,Mean,1,0,0,0, -TMEAN_S_2013,Continuous,TMEAN_S_2013,none,TMEAN_S_2013.tif,,FlowMetrics,Mean,1,0,0,0, -TMEAN_S_2014,Continuous,TMEAN_S_2014,none,TMEAN_S_2014.tif,,FlowMetrics,Mean,1,0,0,0, -TMEAN_W_2013,Continuous,TMEAN_W_2013,none,TMEAN_W_2013.tif,,FlowMetrics,Mean,1,0,0,0, -TMEAN_W_2014,Continuous,TMEAN_W_2014,none,TMEAN_W_2014.tif,,FlowMetrics,Mean,1,0,0,0, TMEANPW_2008,Continuous,TMEANPW_2008,none,TMEANPW_2008.tif,,TMEANPW_2008,Mean,1,0,0,0, TMEANPW_2009,Continuous,TMEANPW_2009,none,TMEANPW_2009.tif,,TMEANPW_2009,Mean,1,0,0,0, -TMEANPW_2013,Continuous,TMEANPW_2013,none,TMEANPW_2013.tif,,FlowMetrics,Mean,1,0,0,0, -TMEANPW_2014,Continuous,TMEANPW_2014,none,TMEANPW_2014.tif,,FlowMetrics,Mean,1,0,0,0, TMEANSS_2008,Continuous,TMEANSS_2008,none,TMEAN_S_2008.tif,,TMEANSS_2008,Mean,1,0,0,0, TMEANSS_2009,Continuous,TMEANSS_2009,none,TMEAN_S_2009.tif,,TMEANSS_2009,Mean,1,0,0,0, TMEANSY_2008,Continuous,TMEANSY_2008,none,TMEANSY_2008.tif,,TMEANSY_2008,Mean,1,0,0,0, TMEANSY_2009,Continuous,TMEANSY_2009,none,TMEANSY_2009.tif,,TMEANSY_2009,Mean,1,0,0,0, -tmin,Continuous,Tmin,none,tmin.tif,,PRISM,Mean,1,0,0,0, 
-TRI,Point,TRIDens,none,TRI.shp,,EPA_FRS,Density,1,0,0,0, -TRI_RipBuf100,Point,TRIDens,Rp100,TRI_RpBuf100.shp,,EPA_FRSRipBuf100,Density,1,0,1,0, US_Level_III_Ecoregions,Categorical,lookup/US_Level_III_Ecoregions_lookup.csv,none,US_Level_III_Ecoregions.tif,,US_Level_III_Ecoregions,Percent,1,0,0,0, +HUDen2010,Continuous,HUDen2010,none,HU_SQKM.tif,,USCensus2010,Mean,1,0,0,0, +PopDen2010,Continuous,PopDen2010,none,POP_SQKM.tif,,USCensus2010,Mean,1,0,0,0, +HUDen2010_RipBuf100,Continuous,HUDen2010,Rp100,HU_SQKM.tif,,USCensus2010RipBuf100,Mean,1,0,1,0, +PopDen2010_RipBuf100,Continuous,PopDen2010,Rp100,POP_SQKM.tif,,USCensus2010RipBuf100,Mean,1,0,1,0, WetnessIndex,Continuous,WetIndex,none,cti_v3.tif,,WetIndx,Mean,1,0,0,0, -WtDep,Continuous,WtDep,none,wtdep.tif,,STATSGO_Set2,Mean,0.01,0,0,0, WWTPAll,Point,WWTPAllDens,none,WWTP_All_CWA_Active_2013_CONUS.shp,,WWTP,Density,1,0,0,0, WWTPMajor,Point,WWTPMajorDens,none,WWTP_Major_CWA_Active_2013_CONUS.shp,,WWTP,Density,1,0,0,0, WWTPMinor,Point,WWTPMinorDens,none,WWTP_Minor_CWA_Active_2013_CONUS.shp,,WWTP,Density,1,0,0,0, -Precip_Minus_EVT,Continuous,Precip_Minus_EVT,none,USAavgPeriod_pptSurp_1994to2016.tif,,Precip_Minus_EVT,Mean,1,0,0,0, -RockN,Continuous,RockN,none,RockN_USA_USGSproj_1km_kgkm2.tif,,RockN,Mean,1,0,0,0, -sw_flux,Continuous,sw_flux,none,TN_load_px.tif,,sw_flux,Mean,1,0,0,0, diff --git a/MakeFinalTables.py b/MakeFinalTables.py index 1f55665..fa369a9 100644 --- a/MakeFinalTables.py +++ b/MakeFinalTables.py @@ -15,7 +15,7 @@ import zipfile import numpy as np import pandas as pd -from pathlib2 import Path +from pathlib import Path from stream_cat_config import OUT_DIR, LENGTHS, FINAL_DIR @@ -38,9 +38,8 @@ def build_stats(tbl, stats): FINAL_DIR = Path(FINAL_DIR) # TODO: change this in the config ctl = pd.read_csv("ControlTable_StreamCat.csv") # TODO move CONTROL_TABLE to config -inputs = np.load("accum_npy/vpu_inputs.npy").item() +inputs = np.load("accum_npy/vpu_inputs.npy", allow_pickle=True).item() -tables = dict() 
runners = ctl.query("run == 1").groupby("Final_Table_Name") tables = runners["FullTableName"].unique().to_dict() # check that all accumulated files are present @@ -55,23 +54,29 @@ def build_stats(tbl, stats): if len(missing) > 0: for miss in missing: - print("Missing {}".format(miss.name)) - print "Check output from StreamCat.py" + print(f"Missing {miss.name}") + print("Check output from StreamCat.py") sys.exit() states_lookup = Path("state_dict.npz") -states_dict = np.load(str(states_lookup))["data"].item() +states_dict = np.load(str(states_lookup), + allow_pickle=True, + encoding="latin1")["data"].item() STATES_DIR = FINAL_DIR.parents[0] / "States" if not FINAL_DIR.exists(): FINAL_DIR.mkdir(parents=True) +if not (FINAL_DIR / "zips").exists(): (FINAL_DIR / "zips").mkdir() +if not STATES_DIR.exists(): STATES_DIR.mkdir() +if not (STATES_DIR / "zips").exists(): + (STATES_DIR / "zips").mkdir() region_fn = "{}_Region{}.csv" for table, metrics in tables.items(): - print("Running {} .....into {}".format(table, FINAL_DIR)) + print(f"Running {table} .....into {FINAL_DIR}") # this will print stats for every final table, used for metadata stats = dict() # Looop through NHD Hydro-regions @@ -243,39 +248,5 @@ def build_stats(tbl, stats): print(table) for stat in stats: - print stat + " " + str(stats[stat]) - print "All Done....." 
- -########################### -#table = "RoadStreamCrossings" -#aa = [] -#for f in os.listdir(str(REDO_DIR)): -# s = f.split("_Region")[0] -# if not s in aa: -# aa.append(f.split("_Region")[0]) -#FINAL_DIR = Path("L:/Priv/CORFiles/Geospatial_Library_Projects/StreamCat/FTP_Staging/HydroRegions") -#for table in tables: -#for table in aa[49:-1]: -# print(table) -# for vpu in inputs: -# print(vpu) -# orig = pd.read_csv(FINAL_DIR / region_fn.format(table,vpu)) -# new = pd.read_csv(REDO_DIR / region_fn.format(table,vpu)) -# if not orig.equals(new): -# print(table, vpu, orig.equals(new)) - -#for col in orig.columns: -# print(col, (orig[col] == new[col]).all()) -# if not (orig[col] == new[col]).all(): -# break -# -#qq = pd.merge(orig[["COMID", col]], new[["COMID", col]], -# on="COMID", suffixes=("_orig", "_new")) - - -#for state in states_dict: -# -# f = fn.format(table, state) -# orig = pd.read_csv(STATES_DIR / f) -# new = pd.read_csv(REDO_STATES / f) -# print(table, state, orig.equals(new)) + print (stat + " " + str(stats[stat])) + print("All Done.....") diff --git a/README.md b/README.md index 94a980a..9c27baf 100644 --- a/README.md +++ b/README.md @@ -1,55 +1,102 @@ # StreamCat ## Description: -The StreamCat Dataset (http://www2.epa.gov/national-aquatic-resource-surveys/streamcat) provides summaries of natural and anthropogenic landscape features for ~2.65 million streams, and their associated catchments, within the conterminous USA. This repo contains code used in StreamCat to process a suite of landscape rasters to watersheds for streams and their associated catchments (local reach contributing area) within the conterminous USA using the [NHDPlus Version 2](http://www.horizon-systems.com/NHDPlus/NHDPlusV2_data.php) as the geospatial framework. See [Running-StreamCat-Scripts](https://github.com/USEPA/StreamCat/wiki/Running-StreamCat-Scripts) for details on running the scripts to produce StreamCat data. 
+The StreamCat Dataset (http://www2.epa.gov/national-aquatic-resource-surveys/streamcat) provides summaries of natural and anthropogenic landscape features for ~2.65 million streams, and their associated catchments, within the conterminous USA. This repo contains code used in StreamCat to process a suite of landscape rasters to watersheds for streams and their associated catchments (local reach contributing area) within the conterminous USA using the [NHDPlus Version 2](http://www.horizon-systems.com/NHDPlus/NHDPlusV2_data.php) as the geospatial framework. ## Necessary Python Packages and Installation Tips -The scripts for StreamCat rely on several python modules a user will need to install such as numpy, pandas, gdal, fiona, rasterio, geopandas, shapely, pysal, and ArcPy with an ESRI license (minimal steps still using ArcPy). We highly recommend using a scientific python distribution such as [Anaconda](https://www.continuum.io/downloads) or [Enthought Canopy](https://www.enthought.com/products/canopy/). We used the conda package manager to install necessary python modules. Our essential packages and versions used are listed below (Windows 64 and Python 2.7.11): +The scripts for StreamCat rely on several python modules a user will need to install such as numpy, pandas, gdal, fiona, rasterio, geopandas, shapely, pysal, and ArcPy with an ESRI license (minimal steps still using ArcPy). We highly recommend using a scientific python distribution such as [Anaconda](https://www.continuum.io/downloads) or [Enthought Canopy](https://www.enthought.com/products/canopy/). We used the conda package manager to install necessary python modules. 
Our essential packages and versions when code was last used are listed below - note that other configurations may work, we simply have verified this particular combination (Windows 64 and Python 3.6.10): | Package | Version | | ------------- |--------------:| -| fiona | 1.7.7 | -| gdal | 2.2.0 | -| geopandas | 0.2.1 | -| geos | 3.5.1 | -| libgdal | 2.0.0 | -| numpy | 1.12.1 | -| pandas | 0.20.2 | -| pyproj | 1.9.5.1 | -| pysal | 1.13.0 | -| rasterio | 1.0a9 | -| shapely | 1.5.17 | +| python | 3.6.10 | +| fiona | 1.8.9.post2 | +| gdal | 2.4.4 | +| geopandas | 0.8.1 | +| geos | 3.8.1 | +| libgdal | 2.4.4 | +| numpy | 1.19.1 | +| pandas | 1.1.1 | +| pyproj | 2.6.1 | +| rasterio | 1.1.5 | +| shapely | 1.7.1 | If you are using Anaconda, creating a new, clean 'StreamCat' environment with these needed packages can be done easily and simply one of several ways: * In your conda shell, add one necessary channel and then download the streamcat environment from the Anaconda cloud: + conda config --add channels conda-forge - + conda env create mweber36/streamcat + + conda env create mweber36/StreamCat * Alternatively, using the streamcat.yml file in this repository, in your conda shell cd to the directory where your streamcat.yml file is located and run: - + conda env create -f StreamCat.yml + + conda env create -f streamcat_py3.yml * To build environment yourself, do: - + conda env create -n StreamCat rasterio geopandas - + pip install georasters + + conda create --name StreamCat -c conda-forge python=3.6 geopandas rasterio=1.1.5=py36h2409764_0 -* To activate this new environment and open Spyder, type the following at the conda prompt +* To activate this new environment, you'll need to install Spyder in the environment, and possibly re-install pyqt with specific version (we did). 
You may even need to uninstall pyqt after installing Spyder (as below) and then specifically re-install: + + + install spyder=4.1.4=py36h9f0ad1d_0 -c conda-forge + + install pyqt=5.12.3=py36h6538335_1 -c conda-forge + +* To open Spyder, type the following at the conda prompt + activate Streamcat Then + Spyder -Finally, to use arcpy in this new environment, you will need to copy your Arc .pth file into your new environment. Copy the .pth file for your install of ArcGIS located in a directory like: +Finally, to use arcpy in this new environment, you will need to copy several ArcPro files and folders to your new environment as follows: + ++ C:/Program Files/ArcGIS/Pro/bin/Python/envs/arcgispro-py3/Lib/site-packages/ArcGISPro.pth + ++ C:/Program Files/ArcGIS/Pro/bin/Python/envs/arcgispro-py3/Lib/site-packages/Arcgisscripting -+ C:\Python27\ArcGISx6410.3\Lib\site-packages\DTBGGP64.pth ++ C:/Program Files/ArcGIS/Pro/bin/Python/envs/arcgispro-py3/Lib/site-packages/arcpy_wmx + ++ C:/Program Files/ArcGIS/Pro/bin/Python/envs/arcgispro-py3/Lib/site-packages/Gapy To your environment directory which should look something like: -+ C:\Anaconda\envs\streamcat\Lib\site-packages\DTBGGP64.pth ++ C:/Users/mweber/AppData/Local/Continuum/anaconda3/envs/StreamCat/Lib/site-packages + +You will also need to install dateutils: + + conda install -c anaconda python-dateutil Note that the exact paths may vary depending on the version of ArcGIS and Anaconda you have installed and the configuration of your computer +## How to Run Scripts +### The scripts make use of 'control tables' to pass all the particular parameters to the three primary scripts: + ++ [StreamCat_PreProcessing.py](https://github.com/USEPA/StreamCat/blob/master/StreamCat_PreProcessing.py) ++ [StreamCat.py](https://github.com/USEPA/StreamCat/blob/master/StreamCat.py) ++ [MakeFinalTables.py](https://github.com/USEPA/StreamCat/blob/master/StreamCat_functions.py). 
+ +In turn, these scripts rely on generic functions in [StreamCat_functions.py](https://github.com/USEPA/StreamCat/blob/master/StreamCat_functions.py). + +To generate the riparian buffers we used in [StreamCat](ftp://newftp.epa.gov/EPADataCommons/ORD/NHDPlusLandscapeAttributes/StreamCat/Documentation/ReadMe.html) we used the code in [RiparianBuffer.py](https://github.com/USEPA/StreamCat/blob/master/RiparianBuffer.py) + +To generate percent full for catchments on the US border for point features, we used the code in [border.py](https://github.com/USEPA/StreamCat/blob/master/border.py) + +Examples of control tables used in scripts are: ++ [RasterControlTable](https://github.com/USEPA/StreamCat/blob/master/RasterControlTable.csv) ++ [ReclassTable](https://github.com/USEPA/StreamCat/blob/master/ReclassTable.csv) ++ [FieldCalcTable](https://github.com/USEPA/StreamCat/blob/master/FieldCalcTable.csv) ++ [Lithology_lookup](https://github.com/USEPA/StreamCat/blob/master/Lithology_lookup.csv) ++ [NLCD2006_lookup](https://github.com/USEPA/StreamCat/blob/master/NLCD2006_lookup.csv) ++ [ControlTable_StreamCat](https://github.com/USEPA/StreamCat/blob/master/ControlTable_StreamCat.csv) ++ [MakeFinalTables](https://github.com/USEPA/StreamCat/blob/master/MakeFinalTables.csv) + +### Running StreamCat.py to generate new StreamCat metrics + +After editing the control tables to provide necessary information, such as directory paths, the following steps will execute processes to generate new watershed metrics for the conterminous US. All examples in the control table are for layers (e.g., STATSGO % clay content of soils) that were processed as part of the StreamCat Dataset. This example assumes the scripts are run in Anaconda within a Conda shell. + +1. Edit [ControlTable_StreamCat](https://github.com/USEPA/StreamCat/blob/master/ControlTable_StreamCat.csv) and set desired layer's "run" column to 1. All other columns should be set to 0 +2. Open a Conda shell and type "activate StreamCat" +3.
At the Conda shell type: "Python" +4. Drag and drop "StreamCat.py" to the Conda shell from a file manager followed by another space +5. Drag and drop the control table to the Conda shell + +Final text in Conda shell should resemble this: python C:\some_path\StreamCat.py C:\some_other_path\ControlTable.csv + ## EPA Disclaimer The United States Environmental Protection Agency (EPA) GitHub project code is provided on an "as is" basis and the user assumes responsibility for its use. EPA has relinquished control of the information and no longer has responsibility to protect the integrity , confidentiality, or availability of the information. Any reference to specific commercial products, processes, or services by service mark, trademark, manufacturer, or otherwise, does not constitute or imply their endorsement, recommendation or favoring by EPA. The EPA seal and logo shall not be used in any manner to imply endorsement of any commercial product or activity by EPA or the United States Government. 
diff --git a/StreamCat.py b/StreamCat.py index 7a581d4..b2aea84 100644 --- a/StreamCat.py +++ b/StreamCat.py @@ -73,113 +73,100 @@ "accum_npy/vpu_inputs.npy", allow_pickle=True ).item() -for line in range(len(ctl.values)): # loop through each FullTableName in control table - if ctl.run[line] == 1: # check 'run' field from the table, if 1 run, if not, skip - print("running " + str(ctl.FullTableName[line])) - # Load metric specific variables - accum_type = ctl.accum_type[line] - RPU = int(ctl.by_RPU[line]) - mask = ctl.use_mask[line] - apm = ctl.AppendMetric[line] - if apm == "none": - apm = "" - if mask == 1: - mask_dir = mask_dir_RP100 - elif mask == 2: - mask_dir = mask_dir_Slp10 - elif mask == 3: - mask_dir = mask_dir_Slp20 - else: - mask_dir = "" - LL = "%s/%s" % (LYR_DIR, ctl.LandscapeLayer[line]) - ftn = ctl.FullTableName[line] - summaryfield = None - if type(ctl.summaryfield[line]) == str: - summaryfield = ctl.summaryfield[line].split(";") - if accum_type == "Point": # Load in point geopandas table and Pct_Full table - if mask == 0: # TODO: script to create this pct_full_file - pct_full_file = pct_full_file - if mask == 1: # TODO: script to create point in buffer for processing? 
- pct_full_file = pct_full_file_RP100 - pct_full = pd.read_csv(pct_full_file) - points = gpd.GeoDataFrame.from_file(LL) - if not os.path.exists(OUT_DIR + "/DBF_stash"): - os.mkdir(OUT_DIR + "/DBF_stash") - Connector = "%s/%s_connectors.csv" % ( - OUT_DIR, - ftn, - ) # File string to store InterVPUs needed for adjustments - catTime = dt.now() - for zone in INPUTS: - if not os.path.exists("%s/%s_%s.csv" % (OUT_DIR, ftn, zone)): - hydroregion = INPUTS[zone] - pre = "%s/NHDPlus%s/NHDPlus%s" % (NHD_DIR, hydroregion, zone) - if not accum_type == "Point": - if len(mask_dir) > 1: - izd = "%s/%s.tif" % (mask_dir, zone) - else: - izd = "%s/NHDPlusCatchment/cat" % (pre) - cat = createCatStats( - accum_type, - LL, - izd, - OUT_DIR, - zone, - RPU, - mask_dir, - NHD_DIR, - hydroregion, - apm, - ) - if accum_type == "Point": - izd = "%s/NHDPlusCatchment/Catchment.shp" % (pre) - cat = PointInPoly( - points, zone, izd, pct_full, mask_dir, apm, summaryfield - ) - cat.to_csv("%s/%s_%s.csv" % (OUT_DIR, ftn, zone), index=False) - in2accum = len(cat.columns) - print("Cat Results Complete in : " + str(dt.now() - catTime)) - try: - # if in2accum not defined...Cat process done,but error thrown in accum - in2accum - except NameError: - # get number of columns to test if accumulation needs to happen - in2accum = len(pd.read_csv("%s/%s_%s.csv" % (OUT_DIR, ftn, zone)).columns) - accumTime = dt.now() - for zone in INPUTS: - cat = pd.read_csv("%s/%s_%s.csv" % (OUT_DIR, ftn, zone)) - in2accum = len(cat.columns) - if len(cat.columns) == in2accum: - if zone in inter_vpu.ToZone.values: - cat = appendConnectors(cat, Connector, zone, inter_vpu) - accum = np.load("accum_npy/bastards/accum_%s.npz" % zone) +for _, row in ctl.query("run == 1").iterrows(): - cat.COMID = cat.COMID.astype(accum["comids"].dtype) - cat.set_index("COMID",inplace=True) - cat = cat.loc[accum["comids"]].reset_index().copy() - - up = Accumulation( - cat, accum["comids"], accum["lengths"], accum["upstream"], "Up" + print("running: " 
+ row.FullTableName) + apm = "" if row.AppendMetric == "none" else row.AppendMetric + if row.use_mask == 1: + mask_dir = mask_dir_RP100 + elif row.use_mask == 2: + mask_dir = mask_dir_Slp10 + elif row.use_mask == 3: + mask_dir = mask_dir_Slp20 + else: + mask_dir = "" + LL = f"{LYR_DIR}/{row.LandscapeLayer}" + summaryfield = None + if type(row.summaryfield) == str: + summaryfield = row.summaryfield.split(";") + if row.accum_type == "Point": # Load in point geopandas table and Pct_Full table + if row.use_mask == 0: # TODO: script to create this pct_full_file + pct_full_file = pct_full_file + if row.use_mask == 1: + pct_full_file = pct_full_file_RP100 + pct_full = pd.read_csv(pct_full_file) + points = gpd.GeoDataFrame.from_file(LL) + if not os.path.exists(OUT_DIR + "/DBF_stash"): + os.mkdir(OUT_DIR + "/DBF_stash") + # File string to store InterVPUs needed for adjustments + Connector = f"{OUT_DIR}/{row.FullTableName}_connectors.csv" + catTime = dt.now() + for zone, hydroregion in INPUTS.items(): + if not os.path.exists(f"{OUT_DIR}/{row.FullTableName}_{zone}.csv"): + pre = f"{NHD_DIR}/NHDPlus{hydroregion}/NHDPlus{zone}" + if not row.accum_type == "Point": + izd = f"{mask_dir}/{zone}.tif" if mask_dir else f"{pre}/NHDPlusCatchment/cat" + cat = createCatStats( + row.accum_type, + LL, + izd, + OUT_DIR, + zone, + row.by_RPU, + mask_dir, + NHD_DIR, + hydroregion, + apm, ) - - ws = Accumulation( - cat, accum["comids"], accum["lengths"], accum["upstream"], "Ws" + if row.accum_type == "Point": + izd = f"{pre}/NHDPlusCatchment/Catchment.shp" + cat = PointInPoly( + points, zone, izd, pct_full, mask_dir, apm, summaryfield ) + cat.to_csv(f"{OUT_DIR}/{row.FullTableName}_{zone}.csv", index=False) + in2accum = len(cat.columns) + print("Cat Results Complete in : " + str(dt.now() - catTime)) + try: + # if in2accum not defined...Cat process done,but error thrown in accum + in2accum + except NameError: + # get number of columns to test if accumulation needs to happen + in2accum = 
len(pd.read_csv(f"{OUT_DIR}/{row.FullTableName}_{zone}.csv").columns) + accumTime = dt.now() + for zone in INPUTS: + cat = pd.read_csv(f"{OUT_DIR}/{row.FullTableName}_{zone}.csv") + in2accum = len(cat.columns) + if len(cat.columns) == in2accum: + if zone in inter_vpu.ToZone.values: + cat = appendConnectors(cat, Connector, zone, inter_vpu) + accum = np.load(f"accum_npy/accum_{zone}.npz") + + cat.COMID = cat.COMID.astype(accum["comids"].dtype) + cat.set_index("COMID",inplace=True) + cat = cat.loc[accum["comids"]].reset_index().copy() + + up = Accumulation( + cat, accum["comids"], accum["lengths"], accum["upstream"], "Up" + ) - if zone in inter_vpu.ToZone.values: - cat = pd.read_csv("%s/%s_%s.csv" % (OUT_DIR, ftn, zone)) - if zone in inter_vpu.FromZone.values: - interVPU( - ws, - cat.columns[1:], - accum_type, - zone, - Connector, - inter_vpu.copy(), - summaryfield, - ) - upFinal = pd.merge(up, ws, on="COMID") - final = pd.merge(cat, upFinal, on="COMID") - final.to_csv("%s/%s_%s.csv" % (OUT_DIR, ftn, zone), index=False) - print("Accumulation Results Complete in : " + str(dt.now() - accumTime)) + ws = Accumulation( + cat, accum["comids"], accum["lengths"], accum["upstream"], "Ws" + ) + + if zone in inter_vpu.ToZone.values: + cat = pd.read_csv(f"{OUT_DIR}/{row.FullTableName}_{zone}.csv") + if zone in inter_vpu.FromZone.values: + interVPU( + ws, + cat.columns[1:], + row.accum_type, + zone, + Connector, + inter_vpu.copy(), + summaryfield, + ) + upFinal = pd.merge(up, ws, on="COMID") + final = pd.merge(cat, upFinal, on="COMID") + final.to_csv(f"{OUT_DIR}/{row.FullTableName}_{zone}.csv", index=False) + print("Accumulation Results Complete in : " + str(dt.now() - accumTime)) print("total elapsed time " + str(dt.now() - totTime)) diff --git a/StreamCat_functions.py b/StreamCat_functions.py index e13d03b..5381ad9 100644 --- a/StreamCat_functions.py +++ b/StreamCat_functions.py @@ -38,6 +38,7 @@ from geopandas.tools import sjoin import fiona + 
############################################################################## @@ -64,10 +65,10 @@ def UpcomDict(nhd, interVPUtbl, zone): """ # Returns UpCOMs dictionary for accumulation process # Provide either path to from-to tables or completed from-to table - flow = dbf2DF("%s/NHDPlusAttributes/PlusFlow.dbf" % (nhd))[["TOCOMID", "FROMCOMID"]] + flow = dbf2DF(f"{nhd}/NHDPlusAttributes/PlusFlow.dbf")[["TOCOMID", "FROMCOMID"]] flow = flow[(flow.TOCOMID != 0) & (flow.FROMCOMID != 0)] # check to see if out of zone values have FTYPE = 'Coastline' - fls = dbf2DF("%s/NHDSnapshot/Hydrography/NHDFlowline.dbf" % (nhd)) + fls = dbf2DF(f"{nhd}/NHDSnapshot/Hydrography/NHDFlowline.dbf") coastfl = fls.COMID[fls.FTYPE == "Coastline"] flow = flow[~flow.FROMCOMID.isin(coastfl.values)] # remove these FROMCOMIDs from the 'flow' table, there are three COMIDs here @@ -568,20 +569,20 @@ def PointInPoly( summaryfield : a list of the field/s in points feature to use for getting summary stats in polygons """ polys = gpd.GeoDataFrame.from_file(inZoneData) - points = points.to_crs(polys.crs) - if len(mask_dir) > 1: + points.to_crs(polys.crs, inplace=True) + if mask_dir: polys = polys.drop("AreaSqKM", axis=1) - tblRP = dbf2DF("%s/%s.tif.vat.dbf" % (mask_dir, zone)) + tblRP = dbf2DF(f"{mask_dir}/{zone}.tif.vat.dbf") tblRP["AreaSqKM"] = (tblRP.COUNT * 900) * 1e-6 tblRP["AreaSqKM"] = tblRP["AreaSqKM"].fillna(0) polys = pd.merge(polys, tblRP, left_on="GRIDCODE", right_on="VALUE", how="left") polys.crs = {u"datum": u"NAD83", u"no_defs": True, u"proj": u"longlat"} # Get list of lat/long fields in the table - points["latlon_tuple"] = zip( + points["latlon_tuple"] = tuple(zip( points.geometry.map(lambda point: point.x), points.geometry.map(lambda point: point.y), - ) + )) # Remove duplicate points for 'Count' points2 = points.drop_duplicates( "latlon_tuple" @@ -604,10 +605,11 @@ def PointInPoly( point_poly_count = grouped[ fld ].count() + point_poly_count.name = "COUNT" # Join Count column on to 
NHDCatchments table and keep only 'COMID','CatAreaSqKm','CatCount' final = polys.join(point_poly_count, on="FEATUREID", lsuffix="_", how="left") - final = final[["FEATUREID", "AreaSqKM", fld]].fillna(0) - cols = ["COMID", "CatAreaSqKm%s" % appendMetric, "CatCount%s" % appendMetric] + final = final[["FEATUREID", "AreaSqKM", "COUNT"]].fillna(0) + cols = ["COMID", f"CatAreaSqKm{appendMetric}", f"CatCount{appendMetric}"] if ( not summaryfield == None ): # Summarize fields in list with gpd table including duplicates @@ -635,7 +637,7 @@ def PointInPoly( "CatCountRp100", "CatPctFullRp100", ] - final["CatPctFull%s" % appendMetric] = final["CatPctFull%s" % appendMetric].fillna( + final[f"CatPctFull{appendMetric}"] = final[f"CatPctFull{appendMetric}"].fillna( 100 ) for name in final.columns: @@ -782,6 +784,7 @@ def Accumulation(tbl, comids, lengths, upstream, tbl_type, icol="COMID"): tbl_type : string value of table metrics to be returned icol : column in arr object to index """ + np.seterr(all='ignore') # RuntimeWarning: invalid value encountered in double_scalars coms = tbl[icol].values.astype("int32") # Read in comids indices = swapper(coms, upstream) # Get indices that will be used to map values del upstream # a and indices are big - clean up to minimize RAM @@ -793,14 +796,10 @@ def Accumulation(tbl, comids, lengths, upstream, tbl_type, icol="COMID"): # Loop and accumulate values for index, column in enumerate(cols, 1): col_values = tbl[column].values.astype("float") - all_values = np.array( - np.split(col_values[indices], accumulated_indexes), dtype=object - ) + all_values = np.split(col_values[indices], accumulated_indexes) if tbl_type is "Ws": # add identity value to each array for full watershed - all_values = np.array( - [np.append(val, col_values[idx]) for idx, val in enumerate(all_values)] - ) + all_values = [np.append(val, col_values[idx]) for idx, val in enumerate(all_values)] if index is 1: area = all_values.copy() if "PctFull" in column: @@ -810,7 +809,10 @@ 
def Accumulation(tbl, comids, lengths, upstream, tbl_type, icol="COMID"): ] elif "MIN" in column or "MAX" in column: func = np.max if "MAX" in column else np.min - values = np.array([func(val) for val in all_values]) + # initial is necessary to eval empty upstream arrays + # these values will be overwritten w/ nan later + initial = -999999 if "MAX" in column else 999999 + values = np.array([func(val, initial=initial) for val in all_values]) values[lengths == 0] = col_values[lengths == 0] else: values = np.array([np.nansum(val) for val in all_values]) @@ -903,27 +905,19 @@ def createCatStats( ZonalStatisticsAsTable( inZoneData, "VALUE", elev, outTable, "DATA", "ALL" ) - for rpu in range(len(rpuList)): - if rpu == 0: - table = dbf2DF(out_dir + "/zonalstats_elev%s.dbf" % (rpuList[rpu])) + for count, rpu in enumerate(rpuList): + if count == 0: + table = dbf2DF(f"{out_dir}/DBF_stash/zonalstats_elev{rpu}.dbf") else: table = pd.concat( [ table, - dbf2DF(out_dir + "/zonalstats_elev%s.dbf" % (rpuList[rpu])), + dbf2DF(f"{out_dir}/DBF_stash/zonalstats_elev{rpu}.dbf"), ] ) if len(rpuList) > 1: - clean = ( - table.groupby("VALUE")["AREA"] - .nlargest(1) - .reset_index() - .rename(columns={0: "AREA", "level_1": "index"}) - ) - table = pd.merge( - table.reset_index(), clean, on=["VALUE", "AREA", "index"], how="right" - ).set_index("index") - table = table.drop_duplicates(subset="VALUE") + table.reset_index(drop=True, inplace=True) + table = table.loc[table.groupby("VALUE").AREA.idxmax()] arcpy.CheckInExtension("spatial") except LicenseError: print("Spatial Analyst license is unavailable") @@ -1089,7 +1083,7 @@ def appendConnectors(cat, Connector, zone, interVPUtbl): ] cat = cat.append(con) - return cat + return cat.reset_index(drop=True) ############################################################################## @@ -1114,15 +1108,12 @@ def swapper(coms, upStream): ############################################################################## def make_all_cat_comids(nhd, 
inputs): - sys.stdout.write("Making allFLOWCOMs numpy file, reading zones...") + print("Making allFLOWCOMs numpy file, reading zones...", end="", flush=True) all_comids = np.array([], dtype=np.int32) - for zone in inputs: - sys.stdout.write(zone + ", ") - sys.stdout.flush() - hydroregion = inputs[zone] - pre = "%s/NHDPlus%s/NHDPlus%s" % (nhd, hydroregion, zone) - catchment = "%s/NHDPlusCatchment/Catchment.dbf" % pre - cats = dbf2DF(catchment) + for zone, hr in inputs.items(): + print(zone, end=", ", flush=True) + pre = f"{nhd}/NHDPlus{hr}/NHDPlus{zone}" + cats = dbf2DF(f"{pre}/NHDPlusCatchment/Catchment.dbf") all_comids = np.append(all_comids, cats.FEATUREID.values.astype(int)) np.savez_compressed("./accum_npy/allCatCOMs.npz", all_comids=all_comids) print("...done!") @@ -1142,17 +1133,13 @@ def makeNumpyVectors(inter_tbl, nhd): os.mkdir("accum_npy") inputs = nhd_dict(nhd) all_comids = make_all_cat_comids(nhd, inputs) - print("Making numpy files in zone...") - for zone in inputs: - sys.stdout.write(zone + ", ") - sys.stdout.flush() - hydroregion = inputs[zone] - pre = "%s/NHDPlus%s/NHDPlus%s" % (nhd, hydroregion, zone) - flow = dbf2DF(("%s/NHDPlusAttributes/" "PlusFlow.dbf") % (pre))[ - ["TOCOMID", "FROMCOMID"] - ] + print("Making numpy files in zone...", end="", flush=True) + for zone, hr in inputs.items(): + print(zone, end=", ", flush=True) + pre = f"{nhd}/NHDPlus{hr}/NHDPlus{zone}" + flow = dbf2DF(f"{pre}/NHDPlusAttributes/PlusFlow.dbf")[["TOCOMID", "FROMCOMID"]] flow = flow[(flow.TOCOMID != 0) & (flow.FROMCOMID != 0)] - fls = dbf2DF("%s/NHDSnapshot/Hydrography/NHDFlowline.dbf" % (pre)) + fls = dbf2DF(f"{pre}/NHDSnapshot/Hydrography/NHDFlowline.dbf") coastfl = fls.COMID[fls.FTYPE == "Coastline"] flow = flow[~flow.FROMCOMID.isin(coastfl.values)] # remove these FROMCOMIDs from the 'flow' table, there are three COMIDs @@ -1160,44 +1147,30 @@ def makeNumpyVectors(inter_tbl, nhd): flow = flow[~flow.FROMCOMID.isin(inter_tbl.removeCOMs)] # find values that are coming 
from other zones and remove the ones that # aren't in the interVPU table - out = np.setdiff1d(flow.FROMCOMID.values, fls.COMID.values) - out = out[ - np.nonzero(out) - ] # this should be what combines zones and above^, but we force connections with inter_tbl + out = out[np.nonzero(out)] # this should be what combines zones and above^, but we force connections with inter_tbl flow = flow[~flow.FROMCOMID.isin(np.setdiff1d(out, inter_tbl.thruCOMIDs.values))] - # Table is ready for processing and flow connection dict can be created - fcom, tcom = flow.FROMCOMID.values, flow.TOCOMID.values flow_dict = defaultdict(list) - for i in range(0, len(flow), 1): - from_comid = fcom[i] - if from_comid == 0: - continue - else: - flow_dict[tcom[i]].append(from_comid) + for _, row in flow.iterrows(): + flow_dict[row.TOCOMID].append(row.FROMCOMID) # add IDs from UpCOMadd column if working in ToZone, forces the flowtable connection though not there for interLine in inter_tbl.values: if interLine[6] > 0 and interLine[2] == zone: flow_dict[int(interLine[6])].append(int(interLine[0])) - out_of_vpus = inter_tbl.loc[ (inter_tbl.ToZone == zone) & (inter_tbl.DropCOMID == 0) ].thruCOMIDs.values - comids = list(all_comids.intersection(set(flow_dict.keys()))) - comids = np.append(comids, out_of_vpus) # TODO: check this out! 
- a = map(lambda x: bastards(x, flow_dict), comids) # list of upstream lists - b = [] - for i in range(len(a)): - if len(a[i]) == 0: - comids = np.delete(comids, 1) - continue - b.append(list(all_comids.intersection(a[i]))) - lengths = np.array([len(v) for v in b]) - upstream = np.int32(np.hstack(np.array(b))) # Convert to 1d vector - assert len(b) == len(lengths) == len(comids) + cats = dbf2DF(f"{pre}/NHDPlusCatchment/Catchment.dbf").set_index("FEATUREID") + comids = cats.index.values + comids = np.append(comids, out_of_vpus) + # list of upstream lists, filter comids in all_comids + ups = [list(all_comids.intersection(bastards(x, flow_dict))) for x in comids] + lengths = np.array([len(u) for u in ups]) + upstream = np.hstack(ups).astype(np.int32) # Convert to 1d vector + assert len(ups) == len(lengths) == len(comids) np.savez_compressed( - "./accum_npy/accum_%s.npz" % zone, + f"./accum_npy/accum_{zone}.npz", comids=comids, lengths=lengths, upstream=upstream, @@ -1224,7 +1197,7 @@ def nhd_dict(nhd, unit="VPU"): """ inputs = OrderedDict() - bounds = dbf2DF("%s/NHDPlusGlobalData/BoundaryUnit.dbf" % nhd) + bounds = dbf2DF(f"{nhd}/NHDPlusGlobalData/BoundaryUnit.dbf") remove = bounds.loc[bounds.DRAINAGEID.isin(["HI", "CI"])].index bounds = bounds.drop(remove, axis=0) if unit == "VPU": @@ -1241,7 +1214,7 @@ def nhd_dict(nhd, unit="VPU"): for _, row in rpu_bounds.iterrows(): hr = row.DRAINAGEID rpu = row.UNITID - for root, _, _ in os.walk("%s/NHDPlus%s" % (nhd, hr)): + for root, _, _ in os.walk(f"{nhd}/NHDPlus{hr}"): if rpu in root: zone = os.path.split(os.path.split(root)[0])[0][-2:] if not zone in inputs.keys(): @@ -1272,7 +1245,6 @@ def findUpstreamNpy(zone, com, numpy_dir): itemindex = int(np.where(comids == com)[0]) n = lengths[:itemindex].sum() arrlen = lengths[itemindex] - print("indexes (%s : %s)" % (n, n + arrlen)) return upStream[n : n + arrlen] diff --git a/check_metrics.py b/check_metrics.py new file mode 100644 index 0000000..46af48d --- /dev/null +++ 
b/check_metrics.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +""" +Created on Fri Jan 29 15:08:24 2021 + +This is a quick and dirty script to check the values w/in each table to check +that they are equivalent to the output that we have already on the L drive. + +@author: Rdebbout +""" + + +import numpy as np +import pandas as pd +INPUTS = np.load("accum_npy/vpu_inputs.npy", allow_pickle=True).item() +from stream_cat_config import OUT_DIR, FINAL_DIR + + +def run_checks(metrics, final=False): + check_dir = OUT_DIR if not final else FINAL_DIR + alloc_dir = "L:/Priv/CORFiles/Geospatial_Library_Projects/StreamCat/" + alloc_dir += ("FTP_Staging/HydroRegions" if final + else "Allocation_and_Accumulation") + for metric in metrics: + print(metric) + for zone in INPUTS: + print(zone, end="...", flush=True) + fn = f"{metric}{zone}.csv" if final else f"{metric}_{zone}.csv" + t1 = pd.read_csv(alloc_dir + "/" + fn).set_index("COMID").sort_index() + t2 = pd.read_csv(check_dir + "/" + fn).set_index("COMID").sort_index() + assert all(t1.index == t2.index) + assert t1.columns.sort_values().tolist() == t2.columns.sort_values().tolist() + tot = pd.merge(t1, t2, left_on=t1.index, right_on=t2.index) + for col in t1.columns: + if not "StorM3" in col: # N/A values won't compare in Dams summaryfields + tot["diff"] = abs(tot[f"{col}_x"] - tot[f"{col}_y"]) + assert len(tot.loc[tot["diff"] > 0.0000001]) == 0 + print("good!") + +metrics = ["nlcd2001_RipBuf100", "Dams", "CBNF"] +final_metrics = ["Dams_Region","Elevation_Region","Lithology_Region","STATSGO_Set1_Region"] +run_checks(metrics) +run_checks(final_metrics, final=True) \ No newline at end of file