diff --git a/configFiles/AOI_1_Rio_POI_Description.json b/configFiles/AOI_1_Rio_POI_Description.json
new file mode 100644
index 0000000..9e4f992
--- /dev/null
+++ b/configFiles/AOI_1_Rio_POI_Description.json
@@ -0,0 +1,84 @@
+{
+ "Airfield_POIApron Lights Tower": {
+ "featureIdNum": 1,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10
+ },
+ "Airfield_POIHangar": {
+ "featureIdNum": 2,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 25
+ },
+ "Airfield_POIHelipad": {
+ "featureIdNum": 3,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 18
+ },
+"Airfield_POISupport Facility": {
+ "featureIdNum": 4,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10
+ },
+"Bridges_TunnelsBridgePedestrian": {
+ "featureIdNum": 5,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 20
+ },
+"Bridges_TunnelsBridgeRoad": {
+ "featureIdNum": 6,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 20
+ },
+"Commercial_POIServiceBar": {
+ "featureIdNum": 7,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 25
+ },
+"Electrical_POITransmission Tower": {
+ "featureIdNum": 8,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 12
+ },
+"EmbassiesConsulate": {
+ "featureIdNum": 9,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 25
+ },
+"Olympic_Venues": {
+ "featureIdNum": 10,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 50
+ },
+"Public_Transportation_POIBusStop": {
+ "featureIdNum": 11,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 5
+ },
+"Railways_POI999999Station": {
+ "featureIdNum": 12,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 25
+ },
+"Recreation_POIPark": {
+ "featureIdNum": 13,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 50
+ },
+"Recreation_POIPool": {
+ "featureIdNum": 14,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10
+ },
+"Recreation_POISports Facility": {
+ "featureIdNum": 15,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 30
+ },
+"Religious_InstitutionsChurch": {
+ "featureIdNum": 16,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 25
+ }
+}
+
+
diff --git a/configFiles/OSM_Power.json b/configFiles/OSM_Power.json
new file mode 100644
index 0000000..5551f26
--- /dev/null
+++ b/configFiles/OSM_Power.json
@@ -0,0 +1,83 @@
+{
+"power":
+ {
+ "compensator": {
+ "featureIdNum": 1,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 5,
+ "spaceFeatureName": "power:compensator"
+ },
+ "converter": {
+ "featureIdNum": 2,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 5,
+ "spaceFeatureName": "power:converter"
+ },
+ "heliostat": {
+ "featureIdNum": 3,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 5,
+ "spaceFeatureName": "power:heliostat"
+ },
+ "insulator": {
+ "featureIdNum": 4,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10,
+ "spaceFeatureName": "power:insulator"
+ },
+ "pole": {
+ "featureIdNum": 5,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 5,
+ "spaceFeatureName": "power:pole"
+ },
+ "portal": {
+ "featureIdNum": 6,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10,
+ "spaceFeatureName": "power:portal"
+ },
+ "catenary_mast": {
+ "featureIdNum": 7,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10,
+ "spaceFeatureName": "power:catenary_mast"
+ },
+ "substation": {
+ "featureIdNum": 8,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 25,
+ "spaceFeatureName": "power:substation"
+ },
+ "switch": {
+ "featureIdNum": 9,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 5,
+ "spaceFeatureName": "power:switch"
+ },
+ "terminal": {
+ "featureIdNum": 10,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 5,
+ "spaceFeatureName": "power:terminal"
+ },
+ "tower": {
+ "featureIdNum": 11,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10,
+ "spaceFeatureName": "power:tower"
+ },
+ "transformer": {
+ "featureIdNum": 12,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10,
+ "spaceFeatureName": "power:transformer"
+ },
+ "unknown": {
+ "featureIdNum": 13,
+ "featureChipScaleM": 200,
+ "featureBBoxSizeM": 10,
+ "spaceFeatureName": "power:unknown"
+ }
+ }
+}
diff --git a/content/.DS_Store b/content/.DS_Store
index 3ef473b..18f1788 100644
Binary files a/content/.DS_Store and b/content/.DS_Store differ
diff --git a/cw_environment.yml b/cw_environment.yml
new file mode 100644
index 0000000..6132d63
--- /dev/null
+++ b/cw_environment.yml
@@ -0,0 +1,59 @@
+name: cw_environment
+channels:
+- conda-forge
+- defaults
+dependencies:
+- cython
+- libgdal=2.2.1
+- numpy
+- opencv=3.2.0=np113py36_blas_openblas_204
+- pip=9.0.1=py36_0
+- pyproj
+- pytables
+- python=3.6.2=0
+- python-dateutil=2.6.1=py36_0
+- pytz=2017.2=py36_0
+- pywavelets=0.5.2=np113py36_0
+- pyyaml=3.12=py36_1
+- readline=6.2=0
+- requests=2.18.4=py36_1
+- scikit-image
+- scikit-learn
+- scipy
+- setuptools=36.2.2=py36_0
+- six=1.10.0=py36_1
+- sortedcontainers=1.5.7=py36_0
+- sqlite=3.13.0=1
+- tblib=1.3.2=py36_0
+- tk=8.5.19=2
+- toolz=0.8.2=py36_0
+- tornado=4.5.1=py36_0
+- urllib3=1.22=py36_0
+- wheel=0.29.0=py36_0
+- x264=20131217=3
+- xerces-c=3.1.4=3
+- xz=5.2.3=0
+- yaml=0.1.6=0
+- zict=0.1.2=py36_0
+- zlib=1.2.11=0
+- pip:
+ - affine==2.1.0
+ - boto3==1.4.6
+ - botocore==1.6.8
+ - click-plugins==1.0.3
+ - cligj==0.4.0
+ - descartes==1.1.0
+ - docutils==0.14
+ - fiona==1.7.9.post1
+ - geopandas==0.2.1
+ - jmespath==0.9.3
+ - keras==1.1.0
+ - munch==2.2.0
+ - pathlib
+ - rasterio==1.0a9
+ - s3transfer==0.1.10
+ - shapely==1.6.0
+ - snuggs==1.4.1
+ - tables==3.4.2
+ - theano==0.9.0
+ - tqdm
\ No newline at end of file
diff --git a/docker/Dockerfile b/docker/Dockerfile
index d68a3f0..17ed639 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -78,9 +78,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
RUN pip install rtree
## Install Python requirements
-
RUN pip install pandas geopandas
+
ENV GIT_BASE=/opt/
WORKDIR $GIT_BASE
diff --git a/docker/standalone/cpu/Dockerfile b/docker/standalone/cpu/Dockerfile
index 9a2baf5..1fb3ba0 100644
--- a/docker/standalone/cpu/Dockerfile
+++ b/docker/standalone/cpu/Dockerfile
@@ -52,6 +52,7 @@ RUN pip install rtree
RUN pip install pandas geopandas
+
ENV GIT_BASE=/opt/
WORKDIR $GIT_BASE
diff --git a/docker/standalone/gpu/Dockerfile b/docker/standalone/gpu/Dockerfile
index 911a32f..11eaf61 100644
--- a/docker/standalone/gpu/Dockerfile
+++ b/docker/standalone/gpu/Dockerfile
@@ -51,6 +51,7 @@ RUN pip install rtree
RUN pip install pandas geopandas
+
ENV GIT_BASE=/opt/
WORKDIR $GIT_BASE
diff --git a/python/createDataSpaceNet.py b/python/createDataSpaceNet.py
index 386cb8f..a5c0258 100644
--- a/python/createDataSpaceNet.py
+++ b/python/createDataSpaceNet.py
@@ -4,6 +4,7 @@
from spaceNetUtilities import labelTools as lT
from spaceNetUtilities import geoTools as gT
import argparse
+import json
def processRasterChip(rasterImage, rasterDescription, geojson, geojsonDescription, outputDirectory='',
@@ -53,7 +54,9 @@ def processChipSummaryList(chipSummaryList, outputDirectory='', annotationType='
outputPixType='',
datasetName='spacenetV2',
folder_name='folder_name',
- bboxResize=1.0
+ bboxResize=1.0,
+ objectClassDict='',
+ attributeName=''
):
if outputPixType == '':
@@ -82,7 +85,9 @@ def processChipSummaryList(chipSummaryList, outputDirectory='', annotationType='
convertTo8Bit=convertTo8Bit,
outputPixType=outputPixType,
outputFormat=outputFormat,
- bboxResize=bboxResize
+ bboxResize=bboxResize,
+ objectClassDict=objectClassDict,
+ attributeName=attributeName
)
elif annotationType=='DARKNET':
entry = lT.geoJsonToDARKNET(annotationName, chipSummary['geoVectorName'], chipSummary['rasterSource'],
@@ -92,7 +97,9 @@ def processChipSummaryList(chipSummaryList, outputDirectory='', annotationType='
convertTo8Bit=convertTo8Bit,
outputPixType=outputPixType,
outputFormat=outputFormat,
- bboxResize=bboxResize
+ bboxResize=bboxResize,
+ objectClassDict=objectClassDict,
+ attributeName=attributeName
)
elif annotationType=='SBD':
@@ -221,7 +228,7 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8,
action='store_true')
parser.add_argument("--featureName",
help='Type of feature to be summarized by csv (i.e. Building)'
- 'Currently in SpaceNet V2 Building is only label',
+ 'Currently in SpaceNet V2 Building is the only label',
type=str,
default='Buildings')
parser.add_argument("--spacenetVersion",
@@ -240,6 +247,12 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8,
type=float,
default=1.0)
+ parser.add_argument("--POIConfigJson",
+ help='JSON file containing information about valid Points of interest',
+ default='')
+ parser.add_argument("--attributeName",
+ help='Attribute in GeoJson to pull the feature name from',
+ default='spacenetFeature')
args = parser.parse_args()
entryList = []
@@ -256,7 +269,7 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8,
geojsonDirectory = os.path.join('vectordata','geojson')
# 'vectordata/geojson'
else:
- print('Bad Spacenet Version Submitted, Version {} is not supported'.foramt(args.spacenetVersion))
+ print('Bad Spacenet Version Submitted, Version {} is not supported'.format(args.spacenetVersion))
if args.convertTo8Bit:
@@ -272,11 +285,19 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8,
else:
fullPathAnnotationsDirectory = args.outputDirectory
+ if args.POIConfigJson == '':
+ objectClassDict=''
+ else:
+
+ with open(args.POIConfigJson, 'r') as f:
+ objectClassDict = json.load(f)
+
for aoiSubDir in listOfAOIs:
fullPathSubDir = os.path.join(srcSpaceNetDirectory, aoiSubDir)
## Create Annotations directory
- #fullPathAnnotationsDirectory = os.path.join(fullPathSubDir, annotationsDirectory)
+ ## fullPathAnnotationsDirectory = os.path.join(fullPathSubDir, annotationsDirectory)
+
if not os.path.exists(fullPathAnnotationsDirectory):
os.makedirs(fullPathAnnotationsDirectory)
if not os.path.exists(os.path.join(fullPathAnnotationsDirectory, 'annotations')):
@@ -314,7 +335,9 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8,
outputPixType=outputDataType,
datasetName='spacenetV2',
folder_name='folder_name',
- bboxResize= args.boundingBoxResize
+ bboxResize= args.boundingBoxResize,
+ objectClassDict=objectClassDict,
+ attributeName=args.attributeName
)
print(entryListTmp)
entryList.extend(entryListTmp)
diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py
new file mode 100644
index 0000000..35d2572
--- /dev/null
+++ b/python/createSpaceNetPOI.py
@@ -0,0 +1,301 @@
+from osgeo import ogr, gdal, osr
+from spaceNetUtilities import geoTools as gT
+import json
+import os
+import subprocess
+import geopandas as gpd
+from geopandas.geoseries import Polygon
+
+def returnBoundBoxM(tmpGeom, metersRadius=50):
+ poly = gT.createPolygonFromCenterPoint(tmpGeom.GetX(), tmpGeom.GetY(),
+ metersRadius)
+
+ polyEnv = gT.get_envelope(poly)
+
+
+
+
+ return polyEnv
+
+
+
+def createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList):
+
+ ## Turn Point File into PolygonFile
+
+
+ srcDS = ogr.Open(srcVectorFile, 0)
+ inLayer = srcDS.GetLayer()
+ srs = osr.SpatialReference()
+ srs.ImportFromEPSG(4326)
+ outDriver = ogr.GetDriverByName("GeoJSON")
+ if os.path.exists(outVectorFile):
+ outDriver.DeleteDataSource(outVectorFile)
+
+ dstDS = outDriver.CreateDataSource(outVectorFile)
+
+ outLayer = dstDS.CreateLayer('polygons', srs, geom_type=ogr.wkbPolygon)
+
+ # Get Attribute names
+ inLayerDefn = inLayer.GetLayerDefn()
+ for i in range(0, inLayerDefn.GetFieldCount()):
+ fieldDefn = inLayerDefn.GetFieldDefn(i)
+ outLayer.CreateField(fieldDefn)
+
+ outLayerDefn = outLayer.GetLayerDefn()
+
+ # Copy Features
+ for i in range(0, inLayer.GetFeatureCount()):
+
+ inFeature = inLayer.GetFeature(i)
+ outFeature = ogr.Feature(outLayerDefn)
+ for i in range(0, outLayerDefn.GetFieldCount()):
+ outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))
+ # Set geometry as centroid
+ tmpGeom = inFeature.GetGeometryRef()
+ poly = returnBoundBoxM(tmpGeom, metersRadius=pointOfInterestList[inFeature.GetField('spaceNetFeature')]['featureBBoxSizeM'])
+
+ outFeature.SetGeometry(poly)
+
+ outLayer.CreateFeature(outFeature)
+ outFeature = None
+ inFeature = None
+
+ srcDS = None
+ dstDS = None
+
+def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList,
+ baseName='',
+ className='',
+ outputDirectory='',
+ seperateImageFolders=False,
+ minPartialToInclue = 0.70
+ ):
+
+
+ chipSummaryList = []
+ srcDS = srcDS = ogr.Open(srcVectorFile, 0)
+ srcLayer = srcDS.GetLayer()
+
+
+ shapeSrcList = []
+ for shapeFileSrc in shapeFileSrcList:
+ print(shapeFileSrc[1])
+ shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]])
+
+ ## determinePixelSize in Meters
+ print(rasterFileList)
+ print(rasterFileList[0][0])
+ srcRaster = gdal.Open(rasterFileList[0][0])
+ geoTransform = srcRaster.GetGeoTransform()
+ metersPerPix = abs(round(geoTransform[5]*111111,1))
+
+ imgId = 0
+ for feature in srcLayer:
+
+ geom = feature.GetGeometryRef()
+
+ if geom.GetGeometryName() != 'POINT':
+ geom = geom.Centroid()
+
+
+ spacenetFeatureName = feature.GetField('spaceNetFeature')
+ if seperateImageFolders:
+ className = spacenetFeatureName.replace(' ', '')
+ else:
+ className = ''
+ clipSize = pointOfInterestList[spacenetFeatureName]['featureChipScaleM']
+ halfClipSizeXm = round((clipSize/metersPerPix)/2)
+
+ xCenter = geom.GetX()
+ yCenter = geom.GetY()
+
+ maxXCut = xCenter + halfClipSizeXm*geoTransform[1]
+ maxYCut = yCenter + abs(halfClipSizeXm*geoTransform[5])
+ minYCut = yCenter - abs(halfClipSizeXm*geoTransform[5])
+ minXCut = xCenter - halfClipSizeXm*geoTransform[1]
+
+ imgId = imgId + 1
+
+ chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeSrcList,
+ maxXCut, maxYCut, minYCut, minXCut,
+ rasterFileBaseList=[],
+ minpartialPerc=minPartialToInclue,
+ outputPrefix='',
+ createPix=False,
+ rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"),
+ className=className,
+ baseName=baseName,
+ imgId=imgId)
+
+ chipSummaryList.append(chipSummary)
+
+ return chipSummaryList
+
+
+def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500):
+
+
+ longMin = -500
+ latMin = -500
+ if latCutOff == -500:
+ latMax = 500
+ else:
+ latMax = latCutOff
+
+ if lonCutOff == -500:
+ longMax = 500
+ else:
+ longMax = lonCutOff
+
+
+ outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson')
+ cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson,
+ '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)]
+
+ subprocess.call(cmd)
+ longMin = -500
+ latMin = -500
+ if latCutOff == -500:
+ latMax = 500
+ else:
+ latMin = latCutOff
+ latMax = 500
+
+
+ if lonCutOff == -500:
+ longMax = 500
+ else:
+ longMin = lonCutOff
+ longMax = 500
+
+ outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson')
+ cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson,
+ '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)]
+ subprocess.call(cmd)
+
+
+ return outputGeoJsonTrain, outputGeoJsonTest
+
+def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'):
+
+
+ df = gpd.read_file(geoJsonIn, encoding=encoding)
+ df['y'] = df.geometry.map(lambda p: p.y)
+ df['x'] = df.geometry.map(lambda p: p.x)
+ df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True)
+ df.drop(['x', 'y'], 1, inplace=True)
+
+ if geoJsonOut=='':
+ geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson')
+ df.to_file(geoJsonOut, driver='GeoJSON', encoding=encoding)
+
+ return geoJsonOut
+
+def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8'):
+
+ insidePoly = Polygon([(lonMin, latMin), (lonMin, latMax), (lonMax, latMax), (lonMax, latMin)])
+ df = gpd.read_file(geoJson, encoding=encoding)
+
+ outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson')
+ df[~df.intersects(insidePoly)].to_file(outputGeoJsonTrain, driver='GeoJSON', encoding=encoding)
+
+ outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson')
+ df[df.intersects(insidePoly)].to_file(outputGeoJsonTest, driver='GeoJSON', encoding=encoding)
+
+ return outputGeoJsonTrain, outputGeoJsonTest
+
+if __name__ == '__main__':
+
+ createOutVectorFile = True
+ srcVectorFile = '/path/To/PointOfInterestSummary.geojson'
+ outVectorFile = '/path/To/PolygonOfInterestSummary.geojson'
+ outputDirectory = '/path/To/processedDataPOI/'
+ baseName = 'AOI_3_Paris'
+ featureDescriptionJson = '../configFiles/OSM_Power.json'
+ className = 'POIAll'
+ seperateImageFolders = False
+ splitGeoJson = True
+ deduplicateGeoJsonFlag = True
+
+ if deduplicateGeoJsonFlag:
+ srcVectorFile = deduplicateGeoJson(srcVectorFile)
+
+ splitGeoJson_latMin = -22.94
+ splitGeoJson_latMax = 90
+ splitGeoJson_lonMin = -43.25
+ splitGeoJson_lonMax = 180
+
+ srcVectorFileList = [[srcVectorFile, 'all']]
+
+ if splitGeoJson:
+ srcVectorTrain, srcVectorTest = splitVectorFileGPD(srcVectorFile,
+ latMin=splitGeoJson_latMin,
+ latMax=splitGeoJson_latMax,
+ lonMin=splitGeoJson_lonMin,
+ lonMax=splitGeoJson_lonMax,
+ )
+
+ srcVectorFileList = [
+ [srcVectorTrain, 'train'],
+ [srcVectorTest, 'test']
+ ]
+
+
+
+
+
+ # List of Raster Images to Chip and an appropriate label.
+ # This list will take any type of Raster supported by GDAL
+ # VRTs are nice because they create a virtual mosaic and allow for images covering a wide area to be considered one
+ # In this case gdalbuildvrt can be run in a folder of tifs to create the VRT to be handed for processing
+ rasterFileList = [['/path/To/Pan-Band.vrt', 'Pan'],
+ ['/path/To/8-BandVRT.vrt', '8band']
+ ]
+
+
+
+
+ ### Define Point of Interest Dictionary
+ # {'spacenetFeatureName':
+ # {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping
+ # 'featureChipScaleM': 200, # Size of chip to create in meters
+ # 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point
+ # # indicating in the geojson. This will be used in creating polygons for IOU scoring
+ # }
+
+ with open(featureDescriptionJson, 'r') as j:
+ pointOfInterestList = json.load(j)
+
+ # create Polygon of Interest from Point of Interest File. This will create bounding boxes of specified size.
+ for srcVectorFile, folderType in srcVectorFileList:
+ outVectorFile = srcVectorFile.replace('.geojson', 'poly.geojson')
+ if createOutVectorFile:
+ createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList)
+
+
+ outputDirectoryTmp = os.path.join(outputDirectory, folderType)
+
+ shapeFileSrcList = [
+ [outVectorFile, 'POIAll']
+ ]
+ # create Folder Structure to place files into.
+
+ for rasterFile in rasterFileList:
+ for featureName in pointOfInterestList.keys():
+ tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], featureName.replace(' ', ''))
+ if not os.path.exists(tmpPath):
+ os.makedirs(tmpPath)
+
+ # create Processed Point of Interest Data.
+ createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList,
+ baseName=baseName,
+ className='',
+ outputDirectory=outputDirectoryTmp,
+ seperateImageFolders=seperateImageFolders,
+ minPartialToInclue=0.70)
+
+
+
+
+
diff --git a/python/createSpaceNetPOI_Round2.py b/python/createSpaceNetPOI_Round2.py
new file mode 100644
index 0000000..1ff5fc5
--- /dev/null
+++ b/python/createSpaceNetPOI_Round2.py
@@ -0,0 +1,361 @@
+from osgeo import ogr, gdal, osr
+from spaceNetUtilities import geoToolsGPD as gT
+import json
+import os
+import subprocess
+import geopandas as gpd
+from geopandas.geoseries import Polygon
+import osmnx as ox
+def returnBoundBoxM(tmpGeom, metersRadius=50):
+ poly = gT.createPolygonFromCenterPoint(tmpGeom.GetX(), tmpGeom.GetY(),
+ metersRadius)
+
+ polyEnv = poly.envelope
+
+
+
+
+ return polyEnv
+
+def createPolygonShapeFileGPD(srcVectorFile, pointOfInterestList, outVectorFile=''):
+
+ gdfOrig = gpd.read_file(srcVectorFile)
+
+ firstKey = True
+
+
+
+ ## Iterate through keys for valid points of interest, These correspond to OSM Tags such as Power or Places.
+ for pointOfInterestKey in pointOfInterestList.keys():
+ ## Select from Vector File all rows where Point of Interest Columns is not an empty String
+ gdfKey = gdfOrig[gdfOrig[pointOfInterestKey] != '']
+
+ # Get list of defined features in the set Tag List, i.e. power=objectName
+
+ objectNameList = pointOfInterestList[pointOfInterestKey].keys()
+
+ # iterate through object names to apply specific actions with regard to the object like bounding box size.
+
+ for objectName in objectNameList:
+ # select only those objects where the key=object i.e power=tower
+ gdfPOITemp = gdfKey[gdfKey[pointOfInterestKey] == objectName]
+
+ # "compensator": {
+ # "featureIdNum": 1,
+ # "featureChipScaleM": 200,
+ # "featureBBoxSizeM": 5
+ # },
+ # project subset to UTM so that actions can be done with regard to meters
+ gdfPOITempUTM = ox.project_gdf(gdfPOITemp)
+ # buffer geometry to create circle around point with x meters
+ gdfPOITempUTM.geometry=gdfPOITempUTM.buffer(pointOfInterestList[pointOfInterestKey][objectName]['featureBBoxSizeM'])
+ # calculate the envelope of the circle to create a bounding box for each object
+ gdfPOITempUTM.geometry=gdfPOITempUTM.envelope
+
+ # reproject to WGS84 (lat/long)
+ gdfPOITemp = ox.project_gdf(gdfPOITempUTM, to_latlong=True)
+ # assign a name to spacefeat for future use in the vector: "power:tower"
+ gdfPOITemp['spacefeat']=pointOfInterestList[pointOfInterestKey][objectName]['spaceFeatureName']
+
+ # if first key, create final gdf
+ if firstKey:
+ gdfFinal = gdfPOITemp; firstKey = False
+ else:
+ # else append new rows to the end of gdfFinal
+ gdfFinal = gdfFinal.append(gdfPOITemp)
+
+ if outVectorFile != '':
+ gdfFinal.to_file(outVectorFile)
+
+ return gdfFinal
+
+def createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList):
+
+ ## Turn Point File into PolygonFile
+
+
+ srcDS = ogr.Open(srcVectorFile, 0)
+ inLayer = srcDS.GetLayer()
+ srs = osr.SpatialReference()
+ srs.ImportFromEPSG(4326)
+ outDriver = ogr.GetDriverByName("GeoJSON")
+ if os.path.exists(outVectorFile):
+ outDriver.DeleteDataSource(outVectorFile)
+
+ dstDS = outDriver.CreateDataSource(outVectorFile)
+
+ outLayer = dstDS.CreateLayer('polygons', srs, geom_type=ogr.wkbPolygon)
+
+ # Get Attribute names
+ inLayerDefn = inLayer.GetLayerDefn()
+ for i in range(0, inLayerDefn.GetFieldCount()):
+ fieldDefn = inLayerDefn.GetFieldDefn(i)
+ outLayer.CreateField(fieldDefn)
+
+ outLayerDefn = outLayer.GetLayerDefn()
+
+ # Copy Features
+ for i in range(0, inLayer.GetFeatureCount()):
+
+ inFeature = inLayer.GetFeature(i)
+ outFeature = ogr.Feature(outLayerDefn)
+ for i in range(0, outLayerDefn.GetFieldCount()):
+ outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))
+ # Set geometry as centroid
+ tmpGeom = inFeature.GetGeometryRef()
+ poly = returnBoundBoxM(tmpGeom, metersRadius=pointOfInterestList[inFeature.GetField('spaceNetFeature')]['featureBBoxSizeM'])
+
+ outFeature.SetGeometry(poly)
+
+ outLayer.CreateFeature(outFeature)
+ outFeature = None
+ inFeature = None
+
+ srcDS = None
+ dstDS = None
+
+def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList,
+ baseName='',
+ className='',
+ outputDirectory='',
+ seperateImageFolders=False,
+ minPartialToInclue = 0.70,
+ fieldName='spacefeat'):
+
+
+ chipSummaryList = []
+ srcDS = srcDS = ogr.Open(srcVectorFile, 0)
+ srcLayer = srcDS.GetLayer()
+
+
+ shapeSrcList = []
+ for shapeFileSrc in shapeFileSrcList:
+ print(shapeFileSrc[1])
+ shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]])
+
+ ## determinePixelSize in Meters
+ print(rasterFileList)
+ print(rasterFileList[0][0])
+ srcRaster = gdal.Open(rasterFileList[0][0])
+ geoTransform = srcRaster.GetGeoTransform()
+ metersPerPix = abs(round(geoTransform[5]*111111,1))
+
+ imgId = 0
+ for feature in srcLayer:
+
+ geom = feature.GetGeometryRef()
+
+ if geom.GetGeometryName() != 'POINT':
+ geom = geom.Centroid()
+
+
+ spacenetFeatureName = feature.GetField('spaceNetFeature')
+ if seperateImageFolders:
+ className = spacenetFeatureName.replace(' ', '')
+ else:
+ className = ''
+ clipSize = pointOfInterestList[spacenetFeatureName]['featureChipScaleM']
+ halfClipSizeXm = round((clipSize/metersPerPix)/2)
+
+ xCenter = geom.GetX()
+ yCenter = geom.GetY()
+
+ maxXCut = xCenter + halfClipSizeXm*geoTransform[1]
+ maxYCut = yCenter + abs(halfClipSizeXm*geoTransform[5])
+ minYCut = yCenter - abs(halfClipSizeXm*geoTransform[5])
+ minXCut = xCenter - halfClipSizeXm*geoTransform[1]
+
+ imgId = imgId + 1
+
+ chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeSrcList,
+ maxXCut, maxYCut, minYCut, minXCut,
+ rasterFileBaseList=[],
+ minpartialPerc=minPartialToInclue,
+ outputPrefix='',
+ createPix=False,
+ rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"),
+ className=className,
+ baseName=baseName,
+ imgId=imgId)
+
+ chipSummaryList.append(chipSummary)
+
+ return chipSummaryList
+
+
+def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500):
+
+
+ longMin = -500
+ latMin = -500
+ if latCutOff == -500:
+ latMax = 500
+ else:
+ latMax = latCutOff
+
+ if lonCutOff == -500:
+ longMax = 500
+ else:
+ longMax = lonCutOff
+
+
+ outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson')
+ cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson,
+ '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)]
+
+ subprocess.call(cmd)
+ longMin = -500
+ latMin = -500
+ if latCutOff == -500:
+ latMax = 500
+ else:
+ latMin = latCutOff
+ latMax = 500
+
+
+ if lonCutOff == -500:
+ longMax = 500
+ else:
+ longMin = lonCutOff
+ longMax = 500
+
+ outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson')
+ cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson,
+ '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)]
+ subprocess.call(cmd)
+
+
+ return outputGeoJsonTrain, outputGeoJsonTest
+
+def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'):
+
+
+ df = gpd.read_file(geoJsonIn, encoding=encoding)
+ df['y'] = df.geometry.map(lambda p: p.y)
+ df['x'] = df.geometry.map(lambda p: p.x)
+ df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True)
+ df.drop(['x', 'y'], 1, inplace=True)
+
+ if geoJsonOut=='':
+ geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson')
+ df.to_file(geoJsonOut, driver='GeoJSON', encoding=encoding)
+
+ return geoJsonOut
+
+def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8'):
+
+ insidePoly = Polygon([(lonMin, latMin), (lonMin, latMax), (lonMax, latMax), (lonMax, latMin)])
+ df = gpd.read_file(geoJson, encoding=encoding)
+
+ outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson')
+ df[~df.intersects(insidePoly)].to_file(outputGeoJsonTrain, driver='GeoJSON', encoding=encoding)
+
+ outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson')
+ df[df.intersects(insidePoly)].to_file(outputGeoJsonTest, driver='GeoJSON', encoding=encoding)
+
+ return outputGeoJsonTrain, outputGeoJsonTest
+
+
+
+
+
+if __name__ == '__main__':
+
+ createOutVectorFile = True
+ srcVectorFile = '/path/To/PointOfInterestSummary.geojson'
+ outVectorFile = '/path/To/PolygonOfInterestSummary.geojson'
+ outputDirectory = '/path/To/processedDataPOI/'
+ baseName = 'AOI_3_Paris'
+ featureDescriptionJson = '../configFiles/OSM_Power.json'
+ className = 'POIAll'
+ seperateImageFolders = False
+ splitGeoJson = True
+ deduplicateGeoJsonFlag = True
+
+ if deduplicateGeoJsonFlag:
+ srcVectorFile = deduplicateGeoJson(srcVectorFile)
+
+ splitGeoJson_latMin = -22.94
+ splitGeoJson_latMax = 90
+ splitGeoJson_lonMin = -43.25
+ splitGeoJson_lonMax = 180
+
+ srcVectorFileList = [[srcVectorFile, 'all']]
+
+ if splitGeoJson:
+ srcVectorTrain, srcVectorTest = splitVectorFileGPD(srcVectorFile,
+ latMin=splitGeoJson_latMin,
+ latMax=splitGeoJson_latMax,
+ lonMin=splitGeoJson_lonMin,
+ lonMax=splitGeoJson_lonMax,
+ )
+
+ srcVectorFileList = [
+ [srcVectorTrain, 'train'],
+ [srcVectorTest, 'test']
+ ]
+
+
+
+
+
+ # List of Raster Images to Chip and an appropriate label.
+ # This list will take any type of Raster supported by GDAL
+ # VRTs are nice because they create a virtual mosaic and allow for images covering a wide area to be considered one
+ # In this case gdalbuildvrt can be run in a folder of tifs to create the VRT to be handed for processing
+ rasterFileList = [['/path/To/Pan-VRT.vrt', 'PAN'],
+ ['/path/To/MUL-VRT.vrt', 'MUL'],
+ ['/path/To/RGB-PanSharpen-VRT.vrt', 'RGB-PanSharpen'],
+ ['/path/To/MUL-PanSharpen-VRT.vrt', 'MUL-PanSharpen']
+ ]
+
+
+
+
+ ### Define Point of Interest Dictionary
+ # {'spacenetFeatureName':
+ # {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping
+ # 'featureChipScaleM': 200, # Size of chip to create in meters
+ # 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point
+ # # indicating in the geojson. This will be used in creating polygons for IOU scoring
+ # }
+
+ with open(featureDescriptionJson, 'r') as j:
+ pointOfInterestList = json.load(j)
+
+ # create Polygon of Interest from Point of Interest File. This will create bounding boxes of specified size.
+ for srcVectorFile, folderType in srcVectorFileList:
+ outVectorFile = srcVectorFile.replace('.geojson', 'poly.shp')
+ if createOutVectorFile:
+ createPolygonShapeFileGPD(srcVectorFile, pointOfInterestList, outVectorFile=outVectorFile)
+
+ outputDirectoryTmp = os.path.join(outputDirectory, folderType)
+
+ shapeFileSrcList = [
+ [outVectorFile, 'POIAll']
+ ]
+ # create Folder Structure to place files into.
+
+ for rasterFile in rasterFileList:
+ for keyName in pointOfInterestList.keys():
+ tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], keyName.replace(' ', ''))
+ if not os.path.exists(tmpPath):
+ os.makedirs(tmpPath)
+
+ for objectName in pointOfInterestList[keyName].keys():
+ tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], keyName.replace(' ', ''), objectName.replace(" ",""))
+ if not os.path.exists(tmpPath):
+ os.makedirs(tmpPath)
+
+ # create Processed Point of Interest Data.
+ createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList,
+ baseName=baseName,
+ className='',
+ outputDirectory=outputDirectoryTmp,
+ seperateImageFolders=seperateImageFolders,
+ minPartialToInclue=0.70)
+
+
+
+
+
diff --git a/python/createSpaceNetPOI_Round2GPD_clean.py b/python/createSpaceNetPOI_Round2GPD_clean.py
new file mode 100644
index 0000000..5219e19
--- /dev/null
+++ b/python/createSpaceNetPOI_Round2GPD_clean.py
@@ -0,0 +1,358 @@
+from spaceNetUtilities import geoToolsGPD as gT
+import json
+import os
+import subprocess
+import geopandas as gpd
+from geopandas.geoseries import Polygon
+import geopandas_osm
+import osmnx as ox
+import rasterio
+import shapely.wkt
+import pandas as pd
+
+
+
+def returnBoundBoxM(tmpGeom, metersRadius=50):
+    """Return the axis-aligned bounding box around an OGR point geometry.
+
+    tmpGeom: OGR point (read via GetX()/GetY()).
+    metersRadius: radius in meters of the circle drawn around the point.
+    Returns the envelope (bounding rectangle) of that circle.
+    """
+    poly = gT.createPolygonFromCenterPoint(tmpGeom.GetX(), tmpGeom.GetY(),
+                                           metersRadius)
+
+    # envelope of the buffered circle == square bounding box of the point
+    polyEnv = poly.envelope
+
+    return polyEnv
+
+def createPolygonShapeFileFromOSM(polyBounds, pointOfInterestList, outVectorFile='', debug=True):
+
+ gdfOSM = gpd.GeoDataFrame([])
+ for pointOfInterestKey in pointOfInterestList.keys():
+ gdfPOITemp = geopandas_osm.osm.query_osm('way', polyBounds, recurse='down',
+ tags='{}'.format(pointOfInterestKey))
+
+ gdfOSM.append(gdfPOITemp)
+
+ gdfPOITemp = geopandas_osm.osm.query_osm('node', polyBounds, recurse='down',
+ tags='{}'.format(pointOfInterestKey))
+
+ gdfOSM.append(gdfPOITemp)
+
+ gdfOSM.drop_duplicates(subset=['id'], keep='first', inplace=True)
+
+ ## convert closed LineStrings to Polygon
+ geomList = []
+ for geom in gdfOSM.geometry.values:
+ if geom.is_closed & geom.geom_type == "LineString":
+ geom = Polygon(geom)
+ else:
+ geom
+ geomList.append(geom)
+
+ gdfOSM.geometry = geomList
+
+ gdfOSM = gdfOSM[(gdfOSM.geom_type=='Point') or (gdfOSM.geom_type=='Polygon')]
+ ##
+
+ gdfFinal = createPolygonShapeFileGPD(pointOfInterestList, gdfOSM, outVectorFile=outVectorFile)
+
+ return gdfFinal
+
+
+def createPolygonShapeFileFromShapefile(srcVectorFile, pointOfInterestList, outVectorFile=''):
+
+ gdfOrig = gpd.read_file(srcVectorFile)
+ gdfFinal = createPolygonShapeFileGPD(pointOfInterestList, gdfOrig, outVectorFile=outVectorFile)
+
+ return gdfFinal
+
+
+
+def createPolygonShapeFileGPD(pointOfInterestList, gdfOrig, outVectorFile=''):
+
+ ## todo add support for polygons in addition to points
+ firstKey = True
+ ## Iterage through keys for valid points of interest, These correspond to OSM Tags such as Power or Places.
+ for pointOfInterestKey in pointOfInterestList.keys():
+ ## Select from Vector File all rows where Point of Interest Columns is not an empty String
+ gdfKey = gdfOrig[gdfOrig[pointOfInterestKey] != '']
+
+ # Get list of defined features in the set Tag List, i.e. power=objectName
+
+ objectNameList = pointOfInterestList[pointOfInterestKey].keys()
+
+ # iterate through object names to apply specific actions with regard to the object like bounding box size.
+
+ for objectName in objectNameList:
+ # select only those objects where the key=object i.e power=tower
+ gdfPOITemp = gdfKey[gdfKey[pointOfInterestKey] == objectName]
+
+ if gdfPOITemp.size>0:
+ utm_crs, latlong_crs = gT.createUTMandLatLonCrs(gdfPOITemp.geometry.values[0])
+ # "compensator": {
+ # "featureIdNum": 1,
+ # "featureChipScaleM": 200,
+ # "featureBBoxSizeM": 5
+ # },
+ # project subset to UTM so that actions can be done with regard to meters
+ gdfPOITempUTM = gdfPOITemp.to_crs(utm_crs)
+ # buffer geometry to create circle around point with x meters
+ gdfPOITempUTM.geometry=gdfPOITempUTM.buffer(pointOfInterestList[pointOfInterestKey][objectName]['featureBBoxSizeM'])
+ # calculate the envelope of the circle to create a bounding box for each object
+ gdfPOITempUTM.geometry=gdfPOITempUTM.envelope
+
+ # reporject to wgs84 (lat long)
+ gdfPOITemp = gdfPOITempUTM.to_crs(latlong_crs)
+ # assign a name to spacefeat for future use in the vector: "power:tower"
+ gdfPOITemp['spacefeat']=pointOfInterestList[pointOfInterestKey][objectName]['spaceFeatureName']
+
+ # if first key, create final gdf
+ if firstKey:
+ gdfFinal = gdfPOITemp
+ firstKey = False
+ else:
+ # else add new to end of gdfFinal
+ gdfFinal = gdfFinal.concat(gdfPOITemp)
+
+ if outVectorFile != '':
+ gdfFinal.to_file(outVectorFile)
+
+ return gdfFinal
+
+
+def createProcessedPOIDataGPD(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList,
+ baseName='',
+ className='',
+ outputDirectory='',
+ seperateImageFolders=False,
+ minPartialToInclue = 0.70,
+ defaultfeatureChipScaleM=200,
+ verbose=False
+ ):
+
+ fieldName = pointOfInterestList.keys()[0]
+ fieldOfInterestList = pointOfInterestList[fieldName]
+ chipSummaryList = []
+
+
+ ## determinePixelSize in Meters
+ if verbose:
+ print(rasterFileList)
+ print(rasterFileList[0][0])
+
+ with rasterio.open(rasterFileList[0][0]) as src:
+ geoTransform = src.affine.to_gdal()
+
+ metersPerPix = abs(round(geoTransform[5]*111111,1))
+
+ ## process pointShapeFile
+ srcDf = gpd.read_file(srcVectorFile)
+
+ ## check if fieldName is valid otherwise features are detectable:
+ if fieldName in srcDf.columns:
+ print("{} is detected, processing srcFile = {}".format(fieldName, srcVectorFile))
+ else:
+ print("Error {} is not a valid column Name, unable to process srcFile = {}".format(fieldName, srcVectorFile))
+ print("Error Field name= {} is not in column names = {}". format(fieldName, srcDf.columns))
+ print("Ensure {} is a column name, unable to process srcFile = {}".format(fieldName, srcVectorFile))
+
+ return -1
+
+ utm_crs, latlong_crs = gT.createUTMandLatLonCrs(srcDf.centroid.values[0])
+
+ ## not sure if needed
+ #gdf_utm = srcDf.to_crs(utm_crs)
+ #halfClipSizeXm = round((clipSizeM/metersPerPix)/2)
+ #gdf_utm.buffer(halfClipSizeXm).envelope
+
+
+
+
+ imgId = 0
+ for idx, feature in srcDf.iterrows():
+
+ geom = feature.geometry.centroid
+
+
+ featureName = feature[fieldName]
+
+ if seperateImageFolders:
+ className = featureName.replace(' ', '')
+ else:
+ className = ''
+ if featureName in fieldOfInterestList.keys():
+ clipSize = fieldOfInterestList[featureName]['featureChipScaleM']
+ else:
+ clipSize = defaultfeatureChipScaleM
+
+
+ halfClipSizeXm = round((clipSize/metersPerPix)/2)
+
+
+ maxXCut = geom.x + halfClipSizeXm*geoTransform[1]
+ maxYCut = geom.y + abs(halfClipSizeXm*geoTransform[5])
+ minYCut = geom.y - abs(halfClipSizeXm*geoTransform[5])
+ minXCut = geom.x - halfClipSizeXm*geoTransform[1]
+
+ imgId = imgId + 1
+
+ chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeFileSrcList,
+ maxXCut, maxYCut, minYCut, minXCut,
+ rasterFileBaseList=[],
+ minpartialPerc=minPartialToInclue,
+ outputPrefix='',
+ createPix=False,
+ rasterPolyEnvelope=shapely.wkt.loads('POLYGON EMPTY'),
+ className=className,
+ baseName=baseName,
+ imgId=imgId)
+
+ chipSummaryList.append(chipSummary)
+
+ return chipSummaryList
+
+
+def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500):
+    """Split a GeoJSON into train/test files with ogr2ogr -clipsrc.
+
+    -500 is a sentinel meaning "no cutoff" (lat/lon are always within
+    [-500, 500]).  Train keeps features below the cutoff(s); test keeps
+    features above.  Returns (trainPath, testPath).
+    """
+    # --- train half: clip everything below the cutoff(s) ---
+    longMin = -500
+    latMin = -500
+    if latCutOff == -500:
+        latMax = 500
+    else:
+        latMax = latCutOff
+
+    if lonCutOff == -500:
+        longMax = 500
+    else:
+        longMax = lonCutOff
+
+
+    outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson')
+    cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson,
+           '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)]
+
+    subprocess.call(cmd)
+    # --- test half: clip everything above the cutoff(s) ---
+    longMin = -500
+    latMin = -500
+    if latCutOff == -500:
+        latMax = 500
+    else:
+        latMin = latCutOff
+        latMax = 500
+
+
+    if lonCutOff == -500:
+        longMax = 500
+    else:
+        longMin = lonCutOff
+        longMax = 500
+
+    outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson')
+    cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson,
+           '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)]
+    subprocess.call(cmd)
+
+
+    return outputGeoJsonTrain, outputGeoJsonTest
+
+def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'):
+
+
+ df = gpd.read_file(geoJsonIn, encoding=encoding)
+ df['y'] = df.geometry.map(lambda p: p.y)
+ df['x'] = df.geometry.map(lambda p: p.x)
+ df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True)
+ df.drop(['x', 'y'], 1, inplace=True)
+
+ if geoJsonOut=='':
+ geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson')
+ df.to_file(geoJsonOut, driver='GeoJSON', encoding=encoding)
+
+ return geoJsonOut
+
+def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8'):
+
+ insidePoly = Polygon([(lonMin, latMin), (lonMin, latMax), (lonMax, latMax), (lonMax, latMin)])
+ df = gpd.read_file(geoJson, encoding=encoding)
+
+ outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson')
+ df[~df.intersects(insidePoly)].to_file(outputGeoJsonTrain, driver='GeoJSON', encoding=encoding)
+
+ outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson')
+ df[df.intersects(insidePoly)].to_file(outputGeoJsonTest, driver='GeoJSON', encoding=encoding)
+
+ return outputGeoJsonTrain, outputGeoJsonTest
+
+
+
+
+def processPointShapeFile():
+    """Placeholder: point-shapefile processing is not implemented yet."""
+
+    pass
+
+
+
+if __name__ == '__main__':
+
+ createOutVectorFile = True
+ srcVectorFile = '/path/To/PointOfInterestSummary.geojson'
+ outVectorFile = '/path/To/PolygonOfInterestSummary.geojson'
+ outputDirectory = '/path/To/processedDataPOI/'
+ baseName = 'AOI_3_Paris'
+ featureDescriptionJson = '../configFiles/OSM_Power.json'
+ className = 'POIAll'
+ seperateImageFolders = True
+ splitGeoJson = False
+
+
+
+
+ srcVectorFileList = [[srcVectorFile, 'all']]
+
+ # List of Raster Images to Chip and an appropriate label.
+ # This list will take any type of Raster supported by GDAL
+ # VRTs are nice becasuse they create a virtual mosaic and allow for images covering a wide area to be considered one
+ # In this case gdalbuildvrt can be run in a folder of tifs to create the VRT to be handed for processing
+ rasterFileList = [['/path/To/Pan-VRT.vrt', 'PAN'],
+ ['/path/To/MUL-VRT.vrt', 'MUL']
+ ['/path/To/RGB-PanSharpen-VRT.vrt', 'RGB-PanSharpen']
+ ['/path/To/MUL-PanSharpen-VRT.vrt', 'MUL-PanSharpen']
+ ]
+
+
+
+
+ ### Define Point of Interest Dictionary
+ # {'spacenetFeatureName':
+ # {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping
+ # 'featureChipScaleM': 200, # Size of chip to create in meters
+ # 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point
+ # # indicating in the geojson. This will be used in creating polygons for IOU scoring
+ # }
+
+ with open(featureDescriptionJson, 'r') as j:
+ pointOfInterestList = json.load(j)
+
+
+ # create Folder Structure to place files into.
+
+ for rasterFile in rasterFileList:
+ for keyName in pointOfInterestList.keys():
+ tmpPath = os.path.join(outputDirectory, rasterFile[1], keyName.replace(' ', ''))
+ if not os.path.exists(tmpPath):
+ os.makedirs(tmpPath)
+
+ for objectName in pointOfInterestList[keyName].keys():
+ tmpPath = os.path.join(outputDirectory, rasterFile[1], keyName.replace(' ', ''), objectName.replace(" ",""))
+ if not os.path.exists(tmpPath):
+ os.makedirs(tmpPath)
+
+ # create Processed Point of Interest Data.
+ createProcessedPOIDataGPD(srcVectorFile, pointOfInterestList, rasterFileList, srcVectorFileList,
+ baseName=baseName,
+ className='',
+ outputDirectory=outputDirectory,
+ seperateImageFolders=seperateImageFolders,
+ minPartialToInclue=0.70)
+
+
+
+
+
diff --git a/python/evaluateScene.py b/python/evaluateScene.py
index 6d0bb04..58ba1f3 100644
--- a/python/evaluateScene.py
+++ b/python/evaluateScene.py
@@ -241,7 +241,7 @@ def combineGeoJsonAndConvertToWGS84(baseName, rasterLocationList,
removeGeoJsonAfter=True):
srcBaseName = os.path.splitext(baseName)[0]
geoJsonList = glob.glob(srcBaseName+"_*.geojson")
- print geoJsonList
+ print(geoJsonList)
rasterList = []
for rasterLocation in rasterLocationList:
@@ -321,18 +321,6 @@ def combineGeoJsonAndConvertToWGS84(baseName, rasterLocationList,
-
-
-
-
-
-
-
-
-
-
-
-
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Evaluate Score for SpaceNet')
diff --git a/python/evaluateScene_Total.py b/python/evaluateScene_Total.py
new file mode 100644
index 0000000..e449a6a
--- /dev/null
+++ b/python/evaluateScene_Total.py
@@ -0,0 +1,371 @@
+from spaceNetUtilities import evalTools as eT
+from spaceNetUtilities import geoToolsGPD as gT
+import numpy as np
+import csv
+import multiprocessing
+import time
+import argparse
+import os
+import glob
+import geopandas as gpd
+from osgeo import ogr, osr, gdal
+
+
+def writeAOISummaryToCSV(resultsDict,csvwriter):
+    """Write one AOI's summary metrics as label/value rows to an open CSV writer.
+
+    resultsDict is the dict built by evaluateSpaceNetSolution_Total (its
+    expected shape is shown in the template comment below).  A trailing
+    blank row separates consecutive AOI summaries.
+    NOTE: 'RecalTotal' (sic) is the key actually produced by the evaluator;
+    it must not be "corrected" here without changing the producer too.
+    """
+    csvwriter.writerow(['TruthFile', resultsDict['TruthFile']])
+    csvwriter.writerow(['ProposalFile', resultsDict['ProposalFile']])
+    csvwriter.writerow(['AOI_Name', resultsDict['AOI_Name']])
+    csvwriter.writerow(['Summary Results'])
+    csvwriter.writerow(['F1Score Total', resultsDict['F1ScoreTotal']])
+    csvwriter.writerow(['Precision', resultsDict['PrecisionTotal']])
+    csvwriter.writerow(['Recall', resultsDict['RecalTotal']])
+    csvwriter.writerow(['True Positive Total', resultsDict['TruePositiveTotal']])
+    csvwriter.writerow(['False Positive Total', resultsDict['FalsePositiveTotal']])
+    csvwriter.writerow(['False Negative Total', resultsDict['FalseNegativeTotal']])
+    csvwriter.writerow([''])
+
+    # Expected resultsDict shape, for reference:
+    # resultsDict = {'AOI_Name': aoiName
+    #                'TruthFile': truth_fp,
+    #                'ProposalFile': test_fp,
+    #                'F1ScoreTotal': F1ScoreTotal,
+    #                'PrecisionTotal': precision,
+    #                'RecalTotal': recall,
+    #                'TruePositiveTotal': true_pos_total,
+    #                'FalsePositiveTotal': false_pos_total,
+    #                'FalseNegativeTotal': false_neg_total,
+    #                'PerImageStatsResultList': result_list,
+    #                'OutputSummaryFile': resultsOutputFile}
+
+
+
+
+def writeResultsToScreen(resultsDict):
+
+ print('AOI of Interest', resultsDict['AOI_Name'])
+ print('True_Pos_Total', resultsDict['TruePositiveTotal'])
+ print('False_Pos_Total', resultsDict['FalsePositiveTotal'])
+ print('False_Neg_Total', resultsDict['FalseNegativeTotal'])
+ print('F1ScoreTotal', resultsDict['F1ScoreTotal'])
+
+
+def writePerChipToCSV(resultsDictList, csvwriter):
+ resultsDict = resultsDictList[0]
+ csvwriter.writerow(['ImageId', 'F1Score', 'True Positive Count', 'False Positive Count', 'False Negative Count'])
+ for result in resultsDict['PerImageStatsResultList']:
+ tmpList = [result[1]]
+ tmpList.extend(result[0])
+ csvwriter.writerow(tmpList)
+
+def evaluateSpaceNetSolution_Total(summaryTruthFile, summaryProposalFile, resultsOutputFile='',
+                                   useParallelProcessing=False, minPolygonSize=0,
+                                   iouThreshold=0.5,
+                                   AOIList=['Total',
+                                            'AOI_1_Rio',
+                                            'AOI_2_Vegas',
+                                            'AOI_3_Paris',
+                                            'AOI_4_Shanghai',
+                                            'AOI_5_Khartoum'],
+                                   debug=False
+                                   ):
+    """Score a proposal GeoJSON against a truth GeoJSON, per AOI and overall.
+
+    Computes true/false positive/negative counts via eT.evalfunction at the
+    given IOU threshold, then derives precision, recall, and F1 per AOI in
+    AOIList ('Total' aggregates everything).  Prints each AOI's summary and,
+    when resultsOutputFile is non-empty, writes a CSV summary plus per-chip
+    rows.  Returns the list of per-AOI results dicts.
+    """
+    truth_fp = summaryTruthFile
+    test_fp = summaryProposalFile
+    # check for cores available
+    if useParallelProcessing:
+
+        max_cpu = multiprocessing.cpu_count()
+        parallel = True
+    else:
+        max_cpu = 1
+        parallel = False
+
+    # initialize scene counts
+    # NOTE(review): these three accumulators are never used below.
+    true_pos_counts = []
+    false_pos_counts = []
+    false_neg_counts = []
+
+    t0 = time.time()
+    # Start Ingest Of Truth and Test Case
+
+    truthDF = gpd.read_file(truth_fp)
+    testDF = gpd.read_file(test_fp)
+    polyFlag = 'poly'
+
+    ## TODO projectToUTM
+
+    # drop truth polygons smaller than the minimum size
+    # NOTE(review): .area is in the file's native CRS units (likely degrees
+    # for lat/lon data) -- confirm the intended unit of minPolygonSize.
+    truthDF = truthDF.loc[truthDF.area >= minPolygonSize]
+
+    t1 = time.time()
+    total = t1 - t0
+    if debug:
+        print('time of ingest: ', total)
+
+
+
+    prop_polysPoly = testDF.geometry.values
+    sol_polysPoly = truthDF.geometry.values
+    # NOTE(review): bad_count, F1ScoreList and ResultList are never used.
+    bad_count = 0
+    F1ScoreList = []
+    cpu_count = min(multiprocessing.cpu_count(), max_cpu)
+    if debug:
+        print('{}'.format(max_cpu))
+    # NOTE(review): a Pool is created even when parallel is False.
+    p = multiprocessing.Pool(processes=cpu_count)
+    ResultList = []
+
+    # spatial index over the truth polygons, shared by every evaluation call
+    truthIndex = truthDF.sindex
+    eval_function_input_list = [['ImageId', prop_polysPoly, sol_polysPoly, truthIndex]]
+
+
+    # Calculate Values
+    t3 = time.time()
+    if debug:
+        print('time For DataCreation {}s'.format(t3 - t1))
+
+    # result_list = p.map(eT.evalfunction, eval_function_input_list)
+    if parallel == False:
+        result_list = []
+        for eval_input in eval_function_input_list:
+            if resultsOutputFile != '':
+                # also dump a per-image result GeoJSON next to the CSV
+                result_list.append(eT.evalfunction(eval_input,
+                                                   resultGeoJsonName=os.path.splitext(resultsOutputFile)[0]+"_"+eval_input[0]+".geojson",
+                                                   threshold=iouThreshold))
+            else:
+                result_list.append(eT.evalfunction(eval_input,
+                                                   threshold=iouThreshold))
+    else:
+        result_list = p.map(eT.evalfunction, eval_function_input_list)
+
+    # split each result into its numeric stats (index 0) and its name (index 1)
+    result_listNP = np.asarray([item[0] for item in result_list])
+    result_listName = [item[1] for item in result_list]
+    AOIIndexList = []
+    resultsDictList = []
+    for AOI in AOIList:
+        if AOI !='Total':
+            # sum only the results whose name mentions this AOI
+            AOIIndex = [i for i, s in enumerate(result_listName) if AOI in s]
+            AOIIndexList.append(AOIIndex)
+            result_sum = np.sum(result_listNP[AOIIndex], axis=0)
+        else:
+            # 'Total' aggregates every result ('' is in every string)
+            AOIIndex = [i for i, s in enumerate(result_listName) if '' in s]
+            AOIIndexList.append(AOIIndex)
+            result_sum = np.sum(result_listNP, axis=0)
+
+
+        #result_sum = np.sum(result_listNP, axis=0)
+        # stats layout assumed: [F1, truePos, falsePos, falseNeg]
+        true_pos_total = result_sum[1]
+        false_pos_total = result_sum[2]
+        false_neg_total = result_sum[3]
+        # precision/recall/F1 with divide-by-zero guards
+        if (float(true_pos_total) + float(false_pos_total)) > 0:
+            precision = float(true_pos_total) / (float(true_pos_total) + float(false_pos_total))
+        else:
+            precision = 0
+
+        if (float(true_pos_total) + float(false_neg_total)) > 0:
+            recall = float(true_pos_total) / (float(true_pos_total) + float(false_neg_total))
+        else:
+            recall = 0
+
+        if (precision + recall) > 0:
+            F1ScoreTotal = 2.0 * precision * recall / (precision + recall)
+        else:
+            F1ScoreTotal = 0
+
+        # 'RecalTotal' (sic) is the key consumed by writeAOISummaryToCSV
+        resultsDict = {'AOI_Name': AOI,
+                       'TruthFile': truth_fp,
+                       'ProposalFile': test_fp,
+                       'F1ScoreTotal': F1ScoreTotal,
+                       'PrecisionTotal': precision,
+                       'RecalTotal': recall,
+                       'TruePositiveTotal': true_pos_total,
+                       'FalsePositiveTotal': false_pos_total,
+                       'FalseNegativeTotal': false_neg_total,
+                       'PerImageStatsResultList': result_list,
+                       'OutputSummaryFile': resultsOutputFile}
+
+        resultsDictList.append(resultsDict)
+        writeResultsToScreen(resultsDict)
+
+
+
+
+    if resultsOutputFile != '':
+        with open(resultsOutputFile, 'w') as csvFile:
+            csvwriter = csv.writer(csvFile, delimiter=',')
+            for resultsDict in resultsDictList:
+                writeAOISummaryToCSV(resultsDict, csvwriter)
+
+            writePerChipToCSV(resultsDictList, csvwriter)
+
+
+
+
+
+
+
+    return resultsDictList
+
+
+def combineGeoJsonAndConvertToWGS84(baseName, rasterLocationList,
+                                    AOIList=['Total',
+                                             'AOI_1_Rio',
+                                             'AOI_2_Vegas',
+                                             'AOI_3_Paris',
+                                             'AOI_4_Shanghai',
+                                             'AOI_5_Khartoum'],
+                                    removeGeoJsonAfter=True,
+                                    verbose=False):
+    """Merge per-image result GeoJSONs into one WGS84 shapefile per AOI.
+
+    Collects '<baseName>_*.geojson' files, matches each to a raster found
+    under rasterLocationList by image id, converts each feature's pixel
+    geometry to geographic coordinates via gT.pixelGeomToGeoGeom, and writes
+    '<baseName>_<AOI>Summary.shp' (EPSG:4326).  When removeGeoJsonAfter is
+    True the source GeoJSONs are deleted at the end.
+    """
+    srcBaseName = os.path.splitext(baseName)[0]
+    geoJsonList = glob.glob(srcBaseName+"_*.geojson")
+    if verbose:
+        print(geoJsonList)
+
+    rasterList = []
+    for rasterLocation in rasterLocationList:
+        rasterList.extend(glob.glob(os.path.join(rasterLocation, '*.tif')))
+
+
+
+    for AOI in AOIList:
+        # geojsons/rasters belonging to this AOI (substring match on name)
+        AOIgeoJsonList = [s for i, s in enumerate(geoJsonList) if AOI in s]
+        AOIimageList = [s for i, s in enumerate(rasterList) if AOI in s]
+
+        #print AOIimageList
+        outShapeFile = srcBaseName+"_"+AOI+"Summary.shp"
+        outDriver = ogr.GetDriverByName("ESRI Shapefile")
+        # Remove output shapefile if it already exists
+        if os.path.exists(outShapeFile):
+            outDriver.DeleteDataSource(outShapeFile)
+        outDataSource = outDriver.CreateDataSource(outShapeFile)
+        srs = osr.SpatialReference()
+        srs.ImportFromEPSG(4326)
+        outLayer = outDataSource.CreateLayer("AOI_Results", srs, geom_type=ogr.wkbPolygon)
+
+        # clone the field schema from the first geojson
+        # NOTE(review): raises IndexError when geoJsonList is empty -- confirm
+        # callers guarantee at least one result file.
+        inDataSource = ogr.Open(geoJsonList[0], 0)
+        inLayer = inDataSource.GetLayer()
+        # Add input Layer Fields to the output Layer
+        inLayerDefn = inLayer.GetLayerDefn()
+        for i in range(0, inLayerDefn.GetFieldCount()):
+            fieldDefn = inLayerDefn.GetFieldDefn(i)
+            outLayer.CreateField(fieldDefn)
+        # Get the output Layer's Feature Definition
+        outLayerDefn = outLayer.GetLayerDefn()
+
+        # release OGR handles (assigning 0 drops the references)
+        inLayerDefn = 0
+        inLayer = 0
+        inDataSource = 0
+        for AOIgeoJson in AOIgeoJsonList:
+            # image id is the filename minus prefix and extension
+            imageId = AOIgeoJson.replace(srcBaseName+'_', "").replace('.geojson', '')
+            rasterName = [s for i, s in enumerate(AOIimageList) if imageId in s]
+
+            # skip geojsons that have no matching raster
+            if len(rasterName)>0:
+                rasterName = rasterName[0]
+                #print rasterName
+                inDataSource = ogr.Open(AOIgeoJson, 0)
+                inLayer = inDataSource.GetLayer()
+
+                targetRaster = gdal.Open(rasterName)
+                geomTransform = targetRaster.GetGeoTransform()
+
+                # NOTE(review): the inner field loop reuses 'i'; harmless for
+                # a range-based outer loop but worth renaming eventually.
+                for i in range(0, inLayer.GetFeatureCount()):
+                    # Get the input Feature
+                    inFeature = inLayer.GetFeature(i)
+                    # Create output Feature
+                    outFeature = ogr.Feature(outLayerDefn)
+                    # Add field values from input Layer
+                    for i in range(0, outLayerDefn.GetFieldCount()):
+                        outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))
+                    # Set geometry as centroid
+                    geom = inFeature.GetGeometryRef()
+                    #print geom.ExportToWkt()
+                    # [GeoWKT, PixWKT])
+                    geomList = gT.pixelGeomToGeoGeom(geom, rasterName, targetSR='', geomTransform='', breakMultiPolygonPix=False)
+                    #print geomList[0][0].ExportToWkt()
+                    if geomList:
+                        outFeature.SetGeometry(geomList[0][0])
+
+                    # Add new feature to output Layer
+                    outLayer.CreateFeature(outFeature)
+                    outFeature = None
+                    inFeature = None
+
+
+
+
+    if removeGeoJsonAfter:
+        for f in geoJsonList:
+            os.remove(f)
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(description='Evaluate Score for SpaceNet')
+ parser.add_argument("summaryTruthFile",
+ help="The Location of Summary Ground Truth csv File"
+ "Summary File should have a header = ImageId, BuildingId, polygonPixWKT, polygonGeoPix "
+ "Format is '{},{},{},{}.format(ImageId, BuildingId, polygonPixWKT, polygonGeoPix)',"
+ "unless --geoJson flag is set"
+ "See spaceNet competition details for more information about file format"
+ )
+ parser.add_argument("summaryProposalFile",
+ help="The Location of summary Propsal csv File"
+ "Summary File should have a header = ImageId, BuildingId, polygonPixWKT, Confidence "
+ "followed by values"
+ "Format is '{},{},{},{}.format(ImageId, BuildingId, polygonPixWKT, Confidence)'"
+ "unless --geoJson flag is set"
+ )
+ parser.add_argument("--polygonMinimumPixels",
+ help="The minimum number of pixels a polygon must have to be considered valid"
+ "The minimum for spacenet round 2 is 20 pixels",
+ type=int,
+ default=20)
+ parser.add_argument("--iouThreshold",
+ help="The IOU threshold for a True Positive"
+ "Spacenet uses 0.5",
+ type=float,
+ default=0.5)
+
+ parser.add_argument("--resultsOutputFile",
+ help="If you would like summary data outwritten to a file, specify the file",
+ default='')
+ parser.add_argument("--geoJson",
+ help='results Submitted are in geoJson Format',
+ action='store_true')
+
+ parser.add_argument("--useParallelProcessing",
+ help='Convert Image from Native format to 8bit',
+ action='store_true')
+
+ parser.add_argument("--rasterLocation",
+ help='Image Directory List',
+ action='append',
+ default=[]
+ )
+
+ args = parser.parse_args()
+ # load Truth and Test File Locations
+ AOIList = ['Total',
+ 'AOI_1_Rio',
+ 'AOI_2_Vegas',
+ 'AOI_3_Paris',
+ 'AOI_4_Shanghai',
+ 'AOI_5_Khartoum']
+ resultsOutputFile = args.resultsOutputFile
+ summaryDict = evaluateSpaceNetSolution_Total(args.summaryTruthFile,
+ args.summaryProposalFile,
+ resultsOutputFile=resultsOutputFile,
+ processgeoJson=args.geoJson,
+ useParallelProcessing=args.useParallelProcessing,
+ minPolygonSize=args.polygonMinimumPixels,
+ iouThreshold=args.iouThreshold,
+ AOIList=AOIList)
+
+
+
+ if resultsOutputFile != '':
+ combineGeoJsonAndConvertToWGS84(resultsOutputFile,
+ rasterLocationList=args.rasterLocation)
+
+
+
diff --git a/python/externalVectorProcessing.py b/python/externalVectorProcessing.py
index da921fb..157c275 100644
--- a/python/externalVectorProcessing.py
+++ b/python/externalVectorProcessing.py
@@ -10,9 +10,9 @@
def buildTindex(rasterFolder, rasterExtention='.tif'):
rasterList = glob.glob(os.path.join(rasterFolder, '*{}'.format(rasterExtention)))
- print(rasterList)
+ #print(rasterList)
- print(os.path.join(rasterFolder, '*{}'.format(rasterExtention)))
+ #print(os.path.join(rasterFolder, '*{}'.format(rasterExtention)))
memDriver = ogr.GetDriverByName('MEMORY')
gTindex = memDriver.CreateDataSource('gTindex')
@@ -54,7 +54,7 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput
else:
gTindex = ogr.Open(rasterTileIndex,0)
gTindexLayer = gTindex.GetLayer()
-
+ print(vectorSrcFile)
shapeSrc = ogr.Open(vectorSrcFile,0)
chipSummaryList = []
for feature in gTindexLayer:
@@ -77,7 +77,7 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput
if __name__ == "__main__":
- parser = argparse.ArgumentParser()
+ parser = argparse.ArgumentParser(description='This code will push a new Vector Source into already tiled imagery')
parser.add_argument("-imgDir", "--imgDir", type=str,
help="Directory of Raster Images")
parser.add_argument("-vecSrc", "--vectorSrcFile", type=str,
@@ -93,7 +93,8 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput
default='.tif')
parser.add_argument("-o", "--outputCSV", type=str,
- help="Output file name and location for truth summary CSV equivalent to SpacenetV2 competition")
+ help="Output file name and location for truth summary CSV equivalent to SpacenetV2 competition",
+ default='')
parser.add_argument("-pixPrecision", "--pixelPrecision", type=int,
help="Number of decimal places to include for pixel, uses round(xPix, pixPrecision)"
"Default = 2",
@@ -110,10 +111,10 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput
rasterFolderLocation = args.imgDir
- vectorSrcFile = args.imgDir
+ vectorSrcFile = args.vectorSrcFile
vectorPrefix = args.vectorPrefix
rasterPrefix = args.rasterPrefix
- pixPrecision = args.pixPrecision
+ pixPrecision = args.pixelPrecision
createProposalFile = args.CreateProposalFile
rasterFileExtension = args.rasterExtension
@@ -122,18 +123,21 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput
rasterFolderBaseName = os.path.basename(os.path.dirname(rasterFolderLocation))
geoJsonOutputDirectory = os.path.join(os.path.dirname(vectorSrcFile), rasterFolderBaseName)
+    print(geoJsonOutputDirectory)  # bug fix: Python-2 print statement in a py3-converted file
+ if not os.path.exists(geoJsonOutputDirectory):
+ os.makedirs(geoJsonOutputDirectory)
chipSummaryList = createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutputDirectory, rasterTileIndex='',
geoJsonPrefix=vectorPrefix, rasterFileExtenstion=rasterFileExtension,
rasterPrefixToReplace=rasterPrefix
)
-
- outputCSVFileName = geoJsonOutputDirectory+"OSM_Proposal.csv"
- lT.createCSVSummaryFile(chipSummaryList, outputCSVFileName,
- replaceImageID=rasterPrefix+"_",
- pixPrecision=pixPrecision,
- createProposalsFile=createProposalFile
- )
+ if args.outputCSV != '':
+ outputCSVFileName = args.outputCSV
+ lT.createCSVSummaryFile(chipSummaryList, outputCSVFileName,
+ replaceImageID=rasterPrefix+"_",
+ pixPrecision=pixPrecision,
+ createProposalsFile=createProposalFile
+ )
diff --git a/python/mbtilesConvertScratch.py b/python/mbtilesConvertScratch.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/spaceNetUtilities/__init__.pyc b/python/spaceNetUtilities/__init__.pyc
index ab294b9..9cead97 100644
Binary files a/python/spaceNetUtilities/__init__.pyc and b/python/spaceNetUtilities/__init__.pyc differ
diff --git a/python/spaceNetUtilities/createSpaceNetPOI_Round2GPD.py b/python/spaceNetUtilities/createSpaceNetPOI_Round2GPD.py
new file mode 100644
index 0000000..1316373
--- /dev/null
+++ b/python/spaceNetUtilities/createSpaceNetPOI_Round2GPD.py
@@ -0,0 +1,380 @@
+from osgeo import ogr, gdal, osr
+from spaceNetUtilities import geoToolsGPD as gT
+import json
+import os
+import subprocess
+import geopandas as gpd
+from geopandas.geoseries import Polygon
+import geopandas_osm
+
+def returnBoundBoxM(tmpGeom, metersRadius=50):
+ poly = gT.createPolygonFromCenterPoint(tmpGeom.GetX(), tmpGeom.GetY(),
+ metersRadius)
+
+ polyEnv = poly.envelope
+
+
+
+
+ return polyEnv
+def createPolygonShapeFileFromOSM(polyBounds, pointOfInterestList, outVectorFile='', debug=True):
+ gdfOSM = gpd.GeoDataFrame([])
+ for pointOfInterestKey in pointOfInterestList.keys():
+ gdfPOITemp = geopandas_osm.osm.query_osm('way', polyBounds, recurse='down', tags='{}'.format(pointOfInterestKey))
+ gdfOSM = gdfOSM.concat(gdfPOITemp)
+
+ gdfFinal = createPolygonShapeFileGPD(pointOfInterestList, gdfOSM, outVectorFile=outVectorFile)
+
+ return gdfFinal
+
+
+def createPolygonShapeFileFromShapefile(srcVectorFile, pointOfInterestList, outVectorFile=''):
+
+ gdfOrig = gpd.read_file(srcVectorFile)
+ gdfFinal = createPolygonShapeFileGPD(pointOfInterestList, gdfOrig, outVectorFile=outVectorFile)
+
+ return gdfFinal
+
+
+
+def createPolygonShapeFileGPD(pointOfInterestList, gdfOrig, outVectorFile=''):
+
+ firstKey = True
+ ## Iterate through keys for valid points of interest. These correspond to OSM Tags such as Power or Places.
+ for pointOfInterestKey in pointOfInterestList.keys():
+ ## Select from Vector File all rows where Point of Interest Columns is not an empty String
+ gdfKey = gdfOrig[gdfOrig[pointOfInterestKey] != '']
+
+ # Get list of defined features in the set Tag List, i.e. power=objectName
+
+ objectNameList = pointOfInterestList[pointOfInterestKey].keys()
+
+ # iterate through object names to apply specific actions with regard to the object like bounding box size.
+
+ for objectName in objectNameList:
+ # select only those objects where the key=object i.e power=tower
+ gdfPOITemp = gdfKey[gdfKey[pointOfInterestKey] == objectName]
+
+ # "compensator": {
+ # "featureIdNum": 1,
+ # "featureChipScaleM": 200,
+ # "featureBBoxSizeM": 5
+ # },
+ # project subset to UTM so that actions can be done with regard to meters
+ gdfPOITempUTM = ox.project_gdf(gdfPOITemp)
+ # buffer geometry to create circle around point with x meters
+ gdfPOITempUTM.geometry=gdfPOITempUTM.buffer(pointOfInterestList[pointOfInterestKey][objectName]['featureBBoxSizeM'])
+ # calculate the envelope of the circle to create a bounding box for each object
+ gdfPOITempUTM.geometry=gdfPOITempUTM.envelope
+
+ # reproject to WGS84 (lat/long)
+ gdfPOITemp = ox.project_gdf(gdfPOITempUTM, to_latlong=True)
+ # assign a name to spacefeat for future use in the vector: "power:tower"
+ gdfPOITemp['spacefeat']=pointOfInterestList[pointOfInterestKey][objectName]['spaceFeatureName']
+
+ # if first key, create final gdf
+ if firstKey:
+ gdfFinal = gdfPOITemp
+ firstKey = False
+ else:
+ # else add new to end of gdfFinal
+ gdfFinal = gdfFinal.concat(gdfPOITemp)
+
+ if outVectorFile != '':
+ gdfFinal.to_file(outVectorFile)
+
+ return gdfFinal
+
+def createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList):
+
+ ## Turn Point File into PolygonFile
+
+
+ srcDS = ogr.Open(srcVectorFile, 0)
+ inLayer = srcDS.GetLayer()
+ srs = osr.SpatialReference()
+ srs.ImportFromEPSG(4326)
+ outDriver = ogr.GetDriverByName("GeoJSON")
+ if os.path.exists(outVectorFile):
+ outDriver.DeleteDataSource(outVectorFile)
+
+ dstDS = outDriver.CreateDataSource(outVectorFile)
+
+ outLayer = dstDS.CreateLayer('polygons', srs, geom_type=ogr.wkbPolygon)
+
+ # Get Attribute names
+ inLayerDefn = inLayer.GetLayerDefn()
+ for i in range(0, inLayerDefn.GetFieldCount()):
+ fieldDefn = inLayerDefn.GetFieldDefn(i)
+ outLayer.CreateField(fieldDefn)
+
+ outLayerDefn = outLayer.GetLayerDefn()
+
+ # Copy Features
+ for i in range(0, inLayer.GetFeatureCount()):
+
+ inFeature = inLayer.GetFeature(i)
+ outFeature = ogr.Feature(outLayerDefn)
+ for i in range(0, outLayerDefn.GetFieldCount()):
+ outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))
+ # Set geometry to a bounding box drawn around the source point
+ tmpGeom = inFeature.GetGeometryRef()
+ poly = returnBoundBoxM(tmpGeom, metersRadius=pointOfInterestList[inFeature.GetField('spaceNetFeature')]['featureBBoxSizeM'])
+
+ outFeature.SetGeometry(poly)
+
+ outLayer.CreateFeature(outFeature)
+ outFeature = None
+ inFeature = None
+
+ srcDS = None
+ dstDS = None
+
+def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList,
+ baseName='',
+ className='',
+ outputDirectory='',
+ seperateImageFolders=False,
+ minPartialToInclue = 0.70,
+ fieldName='spacefeat'):
+
+
+ chipSummaryList = []
+ srcDS = srcDS = ogr.Open(srcVectorFile, 0)
+ srcLayer = srcDS.GetLayer()
+
+
+ shapeSrcList = []
+ for shapeFileSrc in shapeFileSrcList:
+ print(shapeFileSrc[1])
+ shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]])
+
+ ## determine pixel size in meters
+ print rasterFileList
+ print rasterFileList[0][0]
+ srcRaster = gdal.Open(rasterFileList[0][0])
+ geoTransform = srcRaster.GetGeoTransform()
+ metersPerPix = abs(round(geoTransform[5]*111111,1))
+
+ imgId = 0
+ for feature in srcLayer:
+
+ geom = feature.GetGeometryRef()
+
+ if geom.GetGeometryName != 'POINT':
+ geom = geom.Centroid()
+
+
+ spacenetFeatureName = feature.GetField('spaceNetFeature')
+ if seperateImageFolders:
+ className = spacenetFeatureName.replace(' ', '')
+ else:
+ className = ''
+ clipSize = pointOfInterestList[spacenetFeatureName]['featureChipScaleM']
+ halfClipSizeXm = round((clipSize/metersPerPix)/2)
+
+ xCenter = geom.GetX()
+ yCenter = geom.GetY()
+
+ maxXCut = xCenter + halfClipSizeXm*geoTransform[1]
+ maxYCut = yCenter + abs(halfClipSizeXm*geoTransform[5])
+ minYCut = yCenter - abs(halfClipSizeXm*geoTransform[5])
+ minXCut = xCenter - halfClipSizeXm*geoTransform[1]
+
+ imgId = imgId + 1
+
+ chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeSrcList,
+ maxXCut, maxYCut, minYCut, minXCut,
+ rasterFileBaseList=[],
+ minpartialPerc=minPartialToInclue,
+ outputPrefix='',
+ createPix=False,
+ rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"),
+ className=className,
+ baseName=baseName,
+ imgId=imgId)
+
+ chipSummaryList.append(chipSummary)
+
+ return chipSummaryList
+
+
+def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500):
+
+
+ longMin = -500
+ latMin = -500
+ if latCutOff == -500:
+ latMax = 500
+ else:
+ latMax = latCutOff
+
+ if lonCutOff == -500:
+ longMax = 500
+ else:
+ longMax = lonCutOff
+
+
+ outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson')
+ cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson,
+ '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)]
+
+ subprocess.call(cmd)
+ longMin = -500
+ latMin = -500
+ if latCutOff == -500:
+ latMax = 500
+ else:
+ latMin = latCutOff
+ latMax = 500
+
+
+ if lonCutOff == -500:
+ longMax = 500
+ else:
+ longMin = lonCutOff
+ longMax = 500
+
+ outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson')
+ cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson,
+ '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)]
+ subprocess.call(cmd)
+
+
+ return outputGeoJsonTrain, outputGeoJsonTest
+
+def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'):
+
+
+ df = gpd.read_file(geoJsonIn, encoding=encoding)
+ df['y'] = df.geometry.map(lambda p: p.y)
+ df['x'] = df.geometry.map(lambda p: p.x)
+ df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True)
+ df.drop(['x', 'y'], 1, inplace=True)
+
+ if geoJsonOut=='':
+ geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson')
+ df.to_file(geoJsonOut, driver='GeoJSON', encoding=encoding)
+
+ return geoJsonOut
+
+def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8'):
+
+ insidePoly = Polygon([(lonMin, latMin), (lonMin, latMax), (lonMax, latMax), (lonMax, latMin)])
+ df = gpd.read_file(geoJson, encoding=encoding)
+
+ outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson')
+ df[~df.intersects(insidePoly)].to_file(outputGeoJsonTrain, driver='GeoJSON', encoding=encoding)
+
+ outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson')
+ df[df.intersects(insidePoly)].to_file(outputGeoJsonTest, driver='GeoJSON', encoding=encoding)
+
+ return outputGeoJsonTrain, outputGeoJsonTest
+
+
+
+
+
+
+
+
+if __name__ == '__main__':
+
+ createOutVectorFile = True
+ srcVectorFile = '/path/To/PointOfInterestSummary.geojson'
+ outVectorFile = '/path/To/PolygonOfInterestSummary.geojson'
+ outputDirectory = '/path/To/processedDataPOI/'
+ baseName = 'AOI_3_Paris'
+ featureDescriptionJson = '../configFiles/OSM_Power.json'
+ className = 'POIAll'
+ seperateImageFolders = False
+ splitGeoJson = False
+ deduplicateGeoJsonFlag = True
+
+ if deduplicateGeoJsonFlag:
+ srcVectorFile = deduplicateGeoJson(srcVectorFile)
+
+ splitGeoJson_latMin = -22.94
+ splitGeoJson_latMax = 90
+ splitGeoJson_lonMin = -43.25
+ splitGeoJson_lonMax = 180
+
+ srcVectorFileList = [[srcVectorFile, 'all']]
+
+ #if splitGeoJson:
+ # srcVectorTrain, srcVectorTest = splitVectorFileGPD(srcVectorFile,
+ # latMin=splitGeoJson_latMin,
+ # latMax=splitGeoJson_latMax,
+ # lonMin=splitGeoJson_lonMin,
+ # lonMax=splitGeoJson_lonMax,
+ # )
+
+ # srcVectorFileList = [
+ # [srcVectorTrain, 'train'],
+ # [srcVectorTest, 'test']
+ # ]
+
+
+
+
+
+ # List of Raster Images to Chip and an appropriate label.
+ # This list will take any type of Raster supported by GDAL
+ # VRTs are nice because they create a virtual mosaic and allow images covering a wide area to be treated as one
+ # In this case gdalbuildvrt can be run in a folder of tifs to create the VRT to be handed for processing
+ rasterFileList = [['/path/To/Pan-VRT.vrt', 'PAN'],
+ ['/path/To/MUL-VRT.vrt', 'MUL']
+ ['/path/To/RGB-PanSharpen-VRT.vrt', 'RGB-PanSharpen']
+ ['/path/To/MUL-PanSharpen-VRT.vrt', 'MUL-PanSharpen']
+ ]
+
+
+
+
+ ### Define Point of Interest Dictionary
+ # {'spacenetFeatureName':
+ # {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping
+ # 'featureChipScaleM': 200, # Size of chip to create in meters
+ # 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point
+ # # indicating in the geojson. This will be used in creating polygons for IOU scoring
+ # }
+
+ with open(featureDescriptionJson, 'r') as j:
+ pointOfInterestList = json.load(j)
+
+ # create Polygon of Interest from Point of Interest File. This will create bounding boxes of specified size.
+ for srcVectorFile, folderType in srcVectorFileList:
+ outVectorFile = srcVectorFile.replace('.geojson', 'poly.shp')
+ if createOutVectorFile:
+ createPolygonShapeFileGPD(srcVectorFile, pointOfInterestList, outVectorFile=outVectorFile)
+
+ outputDirectoryTmp = os.path.join(outputDirectory, folderType)
+
+ shapeFileSrcList = [
+ [outVectorFile, 'POIAll']
+ ]
+ # create Folder Structure to place files into.
+
+ for rasterFile in rasterFileList:
+ for keyName in pointOfInterestList.keys():
+ tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], keyName.replace(' ', ''))
+ if not os.path.exists(tmpPath):
+ os.makedirs(tmpPath)
+
+ for objectName in pointOfInterestList[keyName].keys():
+ tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], keyName.replace(' ', ''), objectName.replace(" ",""))
+ if not os.path.exists(tmpPath):
+ os.makedirs(tmpPath)
+
+ # create Processed Point of Interest Data.
+ createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList,
+ baseName=baseName,
+ className='',
+ outputDirectory=outputDirectoryTmp,
+ seperateImageFolders=seperateImageFolders,
+ minPartialToInclue=0.70)
+
+
+
+
+
diff --git a/python/spaceNetUtilities/evalGraphTools.pyc b/python/spaceNetUtilities/evalGraphTools.pyc
new file mode 100644
index 0000000..313e7b2
Binary files /dev/null and b/python/spaceNetUtilities/evalGraphTools.pyc differ
diff --git a/python/spaceNetUtilities/evalTools.py b/python/spaceNetUtilities/evalTools.py
index 68cb183..f4e591d 100644
--- a/python/spaceNetUtilities/evalTools.py
+++ b/python/spaceNetUtilities/evalTools.py
@@ -129,7 +129,7 @@ def score(test_polys, truth_polys, threshold=0.5, truth_index=[],
return true_pos_count, false_pos_count, false_neg_count
-def evalfunction((image_id, test_polys, truth_polys, truth_index),
+def evalfunction(image_id, test_polys, truth_polys, truth_index,
resultGeoJsonName=[],
threshold = 0.5):
@@ -157,7 +157,7 @@ def evalfunction((image_id, test_polys, truth_polys, truth_index),
return ((F1score, true_pos_count, false_pos_count, false_neg_count), image_id)
-def create_eval_function_input((image_ids, (prop_polysIdList, prop_polysPoly), (sol_polysIdsList, sol_polysPoly))):
+def create_eval_function_input(image_ids, prop_polysIdList, prop_polysPoly, sol_polysIdsList, sol_polysPoly):
evalFunctionInput = []
diff --git a/python/spaceNetUtilities/evalTools.pyc b/python/spaceNetUtilities/evalTools.pyc
index 1c9bf52..eefc2fc 100644
Binary files a/python/spaceNetUtilities/evalTools.pyc and b/python/spaceNetUtilities/evalTools.pyc differ
diff --git a/python/spaceNetUtilities/evalToolsGPD.ipynb b/python/spaceNetUtilities/evalToolsGPD.ipynb
new file mode 100644
index 0000000..d3065b1
--- /dev/null
+++ b/python/spaceNetUtilities/evalToolsGPD.ipynb
@@ -0,0 +1,460 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import sys\n",
+ "sys.path.extend(['/Users/dlindenbaum/cosmiQGit/spaceNetUtilities_DaveL/python'])\n",
+ "#from spaceNetUtilities import evalToolsGPD as eT\n",
+ "#from spaceNetUtilities import geoToolsGPD as gT"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING:Fiona:PROJ data files not located, PROJ_LIB not set\n"
+ ]
+ }
+ ],
+ "source": [
+ "import geopandas as gpd\n",
+ "#srcTruthVector = '/Users/dlindenbaum/dataStorage/spaceNetPrivate_Truth/AOI_2_Vegas/srcData/vectorData/Vegas_Footprints.shp'\n",
+ "srcTestVector1 = '/Users/dlindenbaum/dataStorage/xd_Vegas/15OCT22183656-S2AS_R1C2-056155973080_01_P001.shp'\n",
+ "srcTestVector2 = '/Users/dlindenbaum/dataStorage/xd_Vegas/15OCT22183656-S2AS_R1C3-056155973080_01_P001.shp'\n",
+ "srcTileIndex = '/Users/dlindenbaum/dataStorage/xd_Vegas/mul-pansharpen.shp'"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING:Fiona:PROJ data files not located, PROJ_LIB not set\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING:Fiona:PROJ data files not located, PROJ_LIB not set\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING:Fiona:PROJ data files not located, PROJ_LIB not set\n"
+ ]
+ }
+ ],
+ "source": [
+ "#truthDF = gpd.read_file(srcTruthVector)\n",
+ "testDF1 = gpd.read_file(srcTestVector1)\n",
+ "testDF2 = gpd.read_file(srcTestVector2)\n",
+ "srcTileDF = gpd.read_file(srcTileIndex)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "testIndex1 = testDF1.sindex\n",
+ "testIndex2 = testDF2.sindex"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 38,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "srcTindex1 = srcTileDF.loc[srcTileDF.contains(testDF1.geometry.values[0].centroid)]\n",
+ "srcTindex2 = srcTileDF.loc[srcTileDF.contains(testDF2.geometry.values[0].centroid)]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "testDF1['location'] = srcTindex1['location'].values[0]\n",
+ "testDF2['location'] = srcTindex2['location'].values[0]\n",
+ "testDF1['index1'] = testDF1.index\n",
+ "testDF2['index2'] = testDF2.index"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Edge\ntest1 Length of Matchs = 0\ntest2 Length of Matchs = 0\nEdge\ntest1 Length of Matchs = 0\ntest2 Length of Matchs = 36\nEdge\ntest1 Length of Matchs = 0\ntest2 Length of Matchs = 45\nEdge\ntest1 Length of Matchs = 41\ntest2 Length of Matchs = 29\n"
+ ]
+ }
+ ],
+ "source": [
+ "from shapely.geometry import LineString\n",
+ "coordsList = list(srcTindex2.geometry.values[0].exterior.coords)\n",
+ "lineStringList = []\n",
+ "for idx in range(1, len(coordsList)):\n",
+ " lineStringList.append(LineString([coordsList[idx-1], coordsList[idx]]))\n",
+ "\n",
+ "pixelDimension = 0.0000027\n",
+ "numPixel = 5\n",
+ "\n",
+ "for lineboundary in lineStringList:\n",
+ " print(\"Edge\")\n",
+ " print('test1 Length of Matchs = {}'.format(len(list(testIndex1.intersection(lineboundary.buffer(numPixel*pixelDimension).bounds)))))\n",
+ " print('test2 Length of Matchs = {}'.format(len(list(testIndex2.intersection(lineboundary.buffer(numPixel*pixelDimension).bounds)))))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'LINESTRING (-115.2632808 36.24184080000634, -115.2632808 36.2639592)'"
+ ]
+ },
+ "execution_count": 8,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "lineStringList[3].wkt"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 25,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "lineboundary = lineStringList[3]\n",
+ "numPixel = 2\n",
+ "testDF1Intersection = testDF1.iloc[list(testIndex1.intersection(lineboundary.buffer(numPixel*pixelDimension).bounds))]\n",
+ "testDF2Intersection = testDF2.iloc[list(testIndex2.intersection(lineboundary.buffer(numPixel*pixelDimension).bounds))]\n",
+ "testDF1Intersection.geometry = testDF1Intersection.buffer(numPixel*pixelDimension)\n",
+ "testDF2Intersection.geometry = testDF2Intersection.buffer(numPixel*pixelDimension)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 26,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "res_union = gpd.overlay(testDF1Intersection, testDF2Intersection, how='union')\n",
+ "res_intersection = gpd.overlay(testDF1Intersection, testDF2Intersection, how='intersection')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 27,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "union shape (23, 5)\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "1 2.145807e-09\n5 8.390497e-10\n15 8.123570e-10\n21 2.279853e-10\n24 9.676290e-11\ndtype: float64"
+ ]
+ },
+ "execution_count": 27,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "print(\"union shape {}\".format(res_union[(res_union['raster_val']==1.0) & (res_union['raster_val_2']==1.0)].shape))\n",
+ "res_union[(res_union['raster_val']==1.0) & (res_union['raster_val_2']==1.0)].head().area"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 28,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "intersection shape (23, 5)\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "0 2.145807e-09\n1 8.390497e-10\n2 8.123570e-10\n3 2.279853e-10\n4 9.676290e-11\ndtype: float64"
+ ]
+ },
+ "execution_count": 28,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "print(\"intersection shape {}\".format(res_intersection[(res_intersection['raster_val']==1.0) & (res_intersection['raster_val_2']==1.0)].shape))\n",
+ "res_intersection[(res_intersection['raster_val']==1.0) & (res_intersection['raster_val_2']==1.0)].head().area"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 29,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " geometry | \n",
+ " location | \n",
+ " location_2 | \n",
+ " raster_val | \n",
+ " raster_val_2 | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " POLYGON ((-115.2633455998505 36.2419426698028,... | \n",
+ " 15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIF | \n",
+ " None | \n",
+ " 1.0 | \n",
+ " NaN | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " POLYGON ((-115.2632862000001 36.24283980000606... | \n",
+ " 15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIF | \n",
+ " 15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIF | \n",
+ " 1.0 | \n",
+ " 1.0 | \n",
+ "
\n",
+ " \n",
+ " 2 | \n",
+ " POLYGON ((-115.2632821499251 36.24284502199291... | \n",
+ " 15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIF | \n",
+ " None | \n",
+ " 1.0 | \n",
+ " NaN | \n",
+ "
\n",
+ " \n",
+ " 3 | \n",
+ " POLYGON ((-115.2632821499251 36.24284502199291... | \n",
+ " None | \n",
+ " 15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIF | \n",
+ " NaN | \n",
+ " 1.0 | \n",
+ "
\n",
+ " \n",
+ " 4 | \n",
+ " POLYGON ((-115.2632800697965 36.243042300006, ... | \n",
+ " None | \n",
+ " 15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIF | \n",
+ " NaN | \n",
+ " 1.0 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " geometry | \n",
+ " location | \n",
+ " location_2 | \n",
+ " raster_val | \n",
+ " raster_val_2 | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " POLYGON ((-115.2633455998505 36.2419426698028,... | \n",
+ " 15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIF | \n",
+ " None | \n",
+ " 1.0 | \n",
+ " NaN | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " POLYGON ((-115.2632862000001 36.24283980000606... | \n",
+ " 15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIF | \n",
+ " 15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIF | \n",
+ " 1.0 | \n",
+ " 1.0 | \n",
+ "
\n",
+ " \n",
+ " 2 | \n",
+ " POLYGON ((-115.2632821499251 36.24284502199291... | \n",
+ " 15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIF | \n",
+ " None | \n",
+ " 1.0 | \n",
+ " NaN | \n",
+ "
\n",
+ " \n",
+ " 3 | \n",
+ " POLYGON ((-115.2632821499251 36.24284502199291... | \n",
+ " None | \n",
+ " 15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIF | \n",
+ " NaN | \n",
+ " 1.0 | \n",
+ "
\n",
+ " \n",
+ " 4 | \n",
+ " POLYGON ((-115.2632800697965 36.243042300006, ... | \n",
+ " None | \n",
+ " 15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIF | \n",
+ " NaN | \n",
+ " 1.0 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ]
+ },
+ "execution_count": 29,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "res_union.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 33,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Using matplotlib backend: MacOSX\n"
+ ]
+ }
+ ],
+ "source": [
+ "%matplotlib auto"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 36,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 36,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "res_union[(res_union['raster_val']==1.0) & (res_union['raster_val_2']==1.0)].head().plot()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 37,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 37,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "res_union.plot()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ ""
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2.0
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
\ No newline at end of file
diff --git a/python/spaceNetUtilities/evalToolsGPD.py b/python/spaceNetUtilities/evalToolsGPD.py
new file mode 100644
index 0000000..c8630a8
--- /dev/null
+++ b/python/spaceNetUtilities/evalToolsGPD.py
@@ -0,0 +1,179 @@
+import numpy as np
+import geoToolsGPD as gT
+from shapely.geometry import mapping, Polygon
+import fiona
+import os
+from tqdm import tqdm
+def iou(test_poly, truth_polys, truth_index=[]):
+ fidlistArray = []
+ iou_list = []
+ if truth_index:
+ fidlist = gT.search_rtree(test_poly, truth_index)
+ #print(test_poly)
+ for fid in fidlist:
+ if not test_poly.is_valid:
+ test_poly = test_poly.buffer(0.0)
+
+ intersection_result = test_poly.intersection(truth_polys[fid])
+ fidlistArray.append(fid)
+
+ if intersection_result.geom_type == 'Polygon' or \
+ intersection_result.geom_type == 'MultiPolygon':
+ intersection_area = intersection_result.area
+ union_area = test_poly.union(truth_polys[fid]).area
+ iou_list.append(intersection_area / union_area)
+
+ else:
+ iou_list.append(0)
+
+
+ return iou_list, fidlistArray
+
+def write_geojson(geojson_name,
+ feature_list,
+ output_crs={'init': 'epsg:4326'},
+ output_schema={'geometry': 'Polygon',
+ 'properties': {'ImageId': 'str',
+ 'IOUScore': 'float:15.5',
+ 'BuildingId': 'int'}
+ },
+ output_driver='GeoJSON'
+ ):
+ with fiona.open(geojson_name,'w',
+ driver=output_driver,
+ crs=output_crs,
+ schema=output_schema) as sink:
+
+ for feature in feature_list:
+ sink.write(feature)
+
+
+
+
+def score(test_polys, truth_polys, threshold=0.5, truth_index=[],
+ resultGeoJsonName = [],
+ imageId = []):
+
+ # Define internal functions
+ #
+ output_schema = {'geometry': 'Polygon',
+ 'properties': {'ImageId': 'str',
+ 'IOUScore': 'float:15.5',
+ 'BuildingId': 'int'}
+ }
+ output_crs = {'init': 'epsg:4326'}
+
+ # Find detections using threshold/argmax/IoU for test polygons
+ true_pos_count = 0
+ false_pos_count = 0
+ truth_poly_count = len(truth_polys)
+
+ if resultGeoJsonName:
+ if not imageId:
+ imageId = os.path.basename(os.path.splitext(resultGeoJsonName)[0])
+
+ feature_list = []
+
+ for test_poly in tqdm(test_polys):
+ if truth_polys:
+ iou_list, fidlist = iou(test_poly, truth_polys, truth_index)
+ if not iou_list:
+ maxiou = 0
+ else:
+ maxiou = np.max(iou_list)
+
+ if maxiou >= threshold:
+ true_pos_count += 1
+ truth_index.delete(fidlist[np.argmax(iou_list)], truth_polys[fidlist[np.argmax(iou_list)]].bounds)
+ #del truth_polys[fidlist[np.argmax(iou_list)]]
+
+ feature = {'geometry': mapping(test_poly),
+ 'properties': {'ImageId': imageId,
+ 'IOUScore': maxiou,
+ 'BuildingId': fidlist[np.argmax(iou_list)]
+ }
+ }
+
+
+
+ else:
+ false_pos_count += 1
+
+ feature = {'geometry': mapping(test_poly),
+ 'properties': {'ImageId': imageId,
+ 'IOUScore': maxiou,
+ 'BuildingId': -1
+ }
+ }
+
+
+
+
+
+
+
+
+
+ else:
+ false_pos_count += 1
+ feature = {'geometry': mapping(test_poly),
+ 'properties': {'ImageId': imageId,
+ 'IOUScore': 0,
+ 'BuildingId': 0
+ }
+ }
+
+
+ feature_list.append(feature)
+
+ if resultGeoJsonName:
+ write_geojson(resultGeoJsonName, feature_list)
+
+
+ false_neg_count = truth_poly_count - true_pos_count
+
+ return true_pos_count, false_pos_count, false_neg_count
+
+
+def evalfunction_GPD(image_id, test_polys, truth_polys, truth_index,
+ resultGeoJsonName=[],
+ threshold = 0.5):
+
+
+ if len(truth_polys)==0:
+ true_pos_count = 0
+ false_pos_count = len(test_polys)
+ false_neg_count = 0
+ else:
+ true_pos_count, false_pos_count, false_neg_count = score(test_polys, truth_polys.tolist(),
+ truth_index=truth_index,
+ resultGeoJsonName=resultGeoJsonName,
+ imageId=image_id,
+ threshold=threshold
+ )
+
+
+ if (true_pos_count > 0):
+
+ precision = float(true_pos_count) / (float(true_pos_count) + float(false_pos_count))
+ recall = float(true_pos_count) / (float(true_pos_count) + float(false_neg_count))
+ F1score = 2.0 * precision * recall / (precision + recall)
+ else:
+ F1score = 0
+ return ((F1score, true_pos_count, false_pos_count, false_neg_count), image_id)
+
+
+def create_eval_function_input(image_ids, prop_polysIdList, prop_polysPoly, sol_polysIdsList, sol_polysPoly):
+
+ evalFunctionInput = []
+
+
+ for image_id in image_ids:
+ test_polys = prop_polysPoly[np.argwhere(prop_polysIdList == image_id).flatten()]
+ truth_polys = sol_polysPoly[np.argwhere(sol_polysIdsList == image_id).flatten()]
+ truth_index = gT.create_rtree_from_poly(truth_polys)
+ evalFunctionInput.append([image_id, test_polys, truth_polys, truth_index])
+
+ return evalFunctionInput
+
+
diff --git a/python/spaceNetUtilities/evalToolsGPD.pyc b/python/spaceNetUtilities/evalToolsGPD.pyc
new file mode 100644
index 0000000..50fba47
Binary files /dev/null and b/python/spaceNetUtilities/evalToolsGPD.pyc differ
diff --git a/python/spaceNetUtilities/geoTools.pyc b/python/spaceNetUtilities/geoTools.pyc
index 9ae04e8..5b804fd 100644
Binary files a/python/spaceNetUtilities/geoTools.pyc and b/python/spaceNetUtilities/geoTools.pyc differ
diff --git a/python/spaceNetUtilities/geoToolsGPD.py b/python/spaceNetUtilities/geoToolsGPD.py
new file mode 100644
index 0000000..507513f
--- /dev/null
+++ b/python/spaceNetUtilities/geoToolsGPD.py
@@ -0,0 +1,1086 @@
+from osgeo import gdal, osr, ogr
+import numpy as np
+import os
+import csv
+import subprocess
+import math
+import geopandas as gpd
+import shapely
+import pandas as pd
+import rasterio as rio
+import affine as af
+from shapely.geometry import Point
+import pyproj
+from shapely.geometry.polygon import Polygon
+from shapely.geometry.multipolygon import MultiPolygon
+from shapely.geometry.linestring import LineString
+from shapely.geometry.multilinestring import MultiLineString
+from functools import partial
try:
    import centerline
    import osmnx
except ImportError:
    # Optional: only needed by the centerline/buffer utilities below.
    # (The original bare `except` also printed a misleading message about
    # rtree; these imports are centerline and osmnx.)
    print("centerline or osmnx not installed, will break centerline evaluation code")
+
+
def import_summary_geojsonGPD(geojsonfilename, removeNoBuildings=True):
    """Read a summary SpaceNetV2 geojson into a geopandas GeoDataFrame.

    Keyword arguments:
    geojsonfilename -- geojson to read
    removeNoBuildings -- drop rows whose BuildingId == -1 (default True)
    """
    buildingList_df = gpd.read_file(geojsonfilename)

    if removeNoBuildings:
        # BuildingId == -1 marks samples that contain no buildings.
        buildingList_df = buildingList_df[buildingList_df['BuildingId'] != -1]

    # Mirror the geometry under the legacy 'poly' column name.
    buildingList_df['poly'] = buildingList_df.geometry

    return buildingList_df
+
+
def import_chip_geojson(geojsonfilename, ImageId=''):
    """Read a SpaceNetV2 chip geojson into a geopandas GeoDataFrame.

    Keyword arguments:
    geojsonfilename -- geojson to read
    ImageId -- explicit ImageId; when empty it is derived from the file name:
               os.path.splitext(os.path.basename(geojsonfilename))[0]
    """
    buildingList_df = gpd.read_file(geojsonfilename)

    # (Removed an unused `ogr.Open(geojsonfilename, 0)` call that leaked an
    # open datasource handle.)
    if ImageId == '':
        ImageId = os.path.splitext(os.path.basename(geojsonfilename))[0]

    buildingList_df['ImageId'] = ImageId
    # BuildingIds are 1-based within a chip.
    buildingList_df['BuildingId'] = range(1, len(buildingList_df) + 1)
    buildingList_df['poly'] = buildingList_df.geometry

    return buildingList_df
+
+
def mergePolyList(geojsonfilename):
    """Read a geojson and return the unary union of all its geometries.

    Keyword arguments:
    geojsonfilename -- geojson to read
    """
    return gpd.read_file(geojsonfilename).unary_union
+
def readwktcsv(csv_path):
    """Read a SpaceNetV2 csv and return a geopandas GeoDataFrame.

    Keyword arguments:
    csv_path -- path to csv of spacenetV2 ground truth or solution submission format
        csv Format Expected = ['ImageId', 'BuildingId', 'PolygonWKT_Pix', 'PolygonWKT_Geo'] or
        csv Format Expected = ['ImageId', 'BuildingId', 'PolygonWKT', 'Confidence']

    see https://community.topcoder.com/longcontest/?module=ViewProblemStatement&rd=16892&pm=14551 to
    learn more about the spacenetV2 csv formats

    Returns -1 when no recognized geometry column is present.
    """
    df = pd.read_csv(csv_path)
    crs = {}
    if 'PolygonWKT_Geo' in df.columns:
        geometry = [shapely.wkt.loads(x) for x in df['PolygonWKT_Geo'].values]
        # Geographic WKT is assumed to be WGS84.
        crs = {'init': 'epsg:4326'}
    elif 'PolygonWKT_Pix' in df.columns:
        geometry = [shapely.wkt.loads(x) for x in df['PolygonWKT_Pix'].values]
    elif 'PolygonWKT' in df.columns:
        geometry = [shapely.wkt.loads(x) for x in df['PolygonWKT'].values]
    else:
        # Fixed typo in the original message ("Eror").
        print('Error No Geometry Column detected, column must be called "PolygonWKT_Geo", "PolygonWKT_Pix", or "PolygonWKT"')
        return -1

    geo_df = gpd.GeoDataFrame(df, crs=crs, geometry=geometry)

    return geo_df
+
+
def exporttogeojson(geojsonfilename, geo_df):
    """Write a geopandas GeoDataFrame to a GeoJSON file.

    Keyword arguments:
    geojsonfilename -- geojson to create
    geo_df -- geopandas dataframe to write

    Returns the output file name.
    """
    geo_df.to_file(geojsonfilename, driver='GeoJSON')
    return geojsonfilename
+
+
def createmaskfrompolygons(polygons):
    """Placeholder: rasterize polygons into a mask (not yet implemented)."""
    ## todo implement through label tools
    pass
    ## see labelTools createRasterFromGeoJson
+
+
def geomGeo2geomPixel(geom, affineObject=[], input_raster='', gdal_geomTransform=[]):
    """Transform a shapely geometry from geospatial into pixel coordinates.

    geom -- shapely geometry in the raster's CRS
    affineObject -- rasterio-style affine, e.g. rasterio.open(input_raster).affine
    input_raster -- path to a raster supplying the georeferencing
    gdal_geomTransform -- GDAL 6-tuple, e.g. gdal.Open(input_raster).GetGeoTransform()

    Exactly one source of georeferencing is used, in the order above.
    """
    if not affineObject:
        if input_raster != '':
            affineObject = rio.open(input_raster).affine
        else:
            # Affine.from_gdal takes six scalar arguments, so the GDAL tuple
            # must be unpacked (the original passed the tuple as a single
            # argument, a TypeError at runtime).
            affineObject = af.Affine.from_gdal(*gdal_geomTransform)

    # geo -> pixel is the inverse of the raster's affine transform.
    affineObjectInv = ~affineObject

    geomTransform = shapely.affinity.affine_transform(geom,
                                                      [affineObjectInv.a,
                                                       affineObjectInv.b,
                                                       affineObjectInv.d,
                                                       affineObjectInv.e,
                                                       affineObjectInv.xoff,
                                                       affineObjectInv.yoff]
                                                      )

    return geomTransform
+
+
def geomPixel2geomGeo(geom, affineObject=[], input_raster='', gdal_geomTransform=[]):
    """Transform a shapely geometry from pixel into geospatial coordinates.

    geom -- shapely geometry in pixel coordinates
    affineObject -- rasterio-style affine, e.g. rasterio.open(input_raster).affine
    input_raster -- path to a raster supplying the georeferencing
    gdal_geomTransform -- GDAL 6-tuple, e.g. gdal.Open(input_raster).GetGeoTransform()

    Exactly one source of georeferencing is used, in the order above.
    """
    if not affineObject:
        if input_raster != '':
            affineObject = rio.open(input_raster).affine
        else:
            # Affine.from_gdal takes six scalar arguments, so the GDAL tuple
            # must be unpacked (the original passed the tuple as a single
            # argument, a TypeError at runtime).
            affineObject = af.Affine.from_gdal(*gdal_geomTransform)

    geomTransform = shapely.affinity.affine_transform(geom,
                                                      [affineObject.a,
                                                       affineObject.b,
                                                       affineObject.d,
                                                       affineObject.e,
                                                       affineObject.xoff,
                                                       affineObject.yoff]
                                                      )

    return geomTransform
+
+
+
def returnBoundBox(xCenter, yCenter, pixDim, affineObject=[], input_raster='', gdal_geomTransform=[], pixelSpace=False):
    """Build a square box of half-size pixDim centered at (xCenter, yCenter).

    The square is the envelope of a buffered point.  When pixelSpace is
    False the box is converted from pixel to geospatial coordinates via
    geomPixel2geomGeo.
    """
    geom = Point(xCenter, yCenter)
    geom = geom.buffer(pixDim)
    geom = geom.envelope

    if not pixelSpace:
        geom = geomPixel2geomGeo(geom, affineObject=affineObject, input_raster=input_raster, gdal_geomTransform=gdal_geomTransform)
    # (Removed the original's no-op `else: geom` branch.)

    return geom
+
+
def createBoxFromLine(tmpGeom, ratio=1, halfWidth=-999, transformRequired=True, transform_WGS84_To_UTM='', transform_UTM_To_WGS84=''):
    """Create a polygon box oriented along a line segment.

    tmpGeom -- shapely LineString (WGS84 when transformRequired is True)
    ratio -- length/width ratio used when halfWidth is not supplied
    halfWidth -- half the box width in meters (-999 means derive from ratio)
    transformRequired -- project to UTM first so measurements are in meters

    Returns (polyGeom, areaM, angRad, lengthM).
    """
    if transformRequired:
        if transform_WGS84_To_UTM == '':
            transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(tmpGeom)

        # Fixed misspelling: shapely.ops.transform (was "tranform").
        tmpGeom = shapely.ops.transform(transform_WGS84_To_UTM, tmpGeom)

    # calculatuate Centroid
    lengthM = tmpGeom.length
    if halfWidth == -999:
        halfWidth = lengthM / (2 * ratio)

    # Flat cap keeps the box ends square with the line ends.
    polyGeom = tmpGeom.buffer(halfWidth, cap_style=shapely.geometry.CAP_STYLE.flat)

    # Orientation of the first segment.  math.atan2 takes (dy, dx) as two
    # arguments; the original wrapped them in one tuple (TypeError).
    angRad = math.atan2(tmpGeom.coords[1][1] - tmpGeom.coords[0][1],
                        tmpGeom.coords[1][0] - tmpGeom.coords[0][0])

    areaM = polyGeom.area

    if transformRequired:
        polyGeom = shapely.ops.transform(transform_UTM_To_WGS84, polyGeom)

    return (polyGeom, areaM, angRad, lengthM)
+
+
+
+
+
def geoWKTToPixelWKT(geom, inputRaster, targetSR, geomTransform, pixPrecision=2):
    """Deprecated stub: formerly converted geospatial WKT into pixel WKT.

    Prints 'Deprecated' and returns None; use geomGeo2geomPixel instead.
    """
    print('Deprecated')
    ## shapely.wkt.dumps(geom, trim=pixPrecision)
    ## newGeom = geom
    ## newGeom.coords = np.round(np.array(geom.coords), pixPrecision)
+
+
+
def search_rtree(test_building, index):
    """Query an rtree index with a candidate building geometry.

    test_building -- shapely geometry; only Polygon/MultiPolygon are searched
    index -- rtree index built over the ground-truth polygons

    Returns the index entries whose bounding boxes intersect the geometry's
    bounds, or an empty list for any other geometry type.
    """
    if test_building.geom_type in ('Polygon', 'MultiPolygon'):
        return index.intersection(test_building.bounds)
    return []
+
+
def get_envelope(poly):
    """Return the axis-aligned bounding envelope of a geometry."""
    return poly.envelope
+
def utm_getZone(longitude):
    """Return the UTM zone number (1-60) containing the given longitude.

    Zones are 6 degrees wide starting at longitude -180.
    """
    return int(1 + (longitude + 180.0) / 6.0)
+
+
def utm_isNorthern(latitude):
    """Return 1 for the northern hemisphere (latitude >= 0), else 0."""
    return 0 if latitude < 0.0 else 1
+
def createUTMandLatLonCrs(polyGeom):
    """Return (utm_crs, latlong_crs) proj dicts for the geometry's centroid.

    The UTM zone is chosen from the centroid longitude.  For southern-
    hemisphere centroids the proj 'south' flag is now set (the original
    computed the hemisphere but never used it, producing an incorrect CRS
    south of the equator).

    NOTE(review): the UTM datum here is NAD83/GRS80 while the other helpers
    in this file use WGS84 -- confirm this is intentional.
    """
    polyCentroid = polyGeom.centroid
    utm_zone = utm_getZone(polyCentroid.x)
    is_northern = utm_isNorthern(polyCentroid.y)

    utm_crs = {'datum': 'NAD83',
               'ellps': 'GRS80',
               'proj': 'utm',
               'zone': utm_zone,
               'units': 'm'}
    if not is_northern:
        # UTM southern-hemisphere zones need the proj '+south' flag.
        utm_crs['south'] = True

    latlong_crs = {'init': 'epsg:4326'}

    return utm_crs, latlong_crs
+
def createUTMTransform(polyGeom):
    """Build pyproj partials converting between WGS84 lon/lat and the UTM
    zone containing polyGeom's centroid.

    Returns (projectTO_UTM, projectTO_WGS); each partial is suitable for
    use with shapely.ops.transform.

    NOTE(review): the proj string embeds '+north' for northern-hemisphere
    zones, but proj's documented hemisphere flag is '+south' (north being
    the default) -- confirm the installed proj tolerates '+north'.
    """
    polyCentroid = polyGeom.centroid
    utm_zone = utm_getZone(polyCentroid.x)
    is_northern = utm_isNorthern(polyCentroid.y)
    if is_northern:
        directionIndicator = '+north'
    else:
        directionIndicator = '+south'

    # WGS84 lon/lat -> UTM
    projectTO_UTM = partial(
        pyproj.transform,
        pyproj.Proj("+proj=longlat +datum=WGS84 +no_defs"), #Proj(proj='latlong',datum='WGS84')
        pyproj.Proj("+proj=utm +zone={} {} +ellps=WGS84 +datum=WGS84 +units=m +no_defs".format(utm_zone,
                                                                                               directionIndicator))
    )

    # UTM -> WGS84 lon/lat
    projectTO_WGS = partial(
        pyproj.transform,
        pyproj.Proj("+proj=utm +zone={} {} +ellps=WGS84 +datum=WGS84 +units=m +no_defs".format(utm_zone,
                                                                                               directionIndicator)
                    ),
        pyproj.Proj("+proj=longlat +datum=WGS84 +no_defs"), # Proj(proj='latlong',datum='WGS84')

    )

    return projectTO_UTM, projectTO_WGS
+
+
def getRasterExtent(srcImage):
    """Return georeferencing and extent for an open rasterio dataset.

    srcImage -- rasterio dataset (reads .bounds and .affine)

    Returns (affine, extentPolygon, left, top, right, bottom).

    NOTE(review): some callers in this file pass a gdal.Open() dataset,
    which has neither .bounds nor .affine -- verify callers.
    """
    # Corner ring of the raster extent (shapely closes the ring implicitly).
    poly = Polygon(((srcImage.bounds.left, srcImage.bounds.top),
                    (srcImage.bounds.right, srcImage.bounds.top),
                    (srcImage.bounds.right, srcImage.bounds.bottom),
                    (srcImage.bounds.left, srcImage.bounds.bottom))
                   )

    return srcImage.affine, \
           poly, \
           srcImage.bounds.left, \
           srcImage.bounds.top, \
           srcImage.bounds.right, \
           srcImage.bounds.bottom
+
def createPolygonFromCenterPointXY(cX, cY, radiusMeters, transform_WGS_To_UTM_Flag=True):
    """Buffer a circle of radiusMeters around the coordinate (cX, cY).

    Thin wrapper around createPolygonFromCenterPoint.  The flag is now
    forwarded (the original hard-coded True, silently ignoring the caller's
    transform_WGS_To_UTM_Flag argument).
    """
    point = Point(cX, cY)

    return createPolygonFromCenterPoint(point, radiusMeters,
                                        transform_WGS_To_UTM_Flag=transform_WGS_To_UTM_Flag)
+
def createPolygonFromCenterPoint(point, radiusMeters, transform_WGS_To_UTM_Flag=True):
    """Buffer a circle of radiusMeters around a shapely Point.

    When transform_WGS_To_UTM_Flag is set, the point is projected to UTM so
    the buffer radius is in meters, then the result is projected back to
    WGS84.

    Fixes vs. original: shapely.ops.transform (was misspelled "tranform")
    and Point.buffer (shapely's method is lowercase; .Buffer is the ogr API).
    """
    transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(point)
    if transform_WGS_To_UTM_Flag:
        point = shapely.ops.transform(transform_WGS84_To_UTM, point)

    poly = point.buffer(radiusMeters)

    if transform_WGS_To_UTM_Flag:
        poly = shapely.ops.transform(transform_UTM_To_WGS84, poly)

    return poly
+
+
def createPolygonFromCentroidGDF(gdf, radiusMeters, transform_WGS_To_UTM_Flag=True):
    """Buffer a circle of radiusMeters around the first centroid of a
    GeoDataFrame.

    Fixes vs. original: `point` was referenced but never assigned
    (NameError), the no-argument gdf.to_crs() call did nothing, and the
    shapely methods were misspelled (.Buffer / shapely.ops.tranform).
    """
    # First feature's centroid is the buffer center.
    point = gdf.centroid.values[0]

    if transform_WGS_To_UTM_Flag:
        transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(point)
        point = shapely.ops.transform(transform_WGS84_To_UTM, point)

    poly = point.buffer(radiusMeters)

    if transform_WGS_To_UTM_Flag:
        poly = shapely.ops.transform(transform_UTM_To_WGS84, poly)

    return poly
+
+
def createPolygonFromCorners(left, bottom, right, top):
    """Return an axis-aligned shapely Polygon from corner coordinates.

    shapely's Polygon constructor takes a single sequence of (x, y) pairs;
    the original passed four separate tuples as positional arguments, which
    raises at runtime.
    """
    poly = Polygon([(left, top),
                    (right, top),
                    (right, bottom),
                    (left, bottom)])

    return poly
+
+
def clipShapeFileGPD(geoDF, outputFileName, polyToCut, minpartialPerc=0.0, shapeLabel='Geo', debug=False):
    """Filter a GeoDataFrame to polyToCut and write the result as GeoJSON.

    Rows are kept when the area of their intersection with polyToCut exceeds
    minpartialPerc of the feature's original area.  Adds 'partialDec'
    (area/origarea) and 'truncated' columns.

    Fixes vs. original: GeoDataFrame.intersection (was misspelled
    "intersetion").
    """
    outGeoJSon = os.path.splitext(outputFileName)[0] + '.geojson'
    if not os.path.exists(os.path.dirname(outGeoJSon)):
        os.makedirs(os.path.dirname(outGeoJSon))
    if debug:
        print(outGeoJSon)

    # Remember each feature's pre-clip area once.
    if 'origarea' in geoDF.columns:
        pass
    else:
        geoDF['origarea'] = geoDF.area

    cutGeoDF = geoDF[geoDF.intersection(polyToCut).area / geoDF['origarea'] > minpartialPerc]
    # NOTE(review): geometries are filtered but not actually clipped, so
    # partialDec is area/origarea of the *uncut* geometry; and 'truncated'
    # is True when the feature is whole -- both look inverted, confirm.
    cutGeoDF['partialDec'] = cutGeoDF.area / cutGeoDF['origarea']
    cutGeoDF['truncated'] = cutGeoDF['partialDec'] == 1.0

    ##TODO Verify this works in DockerBuild
    cutGeoDF.to_file(outGeoJSon, driver='GeoJSON')
+
def clipShapeFile(shapeSrc, outputFileName, polyToCut, minpartialPerc=0.0, shapeLabel='Geo', debug=False):
    """Clip an ogr vector datasource to polyToCut and write a GeoJSON.

    Deprecated: use clipShapeFileGPD instead.

    shapeSrc -- open ogr datasource containing the features to clip
    outputFileName -- output path; extension is replaced with .geojson
    polyToCut -- ogr geometry used as the clip boundary
    minpartialPerc -- keep features whose clipped/original area ratio is at
                      least this value
    shapeLabel -- unused here
    debug -- also write the clip outline to a second geojson

    Adds 'partialBuilding' and 'partialDec' fields to the output layer.
    """
    ## Deprecated
    print('Deprecated use clipShapeFileGPD')
    source_layer = shapeSrc.GetLayer()
    source_srs = source_layer.GetSpatialRef()
    # Create the output Layer

    outGeoJSon = os.path.splitext(outputFileName)[0] + '.geojson'
    if not os.path.exists(os.path.dirname(outGeoJSon)):
        os.makedirs(os.path.dirname(outGeoJSon))
    print(outGeoJSon)
    outDriver = ogr.GetDriverByName("geojson")
    if os.path.exists(outGeoJSon):
        outDriver.DeleteDataSource(outGeoJSon)

    if debug:
        outGeoJSonDebug = outputFileName.replace('.tif', 'outline.geojson')
        outDriverDebug = ogr.GetDriverByName("geojson")
        if os.path.exists(outGeoJSonDebug):
            outDriverDebug.DeleteDataSource(outGeoJSonDebug)
        # NOTE(review): outDriver (not outDriverDebug) creates the debug
        # datasource -- confirm this is intentional.
        outDataSourceDebug = outDriver.CreateDataSource(outGeoJSonDebug)
        outLayerDebug = outDataSourceDebug.CreateLayer("groundTruth", source_srs, geom_type=ogr.wkbPolygon)

        outFeatureDebug = ogr.Feature(source_layer.GetLayerDefn())
        outFeatureDebug.SetGeometry(polyToCut)
        outLayerDebug.CreateFeature(outFeatureDebug)


    outDataSource = outDriver.CreateDataSource(outGeoJSon)
    outLayer = outDataSource.CreateLayer("groundTruth", source_srs, geom_type=ogr.wkbPolygon)
    # Add input Layer Fields to the output Layer
    inLayerDefn = source_layer.GetLayerDefn()
    for i in range(0, inLayerDefn.GetFieldCount()):
        fieldDefn = inLayerDefn.GetFieldDefn(i)
        outLayer.CreateField(fieldDefn)
    outLayer.CreateField(ogr.FieldDefn("partialBuilding", ogr.OFTReal))
    outLayer.CreateField(ogr.FieldDefn("partialDec", ogr.OFTReal))
    outLayerDefn = outLayer.GetLayerDefn()
    # Only iterate source features that intersect the clip polygon.
    source_layer.SetSpatialFilter(polyToCut)
    for inFeature in source_layer:

        outFeature = ogr.Feature(outLayerDefn)

        # Copy every attribute field from the source feature.
        for i in range (0, inLayerDefn.GetFieldCount()):
            outFeature.SetField(inLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))

        geom = inFeature.GetGeometryRef()
        geomNew = geom.Intersection(polyToCut)
        partialDec = -1
        if geomNew:

            if geomNew.GetGeometryName()=='POINT':
                outFeature.SetField("partialDec", 1)
                outFeature.SetField("partialBuilding", 1)
            else:
                # Fraction of the original footprint that survived the clip.
                if geom.GetArea() > 0:
                    partialDec = geomNew.GetArea() / geom.GetArea()
                else:
                    partialDec = 0

                outFeature.SetField("partialDec", partialDec)

                if geom.GetArea() == geomNew.GetArea():
                    outFeature.SetField("partialBuilding", 0)
                else:
                    outFeature.SetField("partialBuilding", 1)


        else:
            # NOTE(review): "partialBuilding" is set twice here; one of the
            # two was probably meant to be "partialDec" -- confirm.
            outFeature.SetField("partialBuilding", 1)
            outFeature.SetField("partialBuilding", 1)


        outFeature.SetGeometry(geomNew)
        # NOTE(review): partialDec stays -1 for POINT intersections and
        # empty clips, so those features are dropped whenever
        # minpartialPerc > -1 -- confirm intended.
        if partialDec >= minpartialPerc:
            outLayer.CreateFeature(outFeature)
            #print ("AddFeature")
+
+
def cutChipFromMosaicGPD(rasterFileList, shapeFileSrcList, outlineSrc='', outputDirectory='', outputPrefix='clip_',
                         clipSizeMX=100, clipSizeMY=100, clipOverlap=0.0, minpartialPerc=0.0, createPix=False,
                         baseName='',
                         imgIdStart=-1,
                         parrallelProcess=False,
                         noBlackSpace=False,
                         randomClip=-1,
                         debug=False
                         ):
    """Tile the first raster's extent into chips and clip every raster and
    vector source to each chip.

    rasterFileList -- [['rasterLocation', 'rasterDescription'], ...]
    shapeFileSrcList -- [['shapeLocation', 'shapeDescription'], ...]
    outlineSrc -- optional vector outline restricting which chips are cut
    clipSizeMX/clipSizeMY -- chip size in meters (or pixels when createPix)
    clipOverlap -- fractional overlap between adjacent chips
    imgIdStart -- when != -1, chips get sequential img ids

    Returns a list of chip summary dicts from createclip().

    Fixes vs. original: shapely.ops.transform spelling ("tranform"), and
    ogr-style .Transform/.Intersects/.Intersection calls replaced with the
    shapely equivalents (transform / .intersects / .intersection).
    """
    srcImage = rio.open(rasterFileList[0][0])
    geoTrans, poly, ulX, ulY, lrX, lrY = getRasterExtent(srcImage)
    # geoTrans[1] w-e pixel resolution
    # geoTrans[5] n-s pixel resolution
    if outputDirectory == "":
        outputDirectory = os.path.dirname(rasterFileList[0][0])

    rasterFileBaseList = []
    for rasterFile in rasterFileList:
        rasterFileBaseList.append(os.path.basename(rasterFile[0]))

    # Work in UTM so the clip sizes are in meters.
    if not createPix:
        transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(poly)
        poly = shapely.ops.transform(transform_WGS84_To_UTM, poly)

    minX, minY, maxX, maxY = poly.bounds

    # return poly to WGS84
    if not createPix:
        poly = shapely.ops.transform(transform_UTM_To_WGS84, poly)

    shapeSrcList = []
    for shapeFileSrc in shapeFileSrcList:
        print(shapeFileSrc[1])
        shapeSrcList.append([ogr.Open(shapeFileSrc[0], 0), shapeFileSrc[1]])

    if outlineSrc == '':
        geomOutline = poly
    else:
        outline = gpd.read_file(outlineSrc)
        geomOutlineBase = outline.geometry[0]
        geomOutline = geomOutlineBase.intersection(poly)

    chipSummaryList = []

    for rasterFile in rasterFileList:
        if not os.path.exists(os.path.join(outputDirectory, rasterFile[1])):
            os.makedirs(os.path.join(outputDirectory, rasterFile[1]))
    idx = 0
    if createPix:
        if debug:
            print(geoTrans)
        # Interpret clip sizes as pixel counts; convert to map units.
        clipSizeMX = clipSizeMX * geoTrans[1]
        clipSizeMY = abs(clipSizeMY * geoTrans[5])

    xInterval = np.arange(minX, maxX, clipSizeMX * (1.0 - clipOverlap))
    if debug:
        print('minY = {}'.format(minY))
        print('maxY = {}'.format(maxY))
        print('clipsizeMX ={}'.format(clipSizeMX))
        print('clipsizeMY ={}'.format(clipSizeMY))

    yInterval = np.arange(minY, maxY, clipSizeMY * (1.0 - clipOverlap))

    if debug:
        print(xInterval)
        print(yInterval)

    for llX in xInterval:

        for llY in yInterval:
            uRX = llX + clipSizeMX
            uRY = llY + clipSizeMY

            # Shift the window back inside the image instead of padding
            # the chip with black.
            if noBlackSpace:
                if uRX > maxX:
                    uRX = maxX
                    llX = maxX - clipSizeMX
                if uRY > maxY:
                    uRY = maxY
                    llY = maxY - clipSizeMY

            polyCut = createPolygonFromCorners(llX, llY, uRX, uRY)

            if not createPix:
                # shapely transforms return a new geometry (the original
                # called ogr's in-place .Transform on a shapely object).
                polyCut = shapely.ops.transform(transform_UTM_To_WGS84, polyCut)
            ## add debug line do cuts
            if polyCut.intersects(geomOutline):

                if debug:
                    print("Do it.")

                minXCut = llX
                minYCut = llY
                maxXCut = uRX
                maxYCut = uRY

                if debug:
                    print('minYCut = {}'.format(minYCut))
                    print('maxYCut = {}'.format(maxYCut))
                    print('minXCut = {}'.format(minXCut))
                    print('maxXCut = {}'.format(maxXCut))
                    print('clipsizeMX ={}'.format(clipSizeMX))
                    print('clipsizeMY ={}'.format(clipSizeMY))

                idx = idx + 1
                if imgIdStart == -1:
                    imgId = -1
                else:
                    imgId = idx

                chipSummary = createclip(outputDirectory, rasterFileList, shapeSrcList,
                                         maxXCut, maxYCut, minYCut, minXCut,
                                         rasterFileBaseList=rasterFileBaseList,
                                         minpartialPerc=minpartialPerc,
                                         outputPrefix=outputPrefix,
                                         createPix=createPix,
                                         rasterPolyEnvelope=poly,
                                         baseName=baseName,
                                         imgId=imgId)

                chipSummaryList.append(chipSummary)

    return chipSummaryList
+
def cutChipFromMosaic(rasterFileList, shapeFileSrcList, outlineSrc='',outputDirectory='', outputPrefix='clip_',
                      clipSizeMX=100, clipSizeMY=100, clipOverlap=0.0, minpartialPerc=0.0, createPix=False,
                      baseName='',
                      imgIdStart=-1,
                      parrallelProcess=False,
                      noBlackSpace=False,
                      randomClip=-1):
    """Tile a mosaic into chips and clip rasters/vectors to each chip
    (GDAL-flavored predecessor of cutChipFromMosaicGPD).

    Returns a list of chip summary dicts from createclip().

    NOTE(review): this variant appears broken as written -- gdal.Open()'s
    dataset is passed to getRasterExtent (which reads rasterio attributes),
    shapely.ops.tranform is a misspelling of transform, and
    GetEnvelope()/Transform()/Intersects() are ogr methods invoked on
    shapely geometries.  Prefer cutChipFromMosaicGPD.
    """
    #rasterFileList = [['rasterLocation', 'rasterDescription']]
    # i.e rasterFileList = [['/path/to/3band_AOI_1.tif, '3band'],
    #                       ['/path/to/8band_AOI_1.tif, '8band']
    #                       ]
    # open Base Image
    #print(rasterFileList[0][0])
    srcImage = gdal.Open(rasterFileList[0][0])
    geoTrans, poly, ulX, ulY, lrX, lrY = getRasterExtent(srcImage)
    # geoTrans[1] w-e pixel resolution
    # geoTrans[5] n-s pixel resolution
    if outputDirectory=="":
        outputDirectory=os.path.dirname(rasterFileList[0][0])

    rasterFileBaseList = []
    for rasterFile in rasterFileList:
        rasterFileBaseList.append(os.path.basename(rasterFile[0]))

    if not createPix:
        transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(poly)
        # NOTE(review): misspelled shapely.ops.transform.
        poly = shapely.ops.tranform(transform_WGS84_To_UTM, poly)

    # NOTE(review): GetEnvelope is an ogr method; shapely uses .bounds.
    env = poly.GetEnvelope()
    minX = env[0]
    minY = env[2]
    maxX = env[1]
    maxY = env[3]

    #return poly to WGS84
    if not createPix:
        poly = shapely.ops.tranform(transform_UTM_To_WGS84, poly)

    shapeSrcList = []
    for shapeFileSrc in shapeFileSrcList:
        print(shapeFileSrc[1])
        shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]])

    if outlineSrc == '':
        geomOutline = poly
    else:
        outline = ogr.Open(outlineSrc)
        layer = outline.GetLayer()
        featureOutLine = layer.GetFeature(0)
        geomOutlineBase = featureOutLine.GetGeometryRef()
        geomOutline = geomOutlineBase.Intersection(poly)

    chipSummaryList = []

    for rasterFile in rasterFileList:
        if not os.path.exists(os.path.join(outputDirectory, rasterFile[1])):
            os.makedirs(os.path.join(outputDirectory, rasterFile[1]))
    idx = 0
    if createPix:
        print(geoTrans)
        # Interpret clip sizes as pixel counts; convert to map units.
        clipSizeMX=clipSizeMX*geoTrans[1]
        clipSizeMY=abs(clipSizeMY*geoTrans[5])

    xInterval = np.arange(minX, maxX, clipSizeMX*(1.0-clipOverlap))
    print('minY = {}'.format(minY))
    print('maxY = {}'.format(maxY))
    print('clipsizeMX ={}'.format(clipSizeMX))
    print('clipsizeMY ={}'.format(clipSizeMY))

    yInterval = np.arange(minY, maxY, clipSizeMY*(1.0-clipOverlap))
    print(xInterval)
    print(yInterval)
    for llX in xInterval:
        if parrallelProcess:
            # Placeholder: parallel chip cutting was never implemented.
            for llY in yInterval:
                pass

        else:
            for llY in yInterval:
                uRX = llX+clipSizeMX
                uRY = llY+clipSizeMY

                # check if uRX or uRY is outside image
                if noBlackSpace:
                    if uRX > maxX:
                        uRX = maxX
                        llX = maxX - clipSizeMX
                    if uRY > maxY:
                        uRY = maxY
                        llY = maxY - clipSizeMY

                polyCut = createPolygonFromCorners(llX, llY, uRX, uRY)

                if not createPix:
                    # NOTE(review): ogr-style in-place Transform on a
                    # shapely geometry -- see docstring.
                    polyCut.Transform(transform_UTM_To_WGS84)
                ## add debug line do cuts
                if (polyCut).Intersects(geomOutline):
                    print("Do it.")
                    #envCut = polyCut.GetEnvelope()
                    #minXCut = envCut[0]
                    #minYCut = envCut[2]
                    #maxXCut = envCut[1]
                    #maxYCut = envCut[3]

                    #debug for real
                    minXCut = llX
                    minYCut = llY
                    maxXCut = uRX
                    maxYCut = uRY

                    #print('minYCut = {}'.format(minYCut))
                    #print('maxYCut = {}'.format(maxYCut))
                    #print('minXCut = {}'.format(minXCut))
                    #print('maxXCut = {}'.format(maxXCut))

                    #print('clipsizeMX ={}'.format(clipSizeMX))
                    #print('clipsizeMY ={}'.format(clipSizeMY))

                    idx = idx+1
                    if imgIdStart == -1:
                        imgId = -1
                    else:
                        imgId = idx

                    chipSummary = createclip(outputDirectory, rasterFileList, shapeSrcList,
                                             maxXCut, maxYCut, minYCut, minXCut,
                                             rasterFileBaseList=rasterFileBaseList,
                                             minpartialPerc=minpartialPerc,
                                             outputPrefix=outputPrefix,
                                             createPix=createPix,
                                             rasterPolyEnvelope=poly,
                                             baseName=baseName,
                                             imgId=imgId,
                                             s3Options=[])
                    chipSummaryList.append(chipSummary)

    return chipSummaryList
+
def createclip(outputDirectory, rasterFileList, shapeSrcList,
               maxXCut, maxYCut, minYCut, minXCut,
               rasterFileBaseList=[],
               minpartialPerc=0,
               outputPrefix='',
               createPix=False,
               rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"),
               className='',
               baseName='',
               imgId=-1,
               s3Options=[]):
    """Clip every raster and vector source to one chip window.

    outputDirectory -- root output directory
    rasterFileList -- [['rasterLocation', 'rasterDescription'], ...]
    shapeSrcList -- [[ogrDatasource, 'shapeDescription'], ...]
    maxXCut/maxYCut/minYCut/minXCut -- chip window bounds
    imgId -- when != -1, output names use "img{imgId}" instead of coordinates

    Returns a chip summary dict with keys rasterSource / chipName /
    geoVectorName / pixVectorName.

    NOTE(review): rasterPolyEnvelope defaults to an ogr geometry and is used
    via ogr's GetArea()/Intersection(), but cutChipFromMosaicGPD passes a
    shapely polygon -- the two APIs are not interchangeable; confirm.
    NOTE(review): outGeoJson is referenced after the loop and is unbound
    when shapeSrcList is empty.
    """
    #rasterFileList = [['rasterLocation', 'rasterDescription']]
    # i.e rasterFileList = [['/path/to/3band_AOI_1.tif, '3band'],
    #                       ['/path/to/8band_AOI_1.tif, '8band']
    #                       ]

    polyCutWGS = createPolygonFromCorners(minXCut, minYCut, maxXCut, maxYCut)

    if not rasterFileBaseList:
        rasterFileBaseList = []
        for rasterFile in rasterFileList:
            rasterFileBaseList.append(os.path.basename(rasterFile[0]))

    # NOTE(review): dead branch -- the default is an ogr geometry, not ''.
    if rasterPolyEnvelope == '':
        pass

    # One output chip name per raster source.
    chipNameList = []
    for rasterFile in rasterFileList:
        if className == '':
            if imgId==-1:
                chipNameList.append(outputPrefix + rasterFile[1] +
                                    "_" + baseName + "_{}_{}.tif".format(minXCut, minYCut))
            else:
                chipNameList.append(outputPrefix + rasterFile[1] +
                                    "_" + baseName + "_img{}.tif".format(imgId))
        else:
            if imgId==-1:
                chipNameList.append(outputPrefix + className + "_" +
                                    rasterFile[1] + "_" + baseName + "_{}_{}.tif".format(minXCut, minYCut))
            else:
                chipNameList.append(outputPrefix + className + '_' +
                                    rasterFile[1] + "_" + baseName + "_img{}.tif".format(imgId))

    # clip raster

    for chipName, rasterFile in zip(chipNameList, rasterFileList):
        outputFileName = os.path.join(outputDirectory, rasterFile[1], className, chipName)
        ## Clip Image
        print(rasterFile)
        print(outputFileName)

        #TODO replace gdalwarp with rasterio and windowed reads
        cmd = ["gdalwarp", "-te", "{}".format(minXCut), "{}".format(minYCut), "{}".format(maxXCut),
               "{}".format(maxYCut),
               '-co', 'PHOTOMETRIC=rgb',
               rasterFile[0], outputFileName]
        cmd.extend(s3Options)
        subprocess.call(cmd)

    baseLayerRasterName = os.path.join(outputDirectory, rasterFileList[0][1], className, chipNameList[0])
    outputFileName = os.path.join(outputDirectory, rasterFileList[0][1], chipNameList[0])


    ### Clip poly to cust to Raster Extent
    if rasterPolyEnvelope.GetArea() == 0:
        # Empty envelope: fall back to the first raster's full extent.
        srcImage = gdal.Open(rasterFileList[0][0])
        geoTrans, rasterPolyEnvelope, ulX, ulY, lrX, lrY = getRasterExtent(srcImage)
        polyVectorCut = polyCutWGS.Intersection(rasterPolyEnvelope)
    else:
        polyVectorCut = polyCutWGS.Intersection(rasterPolyEnvelope)

    # Interate thorough Vector Src List
    for shapeSrc in shapeSrcList:
        if imgId == -1:
            outGeoJson = outputPrefix + shapeSrc[1] + \
                         "_" + baseName + "_{}_{}.geojson".format(minXCut, minYCut)
        else:
            outGeoJson = outputPrefix + shapeSrc[1] + \
                         "_" + baseName + "_img{}.geojson".format(imgId)

        outGeoJson = os.path.join(outputDirectory, 'geojson', shapeSrc[1], outGeoJson)

        clipShapeFile(shapeSrc[0], outGeoJson, polyVectorCut, minpartialPerc=minpartialPerc)


    chipSummary = {'rasterSource': baseLayerRasterName,
                   'chipName': chipNameList[0],
                   'geoVectorName': outGeoJson,
                   'pixVectorName': ''
                   }

    return chipSummary
+
def cutChipFromRasterCenter(rasterFileList, shapeFileSrc, outlineSrc='',
                            outputDirectory='', outputPrefix='clip_',
                            clipSizeMeters=50, createPix=False,
                            classFieldName = 'TYPE',
                            minpartialPerc=0.1,
                            ):
    """Cut one chip centered on each feature of shapeFileSrc.

    NOTE(review): several apparent defects -- confirm before use:
      * createUTMTransform returns 2 values, but 3 are unpacked here;
      * poly.Transform()/GetEnvelope() are ogr calls, while getRasterExtent
        returns a shapely Polygon (and expects a rasterio dataset although
        a gdal dataset is passed);
      * createPolygonFromCenterPoint takes (point, radiusMeters) but is
        called with (cx, cy, radiusMeters=...);
      * createclip expects a list of [datasource, name] pairs, but a single
        ogr datasource (shapeSrc) is passed.
    """
    #rasterFileList = [['rasterLocation', 'rasterDescription']]
    # i.e rasterFileList = [['/path/to/3band_AOI_1.tif, '3band'],
    #                       ['/path/to/8band_AOI_1.tif, '8band']
    #                       ]
    srcImage = gdal.Open(rasterFileList[0][0])
    geoTrans, poly, ulX, ulY, lrX, lrY = getRasterExtent(srcImage)

    if outputDirectory == "":
        outputDirectory = os.path.dirname(rasterFileList[0])

    rasterFileBaseList = []
    for rasterFile in rasterFileList:
        rasterFileBaseList.append(os.path.basename(rasterFile[0]))

    transform_WGS84_To_UTM, transform_UTM_To_WGS84, utm_cs = createUTMTransform(poly)
    poly.Transform(transform_WGS84_To_UTM)
    env = poly.GetEnvelope()

    # return poly to WGS84
    poly.Transform(transform_UTM_To_WGS84)

    shapeSrc = ogr.Open(shapeFileSrc, 0)
    if outlineSrc == '':
        geomOutline = poly
    else:
        outline = ogr.Open(outlineSrc)
        layer = outline.GetLayer()
        featureOutLine = layer.GetFeature(0)
        geomOutlineBase = featureOutLine.GetGeometryRef()
        geomOutline = geomOutlineBase.Intersection(poly)

    # Only iterate source features inside the outline.
    shapeSrcBase = ogr.Open(shapeFileSrc, 0)
    layerBase = shapeSrcBase.GetLayer()
    layerBase.SetSpatialFilter(geomOutline)
    for rasterFile in rasterFileList:
        if not os.path.exists(os.path.join(outputDirectory, rasterFile[1])):
            os.makedirs(os.path.join(outputDirectory, rasterFile[1]))
    for feature in layerBase:
        featureGeom = feature.GetGeometryRef()
        cx, cy, cz = featureGeom.Centroid().GetPoint()
        polyCut = createPolygonFromCenterPoint(cx, cy, radiusMeters=clipSizeMeters)
        print(classFieldName)
        classDescription = feature.GetField(classFieldName)
        # Class names become directory/file name parts, so strip spaces.
        classDescription = classDescription.replace(" ","")
        envCut = polyCut.GetEnvelope()
        minXCut = envCut[0]
        minYCut = envCut[2]
        maxXCut = envCut[1]
        maxYCut = envCut[3]
        createclip(outputDirectory, rasterFileList, shapeSrc,
                   maxXCut, maxYCut, minYCut, minXCut,
                   rasterFileBaseList=rasterFileBaseList,
                   minpartialPerc=minpartialPerc,
                   outputPrefix=outputPrefix,
                   createPix=createPix,
                   rasterPolyEnvelope=poly,
                   className=classDescription)
+
+
+
def rotateClip(clipFileName, sourceGeoJson, rotaionList=[0,90,180,275]):
    """Placeholder: rotate a clip through the given angles (not implemented).

    NOTE(review): 275 in the default list looks like a typo for 270, and the
    parameter name 'rotaionList' is misspelled -- confirm before relying on
    either.
    """
    # will add "_{}.ext".formate(rotationList[i]
    pass
+
+
+
def createMaskedMosaic(input_raster, output_raster, outline_file):
    """Crop input_raster to outline_file with gdalwarp, producing a tiled,
    JPEG-compressed (YCbCr) GTiff mosaic."""
    warp_cmd = ["gdalwarp", "-q",
                "-cutline", outline_file,
                "-of", "GTiff",
                input_raster, output_raster,
                '-wo', 'OPTIMIZE_SIZE=YES',
                '-co', 'COMPRESS=JPEG',
                '-co', 'PHOTOMETRIC=YCBCR',
                '-co', 'TILED=YES']
    subprocess.call(warp_cmd)
+
+
+
def explodeGeoPandasFrame(inGDF):
    """Split multi-part geometry rows into one row per part.

    MultiPolygon/MultiLineString rows are duplicated once per member
    geometry; Polygon/LineString rows pass through unchanged.  Rows with
    any other geometry type are silently dropped.

    NOTE(review): relies on DataFrame.append and len()/indexing of
    multi-part geometries, both removed in newer pandas/shapely releases --
    confirm the pinned dependency versions.
    """
    #This function splits entries with MultiPolygon geometries into Polygon Geometries

    outdf = gpd.GeoDataFrame(columns=inGDF.columns)
    for idx, row in inGDF.iterrows():
        if type(row.geometry) == Polygon:
            outdf = outdf.append(row,ignore_index=True)
        if type(row.geometry) == MultiPolygon:
            multdf = gpd.GeoDataFrame(columns=inGDF.columns)
            recs = len(row.geometry)
            # One copy of the row per member polygon, then overwrite each
            # copy's geometry with the corresponding part.
            multdf = multdf.append([row]*recs,ignore_index=True)
            for geom in range(recs):
                multdf.loc[geom,'geometry'] = row.geometry[geom]
            multdf.head()
            outdf = outdf.append(multdf,ignore_index=True)

        if type(row.geometry) == LineString:
            outdf = outdf.append(row, ignore_index=True)

        if type(row.geometry) == MultiLineString:
            multdf = gpd.GeoDataFrame(columns=inGDF.columns)
            recs = len(row.geometry)
            multdf = multdf.append([row]*recs,ignore_index=True)
            for geom in range(recs):
                multdf.loc[geom,'geometry'] = row.geometry[geom]
            multdf.head()
            outdf = outdf.append(multdf,ignore_index=True)


    outdf.crs = inGDF.crs


    return outdf
+
def calculateCenterLineFromGeopandasPolygon(inGDF,
                                            centerLineDistanceInput_Meters=5,
                                            simplifyDistanceMeters=5,
                                            projectToUTM=True):
    """Compute simplified centerlines for each polygon in a GeoDataFrame.

    inGDF -- GeoDataFrame of (Multi)Polygons
    centerLineDistanceInput_Meters -- interpolation distance passed to
        centerline.Centerline
    simplifyDistanceMeters -- simplification tolerance per line
    projectToUTM -- project via osmnx so distances are in meters

    Returns a GeoDataFrame of centerline geometries in inGDF's CRS.
    """
    # project To UTM for GeoSpatial Measurements
    if projectToUTM:
        tmpGDF = osmnx.project_gdf(inGDF)
    else:
        tmpGDF = inGDF

    # Explode GeoPandas (MultiPolygon rows -> one row per Polygon)
    tmpGDF1 = explodeGeoPandasFrame(tmpGDF)
    tmpGDF1.crs = tmpGDF.crs
    gdf_centerline_utm = tmpGDF1


    # Loop through Geomertries to calculate Centerline for Each Polygon
    listOfGeoms = tmpGDF1['geometry'].values
    lineStringList = []

    for geom in listOfGeoms:
        tmpGeom = centerline.Centerline(geom, centerLineDistanceInput_Meters)
        lineStringList.append(tmpGeom.createCenterline())

    gdf_centerline_utm['geometry'] = lineStringList

    lineList = gdf_centerline_utm['geometry'].values
    lineSimplifiedList = []

    for geo in lineList:


        if geo.type == 'MultiLineString':

            # Merge contiguous pieces before simplifying.
            geoNew = shapely.ops.linemerge(geo).simplify(simplifyDistanceMeters, preserve_topology=False)

        else:

            geoNew = geo.simplify(simplifyDistanceMeters, preserve_topology=False)

        lineSimplifiedList.append(geoNew)

    simplifiedGdf_utm = gpd.GeoDataFrame({'geometry': lineSimplifiedList})
    simplifiedGdf_utm.crs = tmpGDF.crs
    print (tmpGDF.crs)

    # Reproject back to the caller's CRS when we projected to UTM above.
    if projectToUTM:
        gdf_simple_centerline = simplifiedGdf_utm.to_crs(inGDF.crs)
    else:
        gdf_simple_centerline = simplifiedGdf_utm


    return gdf_simple_centerline
+
+
def calculateCenterLineFromOGR(inputSrcFile, centerLineDistanceInput_Meters=5, outputShpFile=''):
    """Read a polygon vector file, compute its centerlines, and optionally
    write them to a shapefile.

    Returns the centerline GeoDataFrame.
    """
    inGDF = gpd.read_file(inputSrcFile)
    outGDF = calculateCenterLineFromGeopandasPolygon(
        inGDF,
        centerLineDistanceInput_Meters=centerLineDistanceInput_Meters)

    if outputShpFile != '':
        outGDF.to_file(outputShpFile)

    return outGDF
+
+
def createBufferGeoPandas(inGDF, bufferDistanceMeters=5, bufferRoundness=1, projectToUTM=True):
    """Buffer line geometries into polygons and dissolve them by 'class'.

    inGDF -- GeoDataFrame of road/line geometries with a 'class' column
    bufferDistanceMeters -- buffer half-width
    bufferRoundness -- resolution argument passed to GeoSeries.buffer
    projectToUTM -- project via osmnx so the buffer distance is in meters

    Returns a dissolved GeoDataFrame in inGDF's CRS.

    NOTE(review): when projectToUTM is False, gdf_utm_buffer aliases inGDF,
    so the caller's frame is modified in place -- confirm intended.
    """
    # Calculate CenterLine
    ## Define Buffer Constraints


    # Transform gdf Roadlines into UTM so that Buffer makes sense
    if projectToUTM:
        tmpGDF = osmnx.project_gdf(inGDF)
    else:
        tmpGDF = inGDF

    gdf_utm_buffer = tmpGDF

    # perform Buffer to produce polygons from Line Segments
    gdf_utm_buffer['geometry'] = tmpGDF.buffer(bufferDistanceMeters,
                                               bufferRoundness)

    # Merge overlapping buffers that share the same 'class' value.
    gdf_utm_dissolve = gdf_utm_buffer.dissolve(by='class')
    gdf_utm_dissolve.crs = gdf_utm_buffer.crs

    if projectToUTM:
        gdf_buffer = gdf_utm_dissolve.to_crs(inGDF.crs)
    else:
        gdf_buffer = gdf_utm_dissolve


    return gdf_buffer
+
+#def project_gdfToUTM(gdf, lat=[], lon=[]):
+
+
diff --git a/python/spaceNetUtilities/geoToolsGPD.pyc b/python/spaceNetUtilities/geoToolsGPD.pyc
new file mode 100644
index 0000000..e77dd79
Binary files /dev/null and b/python/spaceNetUtilities/geoToolsGPD.pyc differ
diff --git a/python/spaceNetUtilities/inferenceTools.py b/python/spaceNetUtilities/inferenceTools.py
new file mode 100644
index 0000000..9c99ecf
--- /dev/null
+++ b/python/spaceNetUtilities/inferenceTools.py
@@ -0,0 +1,156 @@
+import numpy as np
+import rasterio
+from rasterio import Affine
+from rasterio.warp import reproject, Resampling
+import types
+
+
+def get_8chan_SpectralClip(datapathPSRGB, datapathPSMUL, bs_rgb, bs_mul, imageid=[], debug=False):
+ im = []
+
+ with rasterio.open(datapathPSMUL, 'r') as f:
+ # values = f.read().astype(np.float32)
+ usechannels = [5, 3, 2, 2, 3, 6, 7, 8]
+ for chan_i in usechannels:
+ values = f.read(chan_i).astype(np.float32)
+ min_val = np.min(values)
+ max_val = np.max(values)
+ mean_val = np.mean(values)
+ values = np.clip(values, min_val, max_val)
+ values = (values - min_val) / (max_val - min_val)
+ im.append(values)
+
+ if debug:
+ print(values.shape)
+ print(np.min(values))
+ print(np.max(values))
+ if debug:
+ print('im[0] shape:')
+ print(len(im))
+ im = np.array(im) # (ch, w, h)
+
+ if debug:
+ print(im.shape)
+
+ return im
+
+
+def get_8chan_SpectralClipAll(datapathPSRGB, datapathPSMUL, bs_rgb, bs_mul, imageid=[], debug=False):
+ im = []
+
+ with rasterio.open(datapathPSRGB, 'r') as f:
+ usechannels = [1, 2, 3]
+ for chan_i in usechannels:
+ values = f.read(chan_i).astype(np.float32)
+ min_val = np.min(values)
+ max_val = np.max(values)
+ values = np.clip(values, min_val, max_val)
+ values = (values - min_val) / (max_val - min_val)
+ values = values - np.mean(values)
+ im.append(values)
+
+ with rasterio.open(datapathPSMUL, 'r') as f:
+
+ usechannels = [2, 3, 6, 7, 8]
+ for chan_i in usechannels:
+ values = f.read(chan_i).astype(np.float32)
+ min_val = np.min(values)
+ max_val = np.max(values)
+ values = np.clip(values, min_val, max_val)
+ values = (values - min_val) / (max_val - min_val)
+ values = values - np.mean(values)
+ im.append(values)
+
+ if debug:
+ print(values.shape)
+ print(np.min(values))
+ print(np.max(values))
+
+ if debug:
+ print('im[0] shape:')
+ print(len(im))
+ im = np.array(im) # (ch, w, h)
+
+ if debug:
+ print(im.shape)
+
+ return im
+
+
+def resampleImage(src, array, spatialScaleFactor):
+ # arr = src.read()
+ newarr = np.empty(shape=(array.shape[0], # same number of bands
+ round(array.shape[1] * spatialScaleFactor), # resolution scaled by spatialScaleFactor
+ round(array.shape[2] * spatialScaleFactor)))
+
+ # adjust the new affine transform to the cell size scaled by spatialScaleFactor
+ src_transform = src.affine
+ dst_transform = Affine(src_transform.a / spatialScaleFactor, src_transform.b, src_transform.c,
+ src_transform.d, src_transform.e / spatialScaleFactor, src_transform.f)
+
+ reproject(
+ array, newarr,
+ src_transform=src_transform,
+ dst_transform=dst_transform,
+ src_crs=src.crs,
+ dst_crs=src.crs,
+ resampling=Resampling.bilinear)
+
+ return newarr
+
+
+def imageSlicer(image, strideInPixels, windowSize=[256, 256], debug=False, immean=np.array([])):
+ # slide a window across the image
+ for y in range(0, image.shape[1], strideInPixels):
+ for x in range(0, image.shape[2], strideInPixels):
+ # yield the current window
+ if debug:
+ print(image[:, y:y + windowSize[1], x:x + windowSize[0]].shape)
+
+ if y + windowSize[1] > image.shape[1]:
+ y = image.shape[1] - windowSize[1]
+
+ if x + windowSize[0] > image.shape[2]:
+ x = image.shape[2] - windowSize[0]
+
+ if immean.size == 0:
+ yield (image[:, y:y + windowSize[1], x:x + windowSize[0]])
+ else:
+ yield (image[:, y:y + windowSize[1], x:x + windowSize[0]] - immean)
+
+
+def imageCombiner(listOfImages, image, strideInPixels, windowSize=[256, 256], debug=False):
+ if debug:
+ print(image.shape)
+ newImage = np.zeros(shape=(1,
+ image.shape[1],
+ image.shape[2]))
+
+ newImageCount = np.zeros(shape=(image.shape[0],
+ image.shape[1],
+ image.shape[2]))
+ if isinstance(listOfImages, types.GeneratorType):
+ listOfImages = list(listOfImages)
+ idx = 0
+ for y in range(0, image.shape[1], strideInPixels):
+ for x in range(0, image.shape[2], strideInPixels):
+ # yield the current window
+
+ if y + windowSize[1] > image.shape[1]:
+ y = image.shape[1] - windowSize[1]
+
+ if x + windowSize[0] > image.shape[2]:
+ x = image.shape[2] - windowSize[0]
+
+ newImage[:, y:y + windowSize[1], x:x + windowSize[0]] += listOfImages[idx]
+
+ newImageCount[:, y:y + windowSize[1], x:x + windowSize[0]] += 1
+
+ idx += 1
+
+ return newImage, newImageCount
+
+
+
+
+
diff --git a/python/spaceNetUtilities/labelTools.py b/python/spaceNetUtilities/labelTools.py
index f4ef4ec..33bf759 100644
--- a/python/spaceNetUtilities/labelTools.py
+++ b/python/spaceNetUtilities/labelTools.py
@@ -508,16 +508,20 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='',
convertTo8Bit=True,
outputPixType='Byte',
outputFormat='GTiff',
- bboxResize=1.0):
+ bboxResize=1.0,
+ objectClassDict='',
+ attributeName=''):
print("creating {}".format(xmlFileName))
- buildingList = gT.convert_wgs84geojson_to_pixgeojson(geoJson, rasterImageName, image_id=[], pixelgeojson=[], only_polygons=True,
- breakMultiPolygonGeo=True, pixPrecision=2)
- # buildinglist.append({'ImageId': image_id,
+ featureList = gT.convert_wgs84geojson_to_pixgeojson(geoJson, rasterImageName, image_id=[], pixelgeojson=[], only_polygons=True,
+ breakMultiPolygonGeo=True, pixPrecision=2, attributeName=attributeName,
+ objectClassDict=objectClassDict)
+ # featureList.append({'ImageId': image_id,
#'BuildingId': building_id,
#'polyGeo': ogr.CreateGeometryFromWkt(geom.ExportToWkt()),
#'polyPix': ogr.CreateGeometryFromWkt('POLYGON EMPTY')
- #})
#'featureName': featureName,
+ # 'featureIdNum': featureIdNum})
@@ -582,13 +586,13 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='',
SubElement(top, 'segmented').text = str(segmented)
# start object segment
- for building in buildingList:
- objectType = 'building'
+ for feature in featureList:
+ objectType = feature['featureName']
objectPose = 'Left'
objectTruncated = 0 # 1=True, 0 = False
objectDifficulty = 0 # 0 Easy - 3 Hard
- env = building['polyPix'].GetEnvelope()
+ env = feature['polyPix'].GetEnvelope()
xmin=env[0]
ymin=env[2]
xmax=env[1]
@@ -638,6 +642,8 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='',
idField = ogr.FieldDefn("objid", ogr.OFTInteger)
innerBufferLayer.CreateField(idField)
+ idField = ogr.FieldDefn("clsid", ogr.OFTInteger)
+ innerBufferLayer.CreateField(idField)
featureDefn = innerBufferLayer.GetLayerDefn()
bufferDist = srcRaster.GetGeoTransform()[1]*bufferSizePix
@@ -658,6 +664,12 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='',
inBufFeature = ogr.Feature(featureDefn)
inBufFeature.SetGeometry(geomBufferIn)
inBufFeature.SetField('objid', idx)
+ if attributeName !='':
+ inBufFeature.SetField('clsid', objectClassDict[feature.GetField(attributeName)]['featureIdNum'])
+ else:
+ inBufFeature.SetField('clsid', 100)
+
+
innerBufferLayer.CreateFeature(inBufFeature)
outBufFeature = None
@@ -681,7 +693,7 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='',
print('rasterize outer buffer')
gdal.RasterizeLayer(target_ds, [1], outerBufferLayer, burn_values=[255])
print('rasterize inner buffer')
- gdal.RasterizeLayer(target_ds, [1], innerBufferLayer, burn_values=[100])
+ gdal.RasterizeLayer(target_ds, [1], innerBufferLayer, burn_values=[100], options=['ATTRIBUTE=clsid'])
print('writing png sgcls')
# write to .png
imageArray = np.array(target_ds.GetRasterBand(1).ReadAsArray())
@@ -742,17 +754,22 @@ def geoJsonToDARKNET(xmlFileName, geoJson, rasterImageName, im_id='',
convertTo8Bit=True,
outputPixType='Byte',
outputFormat='GTiff',
- bboxResize=1.0):
+ bboxResize=1.0,
+ objectClassDict='',
+ attributeName=''):
+
xmlFileName = xmlFileName.replace(".xml", ".txt")
print("creating {}".format(xmlFileName))
- buildingList = gT.convert_wgs84geojson_to_pixgeojson(geoJson, rasterImageName, image_id=[], pixelgeojson=[], only_polygons=True,
- breakMultiPolygonGeo=True, pixPrecision=2)
- # buildinglist.append({'ImageId': image_id,
+ featureList = gT.convert_wgs84geojson_to_pixgeojson(geoJson, rasterImageName, image_id=[], pixelgeojson=[], only_polygons=True,
+ breakMultiPolygonGeo=True, pixPrecision=2, attributeName=attributeName,
+ objectClassDict=objectClassDict)
+ # featureList.append({'ImageId': image_id,
#'BuildingId': building_id,
#'polyGeo': ogr.CreateGeometryFromWkt(geom.ExportToWkt()),
#'polyPix': ogr.CreateGeometryFromWkt('POLYGON EMPTY')
- #})
#'featureName': featureName,
+ #'featureIdNum': featureIdNum})
srcRaster = gdal.Open(rasterImageName)
@@ -788,12 +805,12 @@ def geoJsonToDARKNET(xmlFileName, geoJson, rasterImageName, im_id='',
with open(xmlFileName, 'w') as f:
- for building in buildingList:
+ for feature in featureList:
# Get Envelope returns a tuple (minX, maxX, minY, maxY)
- boxDim = building['polyPix'].GetEnvelope()
+ boxDim = feature['polyPix'].GetEnvelope()
if bboxResize != 1.0:
xmin = boxDim[0]
@@ -814,7 +831,7 @@ def geoJsonToDARKNET(xmlFileName, geoJson, rasterImageName, im_id='',
rasterSize = (srcRaster.RasterXSize, srcRaster.RasterYSize)
lineOutput = convertPixDimensionToPercent(rasterSize, boxDim)
- classNum=0
+ classNum=feature['featureIdNum']
f.write('{} {} {} {} {}\n'.format(classNum,
lineOutput[0],
lineOutput[1],
diff --git a/python/spaceNetUtilities/labelTools.pyc b/python/spaceNetUtilities/labelTools.pyc
index a5d7411..c9dda54 100644
Binary files a/python/spaceNetUtilities/labelTools.pyc and b/python/spaceNetUtilities/labelTools.pyc differ
diff --git a/python/spaceNetUtilities/osmtools.py b/python/spaceNetUtilities/osmtools.py
new file mode 100644
index 0000000..519156c
--- /dev/null
+++ b/python/spaceNetUtilities/osmtools.py
@@ -0,0 +1,52 @@
+import geopandas as gpd
+import geopandas_osm
+from osmnx import core
+from shapely.geometry import Polygon
+
+
+def createPolygonShapeFileFromOSM(polyBounds, pointOfInterestList, outVectorFile='', debug=True):
+
+ gdfOSM = gpd.GeoDataFrame([])
+ for pointOfInterestKey in pointOfInterestList.keys():
+
+ gdfPOITemp = core.osm_net_download(polygon=polyBounds,
+ infrastructure='node["{}"]'.format(pointOfInterestKey)
+ )
+ gdfOSM = gdfOSM.append(gdfPOITemp)
+
+ gdfPOITemp = core.osm_net_download(polygon=polyBounds,
+ infrastructure='way["{}"]'.format(pointOfInterestKey)
+ )
+
+ gdfOSM = gdfOSM.append(gdfPOITemp)
+
+ #gdfPOITemp = geopandas_osm.osm.query_osm('way', polyBounds, recurse='down',
+ # tags='{}'.format(pointOfInterestKey))
+
+ gdfOSM = gdfOSM.append(gdfPOITemp)
+
+ #gdfPOITemp = geopandas_osm.osm.query_osm('node', polyBounds, recurse='down',
+ # tags='{}'.format(pointOfInterestKey))
+
+ gdfOSM = gdfOSM.append(gdfPOITemp)
+
+ gdfOSM.drop_duplicates(subset=['id'], keep='first', inplace=True)
+
+ ## convert closed LineStrings to Polygon
+ geomList = []
+ for geom in gdfOSM.geometry.values:
+ if geom.geom_type == "LineString" and geom.is_closed:
+ geom = Polygon(geom)
+ else:
+ pass  # leave geometry unchanged
+ geomList.append(geom)
+
+ gdfOSM.geometry = geomList
+
+ gdfOSM = gdfOSM[(gdfOSM.geom_type=='Point') | (gdfOSM.geom_type=='Polygon')]
+ ##
+
+
+ return gdfOSM
+
+ #gdfFinal = createPolygonShapeFileGPD(pointOfInterestList, gdfOSM, outVectorFile=outVectorFile)
diff --git a/python/spaceNetUtilities/rasterTools.py b/python/spaceNetUtilities/rasterTools.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/spaceNetUtilities/vizTools.pyc b/python/spaceNetUtilities/vizTools.pyc
new file mode 100644
index 0000000..d864af3
Binary files /dev/null and b/python/spaceNetUtilities/vizTools.pyc differ
diff --git a/python/testFile.py b/python/testFile.py
new file mode 100644
index 0000000..3d01856
--- /dev/null
+++ b/python/testFile.py
@@ -0,0 +1,22 @@
+import sys
+sys.path.extend(['/Users/dlindenbaum/cosmiQGit/spaceNetUtilities_DaveL/python'])
+from spaceNetUtilities import evalToolsGPD as eT
+
+import geopandas as gpd
+
+srcTruthVector = '/Users/dlindenbaum/dataStorage/spaceNetPrivate_Truth/AOI_2_Vegas/srcData/vectorData/Vegas_Footprints.shp'
+srcTestVector = '/Users/dlindenbaum/dataStorage/xd_Vegas/15OCT22183656-S2AS_R1C7-056155973080_01_P001.shp'
+
+
+truthDF = gpd.read_file(srcTruthVector)
+testDF = gpd.read_file(srcTestVector)
+
+prop_polysPoly = testDF.geometry.values
+sol_polysPoly = truthDF.geometry.values
+
+
+truthIndex = truthDF.sindex
+
+eval_function_input_list = [['ImageId', prop_polysPoly, sol_polysPoly, truthIndex]]
+print('Starting Eval')
+resultList = eT.evalfunction(eval_function_input_list[0])
diff --git a/testfile.geojson b/testfile.geojson
new file mode 100644
index 0000000..8ef1210
--- /dev/null
+++ b/testfile.geojson
@@ -0,0 +1,7 @@
+{
+"type": "FeatureCollection",
+"features": [
+{ "type": "Feature", "properties": { "df1": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 0.0, 0.0 ], [ 2.0, 0.0 ], [ 2.0, 2.0 ], [ 0.0, 2.0 ], [ 0.0, 0.0 ] ] ] } },
+{ "type": "Feature", "properties": { "df1": 2 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 2.0, 2.0 ], [ 4.0, 2.0 ], [ 4.0, 4.0 ], [ 2.0, 4.0 ], [ 2.0, 2.0 ] ] ] } }
+]
+}
diff --git a/testfile1.geojson b/testfile1.geojson
new file mode 100644
index 0000000..8ef1210
--- /dev/null
+++ b/testfile1.geojson
@@ -0,0 +1,7 @@
+{
+"type": "FeatureCollection",
+"features": [
+{ "type": "Feature", "properties": { "df1": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 0.0, 0.0 ], [ 2.0, 0.0 ], [ 2.0, 2.0 ], [ 0.0, 2.0 ], [ 0.0, 0.0 ] ] ] } },
+{ "type": "Feature", "properties": { "df1": 2 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 2.0, 2.0 ], [ 4.0, 2.0 ], [ 4.0, 4.0 ], [ 2.0, 4.0 ], [ 2.0, 2.0 ] ] ] } }
+]
+}
diff --git a/testfile2.geojson b/testfile2.geojson
new file mode 100644
index 0000000..8ef1210
--- /dev/null
+++ b/testfile2.geojson
@@ -0,0 +1,7 @@
+{
+"type": "FeatureCollection",
+"features": [
+{ "type": "Feature", "properties": { "df1": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 0.0, 0.0 ], [ 2.0, 0.0 ], [ 2.0, 2.0 ], [ 0.0, 2.0 ], [ 0.0, 0.0 ] ] ] } },
+{ "type": "Feature", "properties": { "df1": 2 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 2.0, 2.0 ], [ 4.0, 2.0 ], [ 4.0, 4.0 ], [ 2.0, 4.0 ], [ 2.0, 2.0 ] ] ] } }
+]
+}