From 9dee5798cd3d4620e14c96562719698eec49a090 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 14:38:41 -0400 Subject: [PATCH 01/26] added createSpaceNetPOI.py for processing Raw Imagery and Point of Interest Dataset into a machine learning dataset --- python/createSpaceNetPOI.py | 260 ++++++++++++++++++++++++++++++++++++ 1 file changed, 260 insertions(+) create mode 100644 python/createSpaceNetPOI.py diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py new file mode 100644 index 0000000..8de7204 --- /dev/null +++ b/python/createSpaceNetPOI.py @@ -0,0 +1,260 @@ +from osgeo import ogr, gdal, osr +from spaceNetUtilities import geoTools as gT +import argparse +import os + + + +def returnBoundBoxM(tmpGeom, metersRadius=50): + poly = gT.createPolygonFromCenterPoint(tmpGeom.GetX(), tmpGeom.GetY(), + metersRadius) + + polyEnv = gT.get_envelope(poly) + + + + + return polyEnv + +### Define Point of Interest Dictionary +# {'spacenetFeatureName': +# {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping +# 'featureChipScaleM': 200, # Size of chip to create in meters +# 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point +# # indicating in the geojson. 
This will be used in creating polygons for IOU scoring +# } + +pointOfInterestList = {'Airfield_POIApron Lights Tower': { + 'featureIdNum': '1', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 10 + }, + 'Airfield_POIHangar': { + 'featureIdNum': '2', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 25 + }, + 'Airfield_POIHelipad': { + 'featureIdNum': '3', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 25 + }, +'Airfield_POISupport Facility': { + 'featureIdNum': '4', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 10 + }, +'Bridges_TunnelsBridgePedestrian': { + 'featureIdNum': '5', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 25 + }, +'Bridges_TunnelsBridgeRoad': { + 'featureIdNum': '6', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 25 + }, +'Commercial_POIServiceBar': { + 'featureIdNum': '7', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 25 + }, +'Electrical_POITransmission Tower': { + 'featureIdNum': '8', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 10 + }, +'EmbassiesConsulate': { + 'featureIdNum': '9', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 25 + }, +'Olympic_Venues': { + 'featureIdNum': '10', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 50 + }, +'Public_Transportation_POIBusStop': { + 'featureIdNum': '11', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 5 + }, +'Railways_POI999999Station': { + 'featureIdNum': '12', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 25 + }, +'Recreation_POIPark': { + 'featureIdNum': '13', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 50 + }, +'Recreation_POIPool': { + 'featureIdNum': '14', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 50 + }, +'Recreation_POISports Facility': { + 'featureIdNum': '15', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 50 + }, +'Religious_InstitutionsChurch': { + 'featureIdNum': '16', + 'featureChipScaleM': 200, + 'featureBBoxSizeM': 25 + }, +} + + +def createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList): + + ## Turn Point File 
into PolygonFile + + + srcDS = ogr.Open(srcVectorFile, 0) + inLayer = srcDS.GetLayer() + srs = osr.SpatialReference() + srs.ImportFromEPSG(4326) + outDriver = ogr.GetDriverByName("GeoJSON") + if os.path.exists(outVectorFile): + outDriver.DeleteDataSource(outVectorFile) + + dstDS = outDriver.CreateDataSource(outVectorFile) + + outLayer = dstDS.CreateLayer('polygons', srs, geom_type=ogr.wkbPolygon) + + # Get Attribute names + inLayerDefn = inLayer.GetLayerDefn() + for i in range(0, inLayerDefn.GetFieldCount()): + fieldDefn = inLayerDefn.GetFieldDefn(i) + outLayer.CreateField(fieldDefn) + + outLayerDefn = outLayer.GetLayerDefn() + + # Copy Features + for i in range(0, inLayer.GetFeatureCount()): + + inFeature = inLayer.GetFeature(i) + outFeature = ogr.Feature(outLayerDefn) + for i in range(0, outLayerDefn.GetFieldCount()): + outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i)) + # Set geometry as centroid + tmpGeom = inFeature.GetGeometryRef() + poly = returnBoundBoxM(tmpGeom, metersRadius=pointOfInterestList[inFeature.GetField('spaceNetFeature')]['featureBBoxSizeM']) + + outFeature.SetGeometry(poly) + + outLayer.CreateFeature(outFeature) + outFeature = None + inFeature = None + + srcDS = None + dstDS = None + +def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, + baseName='', + className='', + outputDirectory=''): + + + chipSummaryList = [] + srcDS = srcDS = ogr.Open(srcVectorFile, 0) + srcLayer = srcDS.GetLayer() + + + shapeSrcList = [] + for shapeFileSrc in shapeFileSrcList: + print(shapeFileSrc[1]) + shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]]) + + ## determinePixelSize in Meters + print rasterFileList + print rasterFileList[0][0] + srcRaster = gdal.Open(rasterFileList[0][0]) + geoTransform = srcRaster.GetGeoTransform() + metersPerPix = abs(round(geoTransform[5]*111111,1)) + + imgId = 0 + for feature in srcLayer: + + geom = feature.GetGeometryRef() + + if 
geom.GetGeometryName != 'POINT': + geom = geom.Centroid() + + + spacenetFeatureName = feature.GetField('spaceNetFeature') + + clipSize = pointOfInterestList[spacenetFeatureName]['featureChipScaleM'] + halfClipSizeXm = round((clipSize/metersPerPix)/2) + + xCenter = geom.GetX() + yCenter = geom.GetY() + + maxXCut = xCenter + halfClipSizeXm*geoTransform[1] + maxYCut = yCenter + abs(halfClipSizeXm*geoTransform[5]) + minYCut = yCenter - abs(halfClipSizeXm*geoTransform[5]) + minXCut = xCenter - halfClipSizeXm*geoTransform[1] + + imgId = imgId + 1 + + chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeSrcList, + maxXCut, maxYCut, minYCut, minXCut, + rasterFileBaseList=[], + minpartialPerc=0, + outputPrefix='', + createPix=False, + rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"), + className=spacenetFeatureName.replace(' ', ''), + baseName=baseName, + imgId=imgId) + + chipSummaryList.append(chipSummary) + + return chipSummaryList + +if __name__ == '__main__': + + createOutVectorFile = True + srcVectorFile = '/path/To/PointOfInterestSummary.geojson' + outVectorFile = '/path/To/PolygonOfInterestSummary.geojson' + outputDirectory = '/path/To/processedDataPOI/' + className = 'POIAll' + baseName = 'AOI_1_RIO' + + # List of Raster Images to Chip and an appropriate label. + # This list will take any type of Raster supported by GDAL + # VRTs are nice becasuse they create a virtual mosaic and allow for images covering a wide area to be considered one + # In this case gdalbuildvrt can be run in a folder of tifs to create the VRT to be handed for processing + rasterFileList = [['/path/To/3-BandVRT.vrt', '3band'], + ['/path/To/8-BandVRT.vrt', '8band'] + ] + + shapeFileSrcList = [ + [outVectorFile, 'POIAll'] + ] + + # create Polygon of Interet from Point of Interest File. This will create bounding boxes of specified size. 
+ if createOutVectorFile: + createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList) + + + # create Folder Structure to place files into. + + for rasterFile in rasterFileList: + for featureName in pointOfInterestList.keys(): + tmpPath = os.path.join(outputDirectory, rasterFile[1], featureName.replace(' ', '')) + if not os.path.exists(tmpPath): + os.makedirs(tmpPath) + + # create Processed Point of Interest Data. + createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, + baseName=baseName, + className='', + outputDirectory=outputDirectory) + + + + + From a4aaffc1c84dd88c7670bb6429018806d62638cc Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 15:16:47 -0400 Subject: [PATCH 02/26] moved pointOfInterestList to JSON file for use accross functions --- configFiles/AOI_1_Rio_POI_Description.json | 84 +++++++++++++++++ python/createSpaceNetPOI.py | 103 +++------------------ 2 files changed, 97 insertions(+), 90 deletions(-) create mode 100644 configFiles/AOI_1_Rio_POI_Description.json diff --git a/configFiles/AOI_1_Rio_POI_Description.json b/configFiles/AOI_1_Rio_POI_Description.json new file mode 100644 index 0000000..01661f7 --- /dev/null +++ b/configFiles/AOI_1_Rio_POI_Description.json @@ -0,0 +1,84 @@ +{ + "Airfield_POIApron Lights Tower": { + "featureIdNum": 1, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10 + }, + "Airfield_POIHangar": { + "featureIdNum": 2, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25 + }, + "Airfield_POIHelipad": { + "featureIdNum": 3, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25 + }, +"Airfield_POISupport Facility": { + "featureIdNum": 4, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10 + }, +"Bridges_TunnelsBridgePedestrian": { + "featureIdNum": 5, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25 + }, +"Bridges_TunnelsBridgeRoad": { + "featureIdNum": 6, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25 + }, +"Commercial_POIServiceBar": { + 
"featureIdNum": 7, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25 + }, +"Electrical_POITransmission Tower": { + "featureIdNum": 8, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10 + }, +"EmbassiesConsulate": { + "featureIdNum": 9, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25 + }, +"Olympic_Venues": { + "featureIdNum": 10, + "featureChipScaleM": 200, + "featureBBoxSizeM": 50 + }, +"Public_Transportation_POIBusStop": { + "featureIdNum": 11, + "featureChipScaleM": 200, + "featureBBoxSizeM": 5 + }, +"Railways_POI999999Station": { + "featureIdNum": 12, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25 + }, +"Recreation_POIPark": { + "featureIdNum": 13, + "featureChipScaleM": 200, + "featureBBoxSizeM": 50 + }, +"Recreation_POIPool": { + "featureIdNum": 14, + "featureChipScaleM": 200, + "featureBBoxSizeM": 50 + }, +"Recreation_POISports Facility": { + "featureIdNum": 15, + "featureChipScaleM": 200, + "featureBBoxSizeM": 50 + }, +"Religious_InstitutionsChurch": { + "featureIdNum": 16, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25 + } +} + + diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index 8de7204..d4815ec 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -1,6 +1,6 @@ from osgeo import ogr, gdal, osr from spaceNetUtilities import geoTools as gT -import argparse +import json import os @@ -16,95 +16,6 @@ def returnBoundBoxM(tmpGeom, metersRadius=50): return polyEnv -### Define Point of Interest Dictionary -# {'spacenetFeatureName': -# {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping -# 'featureChipScaleM': 200, # Size of chip to create in meters -# 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point -# # indicating in the geojson. 
This will be used in creating polygons for IOU scoring -# } - -pointOfInterestList = {'Airfield_POIApron Lights Tower': { - 'featureIdNum': '1', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 10 - }, - 'Airfield_POIHangar': { - 'featureIdNum': '2', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 25 - }, - 'Airfield_POIHelipad': { - 'featureIdNum': '3', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 25 - }, -'Airfield_POISupport Facility': { - 'featureIdNum': '4', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 10 - }, -'Bridges_TunnelsBridgePedestrian': { - 'featureIdNum': '5', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 25 - }, -'Bridges_TunnelsBridgeRoad': { - 'featureIdNum': '6', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 25 - }, -'Commercial_POIServiceBar': { - 'featureIdNum': '7', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 25 - }, -'Electrical_POITransmission Tower': { - 'featureIdNum': '8', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 10 - }, -'EmbassiesConsulate': { - 'featureIdNum': '9', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 25 - }, -'Olympic_Venues': { - 'featureIdNum': '10', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 50 - }, -'Public_Transportation_POIBusStop': { - 'featureIdNum': '11', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 5 - }, -'Railways_POI999999Station': { - 'featureIdNum': '12', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 25 - }, -'Recreation_POIPark': { - 'featureIdNum': '13', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 50 - }, -'Recreation_POIPool': { - 'featureIdNum': '14', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 50 - }, -'Recreation_POISports Facility': { - 'featureIdNum': '15', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 50 - }, -'Religious_InstitutionsChurch': { - 'featureIdNum': '16', - 'featureChipScaleM': 200, - 'featureBBoxSizeM': 25 - }, -} def createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList): @@ -222,6 +133,7 @@ def 
createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s outputDirectory = '/path/To/processedDataPOI/' className = 'POIAll' baseName = 'AOI_1_RIO' + featureDescriptionJson = '../configFiles/AOI_1_Rio_POI_Description.json' # List of Raster Images to Chip and an appropriate label. # This list will take any type of Raster supported by GDAL @@ -235,6 +147,17 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s [outVectorFile, 'POIAll'] ] + ### Define Point of Interest Dictionary + # {'spacenetFeatureName': + # {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping + # 'featureChipScaleM': 200, # Size of chip to create in meters + # 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point + # # indicating in the geojson. This will be used in creating polygons for IOU scoring + # } + + with open(featureDescriptionJson, 'r') as j: + pointOfInterestList = json.load(j) + # create Polygon of Interet from Point of Interest File. This will create bounding boxes of specified size. 
if createOutVectorFile: createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList) From 55854e296861fc8e94e1727384ca1ba9bbcd83e0 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 18:10:33 -0400 Subject: [PATCH 03/26] modified labeltools/geoTools to support loading of JSON with dictionary of Object Identifiers --- python/createDataSpaceNet.py | 16 ++++++++-- python/spaceNetUtilities/geoTools.py | 42 +++++++++++++++++++++----- python/spaceNetUtilities/labelTools.py | 29 ++++++++++++------ 3 files changed, 67 insertions(+), 20 deletions(-) diff --git a/python/createDataSpaceNet.py b/python/createDataSpaceNet.py index c831c8e..2f5e9f5 100644 --- a/python/createDataSpaceNet.py +++ b/python/createDataSpaceNet.py @@ -53,7 +53,9 @@ def processChipSummaryList(chipSummaryList, outputDirectory='', annotationType=' outputPixType='', datasetName='spacenetV2', folder_name='folder_name', - bboxResize=1.0 + bboxResize=1.0, + configJson='', + attributeName='' ): if outputPixType == '': @@ -82,7 +84,9 @@ def processChipSummaryList(chipSummaryList, outputDirectory='', annotationType=' convertTo8Bit=convertTo8Bit, outputPixType=outputPixType, outputFormat=outputFormat, - bboxResize=bboxResize + bboxResize=bboxResize, + configJson=configJson, + attributeName=attributeName ) elif annotationType=='DARKNET': entry = lT.geoJsonToDARKNET(annotationName, chipSummary['geoVectorName'], chipSummary['rasterSource'], @@ -228,7 +232,7 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8, action='store_true') parser.add_argument("--featureName", help='Type of feature to be summarized by csv (i.e. 
Building)' - 'Currently in SpaceNet V2 Building is only label', + 'Currently in SpaceNet V2 Building is the only label', type=str, default='Buildings') parser.add_argument("--spacenetVersion", @@ -247,6 +251,12 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8, type=float, default=1.0) + parser.add_argument("--POIConfigJson", + help='JSON file containing information about valid Points of interest', + default='') + parser.add_argument("--attributeName", + help='Attribute in GeoJson to pull the feature name from', + default='spacenetFeature') args = parser.parse_args() entryList = [] diff --git a/python/spaceNetUtilities/geoTools.py b/python/spaceNetUtilities/geoTools.py index bcfefd1..b24be1d 100644 --- a/python/spaceNetUtilities/geoTools.py +++ b/python/spaceNetUtilities/geoTools.py @@ -542,16 +542,24 @@ def geoWKTToPixelWKT(geom, inputRaster, targetSR, geomTransform, pixPrecision=2) def convert_wgs84geojson_to_pixgeojson(wgs84geojson, inputraster, image_id=[], pixelgeojson=[], only_polygons=True, - breakMultiPolygonGeo=True, pixPrecision=2): + breakMultiPolygonGeo=True, pixPrecision=2, + attributeName='', + objectClassDict=''): dataSource = ogr.Open(wgs84geojson, 0) layer = dataSource.GetLayer() #print(layer.GetFeatureCount()) building_id = 0 # check if geoJsonisEmpty - buildinglist = [] + feautureList = [] if not image_id: image_id = inputraster.replace(".tif", "") + + + + + + if layer.GetFeatureCount() > 0: if len(inputraster)>0: @@ -564,7 +572,21 @@ def convert_wgs84geojson_to_pixgeojson(wgs84geojson, inputraster, image_id=[], p + for feature in layer: + if attributeName != '': + featureName = feature.GetField(attributeName) + else: + featureName = 'building' + + if len(objectClassDict) > 0: + try: + featureId = objectClassDict[featureName]['featureIdNum'] + except: + raise('featureName {} not recognized'.format(featureName)) + else: + featureId = 1 + geom = feature.GetGeometryRef() if len(inputraster)>0: @@ -579,27 +601,31 @@ def 
convert_wgs84geojson_to_pixgeojson(wgs84geojson, inputraster, image_id=[], p for geom_wkt in geom_wkt_list: building_id += 1 - buildinglist.append({'ImageId': image_id, + feautureList.append({'ImageId': image_id, 'BuildingId': building_id, 'polyGeo': ogr.CreateGeometryFromWkt(geom_wkt[1]), - 'polyPix': ogr.CreateGeometryFromWkt(geom_wkt[0]) + 'polyPix': ogr.CreateGeometryFromWkt(geom_wkt[0]), + 'featureName': featureName, + 'featureId': featureId }) else: building_id += 1 - buildinglist.append({'ImageId': image_id, + feautureList.append({'ImageId': image_id, 'BuildingId': building_id, 'polyGeo': ogr.CreateGeometryFromWkt(geom.ExportToWkt()), - 'polyPix': ogr.CreateGeometryFromWkt('POLYGON EMPTY') + 'polyPix': ogr.CreateGeometryFromWkt('POLYGON EMPTY'), + 'featureName' : featureName, + 'featureId': featureId }) else: #print("no File exists") pass if pixelgeojson: - exporttogeojson(pixelgeojson, buildinglist=buildinglist) + exporttogeojson(pixelgeojson, buildinglist=feautureList) - return buildinglist + return feautureList def convert_pixgwktList_to_wgs84wktList(inputRaster, wktPolygonPixList): ## returns # [[GeoWKT, PixWKT], ...] 
diff --git a/python/spaceNetUtilities/labelTools.py b/python/spaceNetUtilities/labelTools.py index d8971d6..09ac9ab 100644 --- a/python/spaceNetUtilities/labelTools.py +++ b/python/spaceNetUtilities/labelTools.py @@ -508,16 +508,19 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='', convertTo8Bit=True, outputPixType='Byte', outputFormat='GTiff', - bboxResize=1.0): + bboxResize=1.0, + objectClassDict='', + attributeName=''): print("creating {}".format(xmlFileName)) - buildingList = gT.convert_wgs84geojson_to_pixgeojson(geoJson, rasterImageName, image_id=[], pixelgeojson=[], only_polygons=True, - breakMultiPolygonGeo=True, pixPrecision=2) - # buildinglist.append({'ImageId': image_id, + featureList = gT.convert_wgs84geojson_to_pixgeojson(geoJson, rasterImageName, image_id=[], pixelgeojson=[], only_polygons=True, + breakMultiPolygonGeo=True, pixPrecision=2, attributeName=attributeName, + objectClassDict=objectClassDict) + # featureList.append({'ImageId': image_id, #'BuildingId': building_id, #'polyGeo': ogr.CreateGeometryFromWkt(geom.ExportToWkt()), #'polyPix': ogr.CreateGeometryFromWkt('POLYGON EMPTY') - #}) + #'featuerName': featureName}) @@ -582,13 +585,13 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='', SubElement(top, 'segmented').text = str(segmented) # start object segment - for building in buildingList: - objectType = 'building' + for feature in featureList: + objectType = feature['featureName'] objectPose = 'Left' objectTruncated = 0 # 1=True, 0 = False objectDifficulty = 0 # 0 Easy - 3 Hard - env = building['polyPix'].GetEnvelope() + env = feature['polyPix'].GetEnvelope() xmin=env[0] ymin=env[2] xmax=env[1] @@ -638,6 +641,8 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='', idField = ogr.FieldDefn("objid", ogr.OFTInteger) innerBufferLayer.CreateField(idField) + idField = ogr.FieldDefn("clsid", ogr.OFTInteger) + innerBufferLayer.CreateField(idField) featureDefn = 
innerBufferLayer.GetLayerDefn() bufferDist = srcRaster.GetGeoTransform()[1]*bufferSizePix @@ -658,6 +663,12 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='', inBufFeature = ogr.Feature(featureDefn) inBufFeature.SetGeometry(geomBufferIn) inBufFeature.SetField('objid', idx) + if attributeName !='': + inBufFeature.SetField('clsid', objectClassDict[feature.GetField(attributeName)]['featureIdNum']) + else: + inBufFeature.SetField('clsid', 100) + + innerBufferLayer.CreateFeature(inBufFeature) outBufFeature = None @@ -681,7 +692,7 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='', print('rasterize outer buffer') gdal.RasterizeLayer(target_ds, [1], outerBufferLayer, burn_values=[255]) print('rasterize inner buffer') - gdal.RasterizeLayer(target_ds, [1], innerBufferLayer, burn_values=[100]) + gdal.RasterizeLayer(target_ds, [1], innerBufferLayer, burn_values=[100], options='ATTRIBUTE=clsid') print('writing png sgcls') # write to .png imageArray = np.array(target_ds.GetRasterBand(1).ReadAsArray()) From 4b2f1236376aa9dea600f84504d992fb0cb7ce02 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 18:17:49 -0400 Subject: [PATCH 04/26] modified createDataSpaceNet.py to read json for config --- python/createDataSpaceNet.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/python/createDataSpaceNet.py b/python/createDataSpaceNet.py index 2f5e9f5..f4050ab 100644 --- a/python/createDataSpaceNet.py +++ b/python/createDataSpaceNet.py @@ -4,6 +4,7 @@ from spaceNetUtilities import labelTools as lT from spaceNetUtilities import geoTools as gT import argparse +import json def processRasterChip(rasterImage, rasterDescription, geojson, geojsonDescription, outputDirectory='', @@ -54,7 +55,7 @@ def processChipSummaryList(chipSummaryList, outputDirectory='', annotationType=' datasetName='spacenetV2', folder_name='folder_name', bboxResize=1.0, - configJson='', + objectClassDict='', 
attributeName='' ): @@ -85,7 +86,7 @@ def processChipSummaryList(chipSummaryList, outputDirectory='', annotationType=' outputPixType=outputPixType, outputFormat=outputFormat, bboxResize=bboxResize, - configJson=configJson, + objectClassDict=objectClassDict, attributeName=attributeName ) elif annotationType=='DARKNET': @@ -273,7 +274,7 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8, geojsonDirectory = os.path.join('vectordata','geojson') # 'vectordata/geojson' else: - print('Bad Spacenet Version Submitted, Version {} is not supported'.foramt(args.spacenetVersion)) + print('Bad Spacenet Version Submitted, Version {} is not supported'.format(args.spacenetVersion)) if args.convertTo8Bit: @@ -289,11 +290,19 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8, else: fullPathAnnotationsDirectory = args.outputDirectory + if args.POIConfigJson == '': + objectClassDict='' + else: + + with open(args.POIConfigJson, 'r') as f: + objectClassDict = json.load(f) + for aoiSubDir in listOfAOIs: fullPathSubDir = os.path.join(srcSpaceNetDirectory, aoiSubDir) ## Create Annotations directory - #fullPathAnnotationsDirectory = os.path.join(fullPathSubDir, annotationsDirectory) + ## fullPathAnnotationsDirectory = os.path.join(fullPathSubDir, annotationsDirectory) + if not os.path.exists(fullPathAnnotationsDirectory): os.makedirs(fullPathAnnotationsDirectory) if not os.path.exists(os.path.join(fullPathAnnotationsDirectory, 'annotations')): @@ -331,7 +340,8 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8, outputPixType=outputDataType, datasetName='spacenetV2', folder_name='folder_name', - bboxResize= args.boundingBoxResize + bboxResize= args.boundingBoxResize, + objectClassDict=objectClassDict ) print(entryListTmp) entryList.extend(entryListTmp) From 7abe0ce02104ad8bfdc9cd3ea95c711a627c7ce8 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 18:50:36 -0400 Subject: [PATCH 05/26] added python/createSpaceNetPOI.py capability to no 
sort images by class Name. THis makes post processing easier but data exploration harder --- python/createSpaceNetPOI.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index d4815ec..295437f 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -66,7 +66,9 @@ def createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList): def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, baseName='', className='', - outputDirectory=''): + outputDirectory='', + seperateImageFolders=False + ): chipSummaryList = [] @@ -96,7 +98,10 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s spacenetFeatureName = feature.GetField('spaceNetFeature') - + if seperateImageFolders: + className = spacenetFeatureName.replace(' ', '') + else: + className = '' clipSize = pointOfInterestList[spacenetFeatureName]['featureChipScaleM'] halfClipSizeXm = round((clipSize/metersPerPix)/2) @@ -117,7 +122,7 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s outputPrefix='', createPix=False, rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"), - className=spacenetFeatureName.replace(' ', ''), + className=className, baseName=baseName, imgId=imgId) @@ -134,6 +139,7 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s className = 'POIAll' baseName = 'AOI_1_RIO' featureDescriptionJson = '../configFiles/AOI_1_Rio_POI_Description.json' + seperateImageFolders = False # List of Raster Images to Chip and an appropriate label. 
# This list will take any type of Raster supported by GDAL @@ -175,7 +181,8 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, baseName=baseName, className='', - outputDirectory=outputDirectory) + outputDirectory=outputDirectory, + seperateImageFolders=seperateImageFolders) From 08b14e8ddca5d126a0c7d7f0f76c3fb888afaa3b Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 19:04:15 -0400 Subject: [PATCH 06/26] fixed simple error in gdalRasterizeLayer attribute=c;assid. Must be part of list not string --- python/spaceNetUtilities/labelTools.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/python/spaceNetUtilities/labelTools.py b/python/spaceNetUtilities/labelTools.py index 09ac9ab..50384f4 100644 --- a/python/spaceNetUtilities/labelTools.py +++ b/python/spaceNetUtilities/labelTools.py @@ -520,7 +520,8 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='', #'BuildingId': building_id, #'polyGeo': ogr.CreateGeometryFromWkt(geom.ExportToWkt()), #'polyPix': ogr.CreateGeometryFromWkt('POLYGON EMPTY') - #'featuerName': featureName}) + #'featuerName': featureName, + # 'featureIdNum': featureIdNum}) @@ -692,7 +693,7 @@ def geoJsonToPASCALVOC2012(xmlFileName, geoJson, rasterImageName, im_id='', print('rasterize outer buffer') gdal.RasterizeLayer(target_ds, [1], outerBufferLayer, burn_values=[255]) print('rasterize inner buffer') - gdal.RasterizeLayer(target_ds, [1], innerBufferLayer, burn_values=[100], options='ATTRIBUTE=clsid') + gdal.RasterizeLayer(target_ds, [1], innerBufferLayer, burn_values=[100], options=['ATTRIBUTE=clsid']) print('writing png sgcls') # write to .png imageArray = np.array(target_ds.GetRasterBand(1).ReadAsArray()) @@ -753,17 +754,22 @@ def geoJsonToDARKNET(xmlFileName, geoJson, rasterImageName, im_id='', convertTo8Bit=True, outputPixType='Byte', 
outputFormat='GTiff', - bboxResize=1.0): + bboxResize=1.0, + objectClassDict='', + attributeName=''): + xmlFileName = xmlFileName.replace(".xml", ".txt") print("creating {}".format(xmlFileName)) - buildingList = gT.convert_wgs84geojson_to_pixgeojson(geoJson, rasterImageName, image_id=[], pixelgeojson=[], only_polygons=True, - breakMultiPolygonGeo=True, pixPrecision=2) - # buildinglist.append({'ImageId': image_id, + featureList = gT.convert_wgs84geojson_to_pixgeojson(geoJson, rasterImageName, image_id=[], pixelgeojson=[], only_polygons=True, + breakMultiPolygonGeo=True, pixPrecision=2, attributeName=attributeName, + objectClassDict=objectClassDict) + # featureList.append({'ImageId': image_id, #'BuildingId': building_id, #'polyGeo': ogr.CreateGeometryFromWkt(geom.ExportToWkt()), #'polyPix': ogr.CreateGeometryFromWkt('POLYGON EMPTY') - #}) + #'featuerName': featureName, + #'featureIdNum': featureIdNum}) srcRaster = gdal.Open(rasterImageName) @@ -799,12 +805,12 @@ def geoJsonToDARKNET(xmlFileName, geoJson, rasterImageName, im_id='', with open(xmlFileName, 'w') as f: - for building in buildingList: + for feature in featureList: # Get Envelope returns a tuple (minX, maxX, minY, maxY) - boxDim = building['polyPix'].GetEnvelope() + boxDim = feature['polyPix'].GetEnvelope() if bboxResize != 1.0: xmin = boxDim[0] @@ -825,7 +831,7 @@ def geoJsonToDARKNET(xmlFileName, geoJson, rasterImageName, im_id='', rasterSize = (srcRaster.RasterXSize, srcRaster.RasterYSize) lineOutput = convertPixDimensionToPercent(rasterSize, boxDim) - classNum=0 + classNum=feature['featureIdNum'] f.write('{} {} {} {} {}\n'.format(classNum, lineOutput[0], lineOutput[1], From 8a87409aafebc166d6b65707ad9b92371737210a Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 19:13:36 -0400 Subject: [PATCH 07/26] removed raising error if featureName is not recognized --- python/spaceNetUtilities/geoTools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/python/spaceNetUtilities/geoTools.py b/python/spaceNetUtilities/geoTools.py index b24be1d..be440f1 100644 --- a/python/spaceNetUtilities/geoTools.py +++ b/python/spaceNetUtilities/geoTools.py @@ -583,7 +583,7 @@ def convert_wgs84geojson_to_pixgeojson(wgs84geojson, inputraster, image_id=[], p try: featureId = objectClassDict[featureName]['featureIdNum'] except: - raise('featureName {} not recognized'.format(featureName)) + print('featureName {} not recognized'.format(featureName)) else: featureId = 1 From 99d838c930d91df9c282e8c68f6260bab7e7a7af Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 19:16:34 -0400 Subject: [PATCH 08/26] fixed omitted createDataSpaceNet.py --- python/createDataSpaceNet.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/python/createDataSpaceNet.py b/python/createDataSpaceNet.py index f4050ab..222a7e7 100644 --- a/python/createDataSpaceNet.py +++ b/python/createDataSpaceNet.py @@ -341,7 +341,8 @@ def createTrainTestSplitSummary(entryList, trainTestSplit=0.8, datasetName='spacenetV2', folder_name='folder_name', bboxResize= args.boundingBoxResize, - objectClassDict=objectClassDict + objectClassDict=objectClassDict, + attributeName=args.attributeName ) print(entryListTmp) entryList.extend(entryListTmp) From 67241008f66585c9f10c450abb919cc45b153096 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 19:22:06 -0400 Subject: [PATCH 09/26] updated createDataSpaceNet.py to support DARKNET POI testing --- python/createDataSpaceNet.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/python/createDataSpaceNet.py b/python/createDataSpaceNet.py index 222a7e7..9e350c5 100644 --- a/python/createDataSpaceNet.py +++ b/python/createDataSpaceNet.py @@ -97,7 +97,9 @@ def processChipSummaryList(chipSummaryList, outputDirectory='', annotationType=' convertTo8Bit=convertTo8Bit, outputPixType=outputPixType, outputFormat=outputFormat, - bboxResize=bboxResize + bboxResize=bboxResize, + 
objectClassDict=objectClassDict, + attributeName=attributeName ) elif annotationType=='SBD': From fcf63cfafe2ae5a97b9e6382c599e7d3a65052b1 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Thu, 13 Apr 2017 19:24:52 -0400 Subject: [PATCH 10/26] updated geoTools.py so that featureId number is featureIdNUm --- python/spaceNetUtilities/geoTools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/spaceNetUtilities/geoTools.py b/python/spaceNetUtilities/geoTools.py index be440f1..b7d6002 100644 --- a/python/spaceNetUtilities/geoTools.py +++ b/python/spaceNetUtilities/geoTools.py @@ -606,7 +606,7 @@ def convert_wgs84geojson_to_pixgeojson(wgs84geojson, inputraster, image_id=[], p 'polyGeo': ogr.CreateGeometryFromWkt(geom_wkt[1]), 'polyPix': ogr.CreateGeometryFromWkt(geom_wkt[0]), 'featureName': featureName, - 'featureId': featureId + 'featureIdNum': featureId }) else: building_id += 1 @@ -615,7 +615,7 @@ def convert_wgs84geojson_to_pixgeojson(wgs84geojson, inputraster, image_id=[], p 'polyGeo': ogr.CreateGeometryFromWkt(geom.ExportToWkt()), 'polyPix': ogr.CreateGeometryFromWkt('POLYGON EMPTY'), 'featureName' : featureName, - 'featureId': featureId + 'featureIdNum': featureId }) else: #print("no File exists") From 8360a7c9e8bc2247142bdca09bab896782e227ff Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Mon, 17 Apr 2017 19:23:13 -0400 Subject: [PATCH 11/26] updated featureBBoxSizeM for better fit --- configFiles/AOI_1_Rio_POI_Description.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/configFiles/AOI_1_Rio_POI_Description.json b/configFiles/AOI_1_Rio_POI_Description.json index 01661f7..9e4f992 100644 --- a/configFiles/AOI_1_Rio_POI_Description.json +++ b/configFiles/AOI_1_Rio_POI_Description.json @@ -12,7 +12,7 @@ "Airfield_POIHelipad": { "featureIdNum": 3, "featureChipScaleM": 200, - "featureBBoxSizeM": 25 + "featureBBoxSizeM": 18 }, "Airfield_POISupport Facility": { "featureIdNum": 4, @@ -22,12 +22,12 @@ 
"Bridges_TunnelsBridgePedestrian": { "featureIdNum": 5, "featureChipScaleM": 200, - "featureBBoxSizeM": 25 + "featureBBoxSizeM": 20 }, "Bridges_TunnelsBridgeRoad": { "featureIdNum": 6, "featureChipScaleM": 200, - "featureBBoxSizeM": 25 + "featureBBoxSizeM": 20 }, "Commercial_POIServiceBar": { "featureIdNum": 7, @@ -37,7 +37,7 @@ "Electrical_POITransmission Tower": { "featureIdNum": 8, "featureChipScaleM": 200, - "featureBBoxSizeM": 10 + "featureBBoxSizeM": 12 }, "EmbassiesConsulate": { "featureIdNum": 9, @@ -67,12 +67,12 @@ "Recreation_POIPool": { "featureIdNum": 14, "featureChipScaleM": 200, - "featureBBoxSizeM": 50 + "featureBBoxSizeM": 10 }, "Recreation_POISports Facility": { "featureIdNum": 15, "featureChipScaleM": 200, - "featureBBoxSizeM": 50 + "featureBBoxSizeM": 30 }, "Religious_InstitutionsChurch": { "featureIdNum": 16, From 71c283aaabf8cd56bfe29b55cd47043604a60bfd Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Tue, 18 Apr 2017 14:21:27 -0400 Subject: [PATCH 12/26] updated createSpaceNetPOI.py to do train/test split by region --- python/createSpaceNetPOI.py | 97 +++++++++++++++++++++++++++++++------ 1 file changed, 83 insertions(+), 14 deletions(-) diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index 295437f..5bd6adc 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -2,6 +2,7 @@ from spaceNetUtilities import geoTools as gT import json import os +import subprocess @@ -130,6 +131,52 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s return chipSummaryList + +def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): + + + longMin = -500 + latMin = -500 + if latCutOff == -500: + latMax = 500 + else: + latMax = latCutOff + + if lonCutOff == -500: + longMax = 500 + else: + longMax = lonCutOff + + + outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') + cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson, + '-clipsrc', longMin, latMin, longMax, 
latMax] + + subprocess.call(cmd) + longMin = -500 + latMin = -500 + if latCutOff == -500: + latMax = 500 + else: + latMin = latCutOff + latMax = 500 + + + if lonCutOff == -500: + longMax = 500 + else: + longMin = lonCutOff + longMax = 500 + + outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') + cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson, + '-clipsrc', longMin, latMin, longMax, latMax] + subprocess.call(cmd) + + + return outputGeoJsonTest, outputGeoJsonTrain + + if __name__ == '__main__': createOutVectorFile = True @@ -140,6 +187,23 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s baseName = 'AOI_1_RIO' featureDescriptionJson = '../configFiles/AOI_1_Rio_POI_Description.json' seperateImageFolders = False + splitGeoJson = True + splitGeoJson_latCutOff = -500 #-500 is ignore + splitGeoJson_lonCutOff = -43.25 #-500 is ignore + srcVectorFileList = [[srcVectorFile, 'all']] + if splitGeoJson: + srcVectorTrain, srcVectorTest = splitVectorFile(srcVectorFile, + latCutOff=splitGeoJson_latCutOff, + lonCutOff=splitGeoJson_lonCutOff) + + srcVectorFileList = [ + [srcVectorTrain, 'train'], + [srcVectorTest, 'test'] + ] + + + + # List of Raster Images to Chip and an appropriate label. # This list will take any type of Raster supported by GDAL @@ -153,6 +217,7 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s [outVectorFile, 'POIAll'] ] + ### Define Point of Interest Dictionary # {'spacenetFeatureName': # {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping @@ -165,24 +230,28 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s pointOfInterestList = json.load(j) # create Polygon of Interet from Point of Interest File. This will create bounding boxes of specified size. 
- if createOutVectorFile: - createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList) + for srcVectorFile, folderType in srcVectorFileList: + outVectorFile = srcVectorFile.replace('.geojson', 'poly.geojson') + if createOutVectorFile: + createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList) - # create Folder Structure to place files into. + outputDirectoryTmp = os.path.join(outputDirectory, folderType) - for rasterFile in rasterFileList: - for featureName in pointOfInterestList.keys(): - tmpPath = os.path.join(outputDirectory, rasterFile[1], featureName.replace(' ', '')) - if not os.path.exists(tmpPath): - os.makedirs(tmpPath) + # create Folder Structure to place files into. - # create Processed Point of Interest Data. - createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, - baseName=baseName, - className='', - outputDirectory=outputDirectory, - seperateImageFolders=seperateImageFolders) + for rasterFile in rasterFileList: + for featureName in pointOfInterestList.keys(): + tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], featureName.replace(' ', '')) + if not os.path.exists(tmpPath): + os.makedirs(tmpPath) + + # create Processed Point of Interest Data. 
+ createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, + baseName=baseName, + className='', + outputDirectory=outputDirectoryTmp, + seperateImageFolders=seperateImageFolders) From b78189d3fcb22e7c8acf87526216a9d48ad87f1a Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Tue, 18 Apr 2017 14:24:50 -0400 Subject: [PATCH 13/26] updated createSpaceNetPOI.py to do train/test split, fixed error --- python/createSpaceNetPOI.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index 5bd6adc..3f6efec 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -238,6 +238,9 @@ def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): outputDirectoryTmp = os.path.join(outputDirectory, folderType) + shapeFileSrcList = [ + [outVectorFile, 'POIAll'] + ] # create Folder Structure to place files into. for rasterFile in rasterFileList: From 6d98ba821d17845c4c69944f39214a39273dbf6a Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Tue, 18 Apr 2017 14:42:10 -0400 Subject: [PATCH 14/26] updated createSpaceNetPOI.py to do train/test split, Add minPartail to include default 0p70, updated for fix to ogr2ogr command calld --- python/createSpaceNetPOI.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index 3f6efec..19a5160 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -68,7 +68,8 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s baseName='', className='', outputDirectory='', - seperateImageFolders=False + seperateImageFolders=False, + minPartialToInclue = 0.70 ): @@ -119,7 +120,7 @@ def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, s chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeSrcList, maxXCut, maxYCut, minYCut, minXCut, rasterFileBaseList=[], - minpartialPerc=0, + 
minpartialPerc=minPartialToInclue, outputPrefix='', createPix=False, rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"), @@ -150,7 +151,7 @@ def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson, - '-clipsrc', longMin, latMin, longMax, latMax] + '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)] subprocess.call(cmd) longMin = -500 @@ -170,7 +171,7 @@ def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson, - '-clipsrc', longMin, latMin, longMax, latMax] + '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)] subprocess.call(cmd) @@ -254,7 +255,8 @@ def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): baseName=baseName, className='', outputDirectory=outputDirectoryTmp, - seperateImageFolders=seperateImageFolders) + seperateImageFolders=seperateImageFolders, + minPartialToInclue=0.70) From 3316b6bad936cc7683bad2fa3396159f81e5f4e8 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Wed, 19 Apr 2017 19:23:41 -0400 Subject: [PATCH 15/26] added deduplication feature using geopandas --- python/cleangeoJson.py | 0 python/createSpaceNetPOI.py | 26 +++++++++++++++++++++++--- 2 files changed, 23 insertions(+), 3 deletions(-) create mode 100644 python/cleangeoJson.py diff --git a/python/cleangeoJson.py b/python/cleangeoJson.py new file mode 100644 index 0000000..e69de29 diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index 19a5160..1964b10 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -3,7 +3,7 @@ import json import os import subprocess - +import geopandas as gpd def returnBoundBoxM(tmpGeom, metersRadius=50): @@ -175,8 +175,22 @@ def 
splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): subprocess.call(cmd) - return outputGeoJsonTest, outputGeoJsonTrain + return outputGeoJsonTrain, outputGeoJsonTest + +def deduplicateGeoJson(geoJsonIn, geoJsonOut=''): + + + df = gpd.read_file(geoJsonIn) + df['y'] = df.geometry.map(lambda p: p.y) + df['x'] = df.geometry.map(lambda p: p.x) + df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True) + df.drop(['x', 'y'], 1, inplace=True) + if geoJsonOut=='': + geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson') + df.to_file(geoJsonOut, driver='GeoJSON') + + return geoJsonOut if __name__ == '__main__': @@ -189,9 +203,15 @@ def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): featureDescriptionJson = '../configFiles/AOI_1_Rio_POI_Description.json' seperateImageFolders = False splitGeoJson = True - splitGeoJson_latCutOff = -500 #-500 is ignore + deduplicateGeoJsonFlag = True + + if deduplicateGeoJsonFlag: + srcVectorFile = deduplicateGeoJson(srcVectorFile) + + splitGeoJson_latCutOff = -22.94 #-500 is ignore splitGeoJson_lonCutOff = -43.25 #-500 is ignore srcVectorFileList = [[srcVectorFile, 'all']] + if splitGeoJson: srcVectorTrain, srcVectorTest = splitVectorFile(srcVectorFile, latCutOff=splitGeoJson_latCutOff, From d2c0ff15a766c93e83cd547220abd09b756dfbd2 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Wed, 19 Apr 2017 19:24:21 -0400 Subject: [PATCH 16/26] removed python/cleangeoJson.py it was scratch --- python/cleangeoJson.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 python/cleangeoJson.py diff --git a/python/cleangeoJson.py b/python/cleangeoJson.py deleted file mode 100644 index e69de29..0000000 From bdcadfe410403fab83bc386bb8817a3182932b7a Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Wed, 19 Apr 2017 19:32:22 -0400 Subject: [PATCH 17/26] added updated Dockerfile to install geopandas --- docker/Dockerfile | 3 +++ docker/standalone/cpu/Dockerfile | 3 +++ 
docker/standalone/gpu/Dockerfile | 3 +++ 3 files changed, 9 insertions(+) diff --git a/docker/Dockerfile b/docker/Dockerfile index 6520d05..ff87057 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -77,6 +77,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ RUN pip install rtree +## Install GeoPandas +RUN pip install geopandas + ENV GIT_BASE=/opt/ WORKDIR $GIT_BASE diff --git a/docker/standalone/cpu/Dockerfile b/docker/standalone/cpu/Dockerfile index cab2c76..0d406e1 100644 --- a/docker/standalone/cpu/Dockerfile +++ b/docker/standalone/cpu/Dockerfile @@ -48,6 +48,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ RUN pip install rtree +## Install GeoPandas +RUN pip install geopandas + ENV GIT_BASE=/opt/ WORKDIR $GIT_BASE diff --git a/docker/standalone/gpu/Dockerfile b/docker/standalone/gpu/Dockerfile index 112700f..51b12fd 100644 --- a/docker/standalone/gpu/Dockerfile +++ b/docker/standalone/gpu/Dockerfile @@ -47,6 +47,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ RUN pip install rtree +## Install GeoPandas +RUN pip install geopandas + ENV GIT_BASE=/opt/ WORKDIR $GIT_BASE From 49690bcc49025880c6b0aa18a377613817b57942 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Wed, 19 Apr 2017 20:18:25 -0400 Subject: [PATCH 18/26] specified encoding type = utf-8 for geopandas read in and out. 
Neccessary for linux support --- python/createSpaceNetPOI.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index 1964b10..4b733f1 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -177,10 +177,10 @@ def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): return outputGeoJsonTrain, outputGeoJsonTest -def deduplicateGeoJson(geoJsonIn, geoJsonOut=''): +def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'): - df = gpd.read_file(geoJsonIn) + df = gpd.read_file(geoJsonIn, encoding=encoding) df['y'] = df.geometry.map(lambda p: p.y) df['x'] = df.geometry.map(lambda p: p.x) df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True) @@ -188,7 +188,7 @@ def deduplicateGeoJson(geoJsonIn, geoJsonOut=''): if geoJsonOut=='': geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson') - df.to_file(geoJsonOut, driver='GeoJSON') + df.to_file(geoJsonOut, driver='GeoJSON', encoding=encoding) return geoJsonOut From cb29bae9bb3e63df1a419d6dcfd3b53b5abe0186 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Wed, 19 Apr 2017 21:07:49 -0400 Subject: [PATCH 19/26] udpated splitVector for Test Traint to now give bounding box to include --- python/createSpaceNetPOI.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index 4b733f1..acdc6c9 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -4,7 +4,7 @@ import os import subprocess import geopandas as gpd - +from geopandas.geoseries import Polygon def returnBoundBoxM(tmpGeom, metersRadius=50): poly = gT.createPolygonFromCenterPoint(tmpGeom.GetX(), tmpGeom.GetY(), @@ -192,6 +192,19 @@ def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'): return geoJsonOut +def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8'): + + 
insidePoly = Polygon([(lonMin, latMin), (lonMin, latMax), (lonMax, latMax), (lonMax, latMin)]) + df = gpd.read_file(geoJson, encoding=encoding) + + outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') + df[~df.intersects(insidePoly)].to_file(outputGeoJsonTrain, driver='GeoJSON', encoding=encoding) + + outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') + df[df.intersects(insidePoly)].to_file(outputGeoJsonTest, driver='GeoJSON', encoding=encoding) + + return outputGeoJsonTrain, outputGeoJsonTest + if __name__ == '__main__': createOutVectorFile = True @@ -208,14 +221,20 @@ def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'): if deduplicateGeoJsonFlag: srcVectorFile = deduplicateGeoJson(srcVectorFile) - splitGeoJson_latCutOff = -22.94 #-500 is ignore - splitGeoJson_lonCutOff = -43.25 #-500 is ignore + splitGeoJson_latMin = -22.94 + splitGeoJson_latMax = 90 + splitGeoJson_lonMin = -43.25 + splitGeoJson_lonMax = 180 + srcVectorFileList = [[srcVectorFile, 'all']] if splitGeoJson: - srcVectorTrain, srcVectorTest = splitVectorFile(srcVectorFile, - latCutOff=splitGeoJson_latCutOff, - lonCutOff=splitGeoJson_lonCutOff) + srcVectorTrain, srcVectorTest = splitVectorFileGPD(srcVectorFile, + latMin=splitGeoJson_latMin, + latMax=splitGeoJson_latMax, + lonMin=splitGeoJson_lonMin, + lonMax=splitGeoJson_lonMax, + ) srcVectorFileList = [ [srcVectorTrain, 'train'], From 2bb86fba7641494ae9c3fd889323cd64e2853202 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Wed, 17 May 2017 20:26:28 -0400 Subject: [PATCH 20/26] modified externatlVectorProcessing.py for OSM usage --- python/externalVectorProcessing.py | 32 +++++++++++++++++------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/python/externalVectorProcessing.py b/python/externalVectorProcessing.py index da921fb..157c275 100644 --- a/python/externalVectorProcessing.py +++ b/python/externalVectorProcessing.py @@ -10,9 +10,9 @@ def buildTindex(rasterFolder, 
rasterExtention='.tif'): rasterList = glob.glob(os.path.join(rasterFolder, '*{}'.format(rasterExtention))) - print(rasterList) + #print(rasterList) - print(os.path.join(rasterFolder, '*{}'.format(rasterExtention))) + #print(os.path.join(rasterFolder, '*{}'.format(rasterExtention))) memDriver = ogr.GetDriverByName('MEMORY') gTindex = memDriver.CreateDataSource('gTindex') @@ -54,7 +54,7 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput else: gTindex = ogr.Open(rasterTileIndex,0) gTindexLayer = gTindex.GetLayer() - + print(vectorSrcFile) shapeSrc = ogr.Open(vectorSrcFile,0) chipSummaryList = [] for feature in gTindexLayer: @@ -77,7 +77,7 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput if __name__ == "__main__": - parser = argparse.ArgumentParser() + parser = argparse.ArgumentParser(description='This code will push a new Vector Source into already tiled imagery') parser.add_argument("-imgDir", "--imgDir", type=str, help="Directory of Raster Images") parser.add_argument("-vecSrc", "--vectorSrcFile", type=str, @@ -93,7 +93,8 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput default='.tif') parser.add_argument("-o", "--outputCSV", type=str, - help="Output file name and location for truth summary CSV equivalent to SpacenetV2 competition") + help="Output file name and location for truth summary CSV equivalent to SpacenetV2 competition", + default='') parser.add_argument("-pixPrecision", "--pixelPrecision", type=int, help="Number of decimal places to include for pixel, uses round(xPix, pixPrecision)" "Default = 2", @@ -110,10 +111,10 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput rasterFolderLocation = args.imgDir - vectorSrcFile = args.imgDir + vectorSrcFile = args.vectorSrcFile vectorPrefix = args.vectorPrefix rasterPrefix = args.rasterPrefix - pixPrecision = args.pixPrecision + pixPrecision = args.pixelPrecision createProposalFile = 
args.CreateProposalFile rasterFileExtension = args.rasterExtension @@ -122,18 +123,21 @@ def createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutput rasterFolderBaseName = os.path.basename(os.path.dirname(rasterFolderLocation)) geoJsonOutputDirectory = os.path.join(os.path.dirname(vectorSrcFile), rasterFolderBaseName) + print geoJsonOutputDirectory + if not os.path.exists(geoJsonOutputDirectory): + os.makedirs(geoJsonOutputDirectory) chipSummaryList = createTiledGeoJsonFromSrc(rasterFolderLocation, vectorSrcFile, geoJsonOutputDirectory, rasterTileIndex='', geoJsonPrefix=vectorPrefix, rasterFileExtenstion=rasterFileExtension, rasterPrefixToReplace=rasterPrefix ) - - outputCSVFileName = geoJsonOutputDirectory+"OSM_Proposal.csv" - lT.createCSVSummaryFile(chipSummaryList, outputCSVFileName, - replaceImageID=rasterPrefix+"_", - pixPrecision=pixPrecision, - createProposalsFile=createProposalFile - ) + if args.outputCSV != '': + outputCSVFileName = args.outputCSV + lT.createCSVSummaryFile(chipSummaryList, outputCSVFileName, + replaceImageID=rasterPrefix+"_", + pixPrecision=pixPrecision, + createProposalsFile=createProposalFile + ) From b2edd0c4459b25e5c011740f1e173f8ffc13e9d3 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Wed, 17 May 2017 20:54:03 -0400 Subject: [PATCH 21/26] update geoTools.py --- python/spaceNetUtilities/geoTools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/spaceNetUtilities/geoTools.py b/python/spaceNetUtilities/geoTools.py index 6129afc..a26f1e9 100644 --- a/python/spaceNetUtilities/geoTools.py +++ b/python/spaceNetUtilities/geoTools.py @@ -806,7 +806,7 @@ def clipShapeFile(shapeSrc, outputFileName, polyToCut, minpartialPerc=0.0, shape outGeoJSon = os.path.splitext(outputFileName)[0] + '.geojson' if not os.path.exists(os.path.dirname(outGeoJSon)): os.makedirs(os.path.dirname(outGeoJSon)) - + print(outGeoJSon) outDriver = ogr.GetDriverByName("geojson") if os.path.exists(outGeoJSon): 
outDriver.DeleteDataSource(outGeoJSon) From edbad36f8ccde9b95ebb323f4833b9ae0aa39ca1 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Sun, 13 Aug 2017 12:05:40 -0400 Subject: [PATCH 22/26] mid status geoToolsGPD.py update --- configFiles/OSM_Power.json | 0 python/createSpaceNetPOI.py | 10 +- python/createSpaceNetPOI_Round2.py | 356 +++++++ python/spaceNetUtilities/geoToolsGPD.py | 1063 ++++++++++++++++++++ python/spaceNetUtilities/inferenceTools.py | 0 python/spaceNetUtilities/rasterTools.py | 0 6 files changed, 1423 insertions(+), 6 deletions(-) create mode 100644 configFiles/OSM_Power.json create mode 100644 python/createSpaceNetPOI_Round2.py create mode 100644 python/spaceNetUtilities/geoToolsGPD.py create mode 100644 python/spaceNetUtilities/inferenceTools.py create mode 100644 python/spaceNetUtilities/rasterTools.py diff --git a/configFiles/OSM_Power.json b/configFiles/OSM_Power.json new file mode 100644 index 0000000..e69de29 diff --git a/python/createSpaceNetPOI.py b/python/createSpaceNetPOI.py index acdc6c9..35d2572 100644 --- a/python/createSpaceNetPOI.py +++ b/python/createSpaceNetPOI.py @@ -211,9 +211,9 @@ def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8' srcVectorFile = '/path/To/PointOfInterestSummary.geojson' outVectorFile = '/path/To/PolygonOfInterestSummary.geojson' outputDirectory = '/path/To/processedDataPOI/' + baseName = 'AOI_3_Paris' + featureDescriptionJson = '../configFiles/OSM_Power.json' className = 'POIAll' - baseName = 'AOI_1_RIO' - featureDescriptionJson = '../configFiles/AOI_1_Rio_POI_Description.json' seperateImageFolders = False splitGeoJson = True deduplicateGeoJsonFlag = True @@ -249,13 +249,11 @@ def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8' # This list will take any type of Raster supported by GDAL # VRTs are nice becasuse they create a virtual mosaic and allow for images covering a wide area to be considered one # In this case gdalbuildvrt can be run in a folder of 
tifs to create the VRT to be handed for processing - rasterFileList = [['/path/To/3-BandVRT.vrt', '3band'], + rasterFileList = [['/path/To/Pan-Band.vrt', 'Pan'], ['/path/To/8-BandVRT.vrt', '8band'] ] - shapeFileSrcList = [ - [outVectorFile, 'POIAll'] - ] + ### Define Point of Interest Dictionary diff --git a/python/createSpaceNetPOI_Round2.py b/python/createSpaceNetPOI_Round2.py new file mode 100644 index 0000000..cd39987 --- /dev/null +++ b/python/createSpaceNetPOI_Round2.py @@ -0,0 +1,356 @@ +from osgeo import ogr, gdal, osr +from spaceNetUtilities import geoTools as gT +import json +import os +import subprocess +import geopandas as gpd +from geopandas.geoseries import Polygon +import osmnx as ox +def returnBoundBoxM(tmpGeom, metersRadius=50): + poly = gT.createPolygonFromCenterPoint(tmpGeom.GetX(), tmpGeom.GetY(), + metersRadius) + + polyEnv = gT.get_envelope(poly) + + + + + return polyEnv + +def createPolygonShapeFileGPD(srcVectorFile, pointOfInterestList, outVectorFile=''): + gdfOrig = gpd.read_file(srcVectorFile) + + firstKey = True + + + + ## Iterage through keys for valid points of interest, These correspond to OSM Tags such as Power or Places. + for pointOfInterestKey in pointOfInterestList.keys(): + ## Select from Vector File all rows where Point of Interest Columns is not an empty String + gdfKey = gdfOrig[gdfOrig[pointOfInterestKey] != ''] + + # Get list of defined features in the set Tag List, i.e. power=objectName + + objectNameList = pointOfInterestList[pointOfInterestKey].keys() + + # iterate through object names to apply specific actions with regard to the object like bounding box size. 
+ + for objectName in objectNameList: + # select only those objects where the key=object i.e power=tower + gdfPOITemp = gdfKey[gdfKey[pointOfInterestKey] == objectName] + + # "compensator": { + # "featureIdNum": 1, + # "featureChipScaleM": 200, + # "featureBBoxSizeM": 5 + # }, + # project subset to UTM so that actions can be done with regard to meters + gdfPOITempUTM = ox.project_gdf(gdfPOITemp) + # buffer geometry to create circle around point with x meters + gdfPOITempUTM.geometry=gdfPOITempUTM.buffer(pointOfInterestList[pointOfInterestKey][objectName]['featureBBoxSizeM']) + # calculate the envelope of the circle to create a bounding box for each object + gdfPOITempUTM.geometry=gdfPOITempUTM.envelope + + # reporject to wgs84 (lat long) + gdfPOITemp = ox.project_gdf(gdfPOITempUTM, to_latlong=True) + # assign a name to spacefeat for future use in the vector: "power:tower" + gdfPOITemp['spacefeat']=pointOfInterestList[pointOfInterestKey][objectName]['spaceFeatureName'] + + # if first key, create final gdf + if firstKey: + gdfFinal = gdfPOITemp + else: + # else add new to end of gdfFianl + gdfFinal = gdfFinal.concat(gdfPOITemp) + + if outVectorFile != '': + gdfFinal.to_file(outVectorFile) + + return gdfFinal + +def createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList): + + ## Turn Point File into PolygonFile + + + srcDS = ogr.Open(srcVectorFile, 0) + inLayer = srcDS.GetLayer() + srs = osr.SpatialReference() + srs.ImportFromEPSG(4326) + outDriver = ogr.GetDriverByName("GeoJSON") + if os.path.exists(outVectorFile): + outDriver.DeleteDataSource(outVectorFile) + + dstDS = outDriver.CreateDataSource(outVectorFile) + + outLayer = dstDS.CreateLayer('polygons', srs, geom_type=ogr.wkbPolygon) + + # Get Attribute names + inLayerDefn = inLayer.GetLayerDefn() + for i in range(0, inLayerDefn.GetFieldCount()): + fieldDefn = inLayerDefn.GetFieldDefn(i) + outLayer.CreateField(fieldDefn) + + outLayerDefn = outLayer.GetLayerDefn() + + # Copy Features + for i in 
range(0, inLayer.GetFeatureCount()): + + inFeature = inLayer.GetFeature(i) + outFeature = ogr.Feature(outLayerDefn) + for i in range(0, outLayerDefn.GetFieldCount()): + outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i)) + # Set geometry as centroid + tmpGeom = inFeature.GetGeometryRef() + poly = returnBoundBoxM(tmpGeom, metersRadius=pointOfInterestList[inFeature.GetField('spaceNetFeature')]['featureBBoxSizeM']) + + outFeature.SetGeometry(poly) + + outLayer.CreateFeature(outFeature) + outFeature = None + inFeature = None + + srcDS = None + dstDS = None + +def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, + baseName='', + className='', + outputDirectory='', + seperateImageFolders=False, + minPartialToInclue = 0.70, + fieldName='spacefeat'): + + + chipSummaryList = [] + srcDS = srcDS = ogr.Open(srcVectorFile, 0) + srcLayer = srcDS.GetLayer() + + + shapeSrcList = [] + for shapeFileSrc in shapeFileSrcList: + print(shapeFileSrc[1]) + shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]]) + + ## determinePixelSize in Meters + print rasterFileList + print rasterFileList[0][0] + srcRaster = gdal.Open(rasterFileList[0][0]) + geoTransform = srcRaster.GetGeoTransform() + metersPerPix = abs(round(geoTransform[5]*111111,1)) + + imgId = 0 + for feature in srcLayer: + + geom = feature.GetGeometryRef() + + if geom.GetGeometryName != 'POINT': + geom = geom.Centroid() + + + spacenetFeatureName = feature.GetField('spaceNetFeature') + if seperateImageFolders: + className = spacenetFeatureName.replace(' ', '') + else: + className = '' + clipSize = pointOfInterestList[spacenetFeatureName]['featureChipScaleM'] + halfClipSizeXm = round((clipSize/metersPerPix)/2) + + xCenter = geom.GetX() + yCenter = geom.GetY() + + maxXCut = xCenter + halfClipSizeXm*geoTransform[1] + maxYCut = yCenter + abs(halfClipSizeXm*geoTransform[5]) + minYCut = yCenter - abs(halfClipSizeXm*geoTransform[5]) + minXCut = 
xCenter - halfClipSizeXm*geoTransform[1] + + imgId = imgId + 1 + + chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeSrcList, + maxXCut, maxYCut, minYCut, minXCut, + rasterFileBaseList=[], + minpartialPerc=minPartialToInclue, + outputPrefix='', + createPix=False, + rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"), + className=className, + baseName=baseName, + imgId=imgId) + + chipSummaryList.append(chipSummary) + + return chipSummaryList + + +def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): + + + longMin = -500 + latMin = -500 + if latCutOff == -500: + latMax = 500 + else: + latMax = latCutOff + + if lonCutOff == -500: + longMax = 500 + else: + longMax = lonCutOff + + + outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') + cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson, + '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)] + + subprocess.call(cmd) + longMin = -500 + latMin = -500 + if latCutOff == -500: + latMax = 500 + else: + latMin = latCutOff + latMax = 500 + + + if lonCutOff == -500: + longMax = 500 + else: + longMin = lonCutOff + longMax = 500 + + outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') + cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson, + '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)] + subprocess.call(cmd) + + + return outputGeoJsonTrain, outputGeoJsonTest + +def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'): + + + df = gpd.read_file(geoJsonIn, encoding=encoding) + df['y'] = df.geometry.map(lambda p: p.y) + df['x'] = df.geometry.map(lambda p: p.x) + df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True) + df.drop(['x', 'y'], 1, inplace=True) + + if geoJsonOut=='': + geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson') + df.to_file(geoJsonOut, driver='GeoJSON', encoding=encoding) + + return geoJsonOut + 
+def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8'): + + insidePoly = Polygon([(lonMin, latMin), (lonMin, latMax), (lonMax, latMax), (lonMax, latMin)]) + df = gpd.read_file(geoJson, encoding=encoding) + + outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') + df[~df.intersects(insidePoly)].to_file(outputGeoJsonTrain, driver='GeoJSON', encoding=encoding) + + outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') + df[df.intersects(insidePoly)].to_file(outputGeoJsonTest, driver='GeoJSON', encoding=encoding) + + return outputGeoJsonTrain, outputGeoJsonTest + +if __name__ == '__main__': + + createOutVectorFile = True + srcVectorFile = '/path/To/PointOfInterestSummary.geojson' + outVectorFile = '/path/To/PolygonOfInterestSummary.geojson' + outputDirectory = '/path/To/processedDataPOI/' + baseName = 'AOI_3_Paris' + featureDescriptionJson = '../configFiles/OSM_Power.json' + className = 'POIAll' + seperateImageFolders = False + splitGeoJson = True + deduplicateGeoJsonFlag = True + + if deduplicateGeoJsonFlag: + srcVectorFile = deduplicateGeoJson(srcVectorFile) + + splitGeoJson_latMin = -22.94 + splitGeoJson_latMax = 90 + splitGeoJson_lonMin = -43.25 + splitGeoJson_lonMax = 180 + + srcVectorFileList = [[srcVectorFile, 'all']] + + if splitGeoJson: + srcVectorTrain, srcVectorTest = splitVectorFileGPD(srcVectorFile, + latMin=splitGeoJson_latMin, + latMax=splitGeoJson_latMax, + lonMin=splitGeoJson_lonMin, + lonMax=splitGeoJson_lonMax, + ) + + srcVectorFileList = [ + [srcVectorTrain, 'train'], + [srcVectorTest, 'test'] + ] + + + + + + # List of Raster Images to Chip and an appropriate label. 
+
+    # This list will take any type of Raster supported by GDAL
+    # VRTs are nice becasuse they create a virtual mosaic and allow for images covering a wide area to be considered one
+    # In this case gdalbuildvrt can be run in a folder of tifs to create the VRT to be handed for processing
+    rasterFileList = [['/path/To/Pan-VRT.vrt', 'PAN'],
+                      ['/path/To/MUL-VRT.vrt', 'MUL'],
+                      ['/path/To/RGB-PanSharpen-VRT.vrt', 'RGB-PanSharpen'],
+                      ['/path/To/MUL-PanSharpen-VRT.vrt', 'MUL-PanSharpen']
+                      ]
+
+
+
+
+    ### Define Point of Interest Dictionary
+    # {'spacenetFeatureName':
+    #       {'featureIdNum': '1', # iterative feature id.  Used for object name to class number mapping
+    #        'featureChipScaleM': 200, # Size of chip to create in meters
+    #        'featureBBoxSizeM': 10    # Feature Bounding Box Assumption.  Code will draw an x meter bounding box around the point
+    #                                  # indicating in the geojson.  This will be used in creating polygons for IOU scoring
+    #       }
+
+    with open(featureDescriptionJson, 'r') as j:
+        pointOfInterestList = json.load(j)
+
+    # create Polygon of Interet from Point of Interest File.  This will create bounding boxes of specified size.
+    for srcVectorFile, folderType in srcVectorFileList:
+        outVectorFile = srcVectorFile.replace('.geojson', 'poly.shp')
+        if createOutVectorFile:
+            createPolygonShapeFileGPD(srcVectorFile, pointOfInterestList, outVectorFile=outVectorFile)
+
+        outputDirectoryTmp = os.path.join(outputDirectory, folderType)
+
+        shapeFileSrcList = [
+            [outVectorFile, 'POIAll']
+        ]
+        # create Folder Structure to place files into.
+ + for rasterFile in rasterFileList: + for keyName in pointOfInterestList.keys(): + tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], keyName.replace(' ', '')) + if not os.path.exists(tmpPath): + os.makedirs(tmpPath) + + for objectName in pointOfInterestList[keyName].keys(): + tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], keyName.replace(' ', ''), objectName.replace(" ","")) + if not os.path.exists(tmpPath): + os.makedirs(tmpPath) + + # create Processed Point of Interest Data. + createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, + baseName=baseName, + className='', + outputDirectory=outputDirectoryTmp, + seperateImageFolders=seperateImageFolders, + minPartialToInclue=0.70) + + + + + diff --git a/python/spaceNetUtilities/geoToolsGPD.py b/python/spaceNetUtilities/geoToolsGPD.py new file mode 100644 index 0000000..d39ff9d --- /dev/null +++ b/python/spaceNetUtilities/geoToolsGPD.py @@ -0,0 +1,1063 @@ +from osgeo import gdal, osr, ogr +import numpy as np +import os +import csv +import subprocess +import math +import geopandas as gpd +import shapely +import pandas as pd +import rasterio as rio +import affine as af +from shapely.geometry import Point +import pyproj +from shapely.geometry.polygon import Polygon +from shapely.geometry.multipolygon import MultiPolygon +from shapely.geometry.linestring import LineString +from shapely.geometry.multilinestring import MultiLineString +from functools import partial +try: + import rtree + import centerline + import osmnx +except: + print("rtree not installed, Will break evaluation code") + + +def import_summary_geojsonGPD(geojsonfilename, removeNoBuildings=True): + """read summary spacenetV2 geojson into geopandas dataFrame. 
+ + Keyword arguments: + geojsonfilename -- geojson to read + removeNoBuildings -- remove all samples with BuildingId == -1 (default =True) + """ + + + buildingList_df = gpd.read_file(geojsonfilename) + + + if removeNoBuildings: + buildingList_df = buildingList_df[buildingList_df['BuildingId']!=-1] + + buildingList_df['poly'] = buildingList_df.geometry + + return buildingList_df + + +def import_chip_geojson(geojsonfilename, ImageId=''): + """read spacenetV2 chip geojson into geopandas dataFrame. + + Keyword arguments: + geojsonfilename -- geojson to read + ImageId -- Specify ImageId. If not specified. ImageId is defined by + os.path.splitext(os.path.basename(geojsonfilename))[0] + """ + + buildingList_df = gpd.read_file(geojsonfilename) + + datasource = ogr.Open(geojsonfilename, 0) + if ImageId=='': + ImageId = os.path.splitext(os.path.basename(geojsonfilename))[0] + + buildingList_df['ImageId']=ImageId + buildingList_df['BuildingId'] = range(1, len(buildingList_df) + 1) + buildingList_df['poly'] = buildingList_df.geometry #[shapely.wkt.loads(x) for x in buildingList_df.geometry.values] + + return buildingList_df + + +def mergePolyList(geojsonfilename): + """read geoJson and return dataframe of unary_union + + Keyword arguments: + geojsonfilename -- geojson to read + + """ + + + buildingList_df = gpd.read_file(geojsonfilename) + + + return buildingList_df.unary_union + +def readwktcsv(csv_path): + """read spacenetV2 csv and return geopandas dataframe + + Keyword arguments: + + csv_path -- path to csv of spacenetV2 ground truth or solution submission format + csv Format Expected = ['ImageId', 'BuildingId', 'PolygonWKT_Pix', 'PolygonWKT_Geo'] or + csv Format Expected = ['ImageId', 'BuildingId', 'PolygonWKT', 'Confidence'] + + see https://community.topcoder.com/longcontest/?module=ViewProblemStatement&rd=16892&pm=14551 to + learn more about the spacenetV2 csv formats + """ + # + + + df = pd.read_csv(csv_path) + crs={} + if 'PolygonWKT_Geo' in df.columns: + geometry = 
[shapely.wkt.loads(x) for x in df['PolygonWKT_Geo'].values] + crs = {'init': 'epsg:4326'} + elif 'PolygonWKT_Pix' in df.columns: + geometry = [shapely.wkt.loads(x) for x in df['PolygonWKT_Pix'].values] + elif 'PolygonWKT' in df.columns: + geometry = [shapely.wkt.loads(x) for x in df['PolygonWKT'].values] + + else: + print('Eror No Geometry Column detected, column must be called "PolygonWKT_Geo", "PolygonWKT_Pix", or "PolygonWKT"') + return -1 + + geo_df = gpd.GeoDataFrame(df, crs=crs, geometry=geometry) + + return geo_df + + +def exporttogeojson(geojsonfilename, geo_df): + """Write geopandas dataframe to geo_df + + Keyword arguments: + geojsonfilename -- geojson to create + geo_df -- geopandas dataframe + + """ + + #geo_df.to_file(geojsonfilename, driver='GeoJSON', crs=from_epsg(4326)) + geo_df.to_file(geojsonfilename, driver='GeoJSON') + + return geojsonfilename + + +def createmaskfrompolygons(polygons): + ## todo implement through label tools + pass + ## see labelTools createRasterFromGeoJson + + +def geomGeo2geomPixel(geom, affineObject=[], input_raster='', gdal_geomTransform=[]): + # This function transforms a shapely geometry in geospatial coordinates into pixel coordinates + # geom must be shapely geometry + # affineObject = rasterio.open(input_raster).affine + # gdal_geomTransform = gdal.Open(input_raster).GetGeoTransform() + # input_raster is path to raster to gather georectifcation information + if not affineObject: + if input_raster != '': + affineObject = rio.open(input_raster).affine + else: + affineObject = af.Affine.from_gdal(gdal_geomTransform) + + geomTransform = shapely.affinity.affine_transform(geom, + [affineObject.a, + affineObject.b, + affineObject.d, + affineObject.e, + affineObject.xoff, + affineObject.yoff] + ) + + return geomTransform + + +def geomPixel2geomGeo(geom, affineObject=[], input_raster='', gdal_geomTransform=[]): + # This function transforms a shapely geometry in pixel coordinates into geospatial coordinates + # geom must be 
shapely geometry
+    # affineObject = rasterio.open(input_raster).affine
+    # gdal_geomTransform = gdal.Open(input_raster).GetGeoTransform()
+    # input_raster is path to raster to gather georectifcation information
+    if not affineObject:
+        if input_raster != '':
+            affineObject = rio.open(input_raster).affine
+        else:
+            affineObject = af.Affine.from_gdal(gdal_geomTransform)
+
+    affineObject = affineObject.__invert__()
+    geomTransform = shapely.affinity.affine_transform(geom,
+                                                      [affineObject.a,
+                                                       affineObject.b,
+                                                       affineObject.d,
+                                                       affineObject.e,
+                                                       affineObject.xoff,
+                                                       affineObject.yoff]
+                                                      )
+
+    return geomTransform
+
+
+
+def returnBoundBox(xCenter, yCenter, pixDim, affineObject=[], input_raster='', gdal_geomTransform=[], pixelSpace=False):
+
+    geom = Point(xCenter, yCenter)
+    geom = geom.buffer(pixDim)
+    geom = geom.envelope
+
+    if not pixelSpace:
+        geom = geomPixel2geomGeo(geom, affineObject=affineObject, input_raster=input_raster, gdal_geomTransform=gdal_geomTransform)
+    else:
+        geom
+
+    return geom
+
+
+def createBoxFromLine(tmpGeom, ratio=1, halfWidth=-999, transformRequired=True, transform_WGS84_To_UTM='', transform_UTM_To_WGS84=''):
+    # create Polygon Box Oriented with the line
+
+    if transformRequired:
+        if transform_WGS84_To_UTM == '':
+            transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(tmpGeom)
+
+        tmpGeom = shapely.ops.transform(transform_WGS84_To_UTM, tmpGeom)
+
+
+
+
+    # calculatuate Centroid
+
+    lengthM = tmpGeom.length
+    if halfWidth ==-999:
+        halfWidth = lengthM/(2*ratio)
+
+    polyGeom = tmpGeom.buffer(halfWidth, cap_style=shapely.geometry.CAP_STYLE.flat)
+
+    angRad = math.atan2(tmpGeom.coords[1][1]-tmpGeom.coords[0][1],
+                        tmpGeom.coords[1][0] - tmpGeom.coords[0][0])
+
+    areaM = polyGeom.area
+
+    if transformRequired:
+        polyGeom = shapely.ops.transform(transform_UTM_To_WGS84, polyGeom)
+
+
+
+    return (polyGeom, areaM, angRad, lengthM)
+
+
+
+
+
+def geoWKTToPixelWKT(geom, inputRaster, targetSR, geomTransform, pixPrecision=2):
+
+
+    
print('Deprecated') + ## shapely.wkt.dumps(geom, trim=pixPrecision) + ## newGeom = geom + ## newGeom.coords = np.round(np.array(geom.coords), pixPrecision) + + + + + +def create_rtreefromdict(buildinglist): + # create index + index = rtree.index.Index(interleaved=False) + for idx, building in enumerate(buildinglist): + index.insert(idx, building['poly'].GetEnvelope()) + + return index + +def data_gen_rtree(poly_list): + + for i, poly in enumerate(poly_list): + + yield (i, poly.bounds) + + +def create_rtree_from_poly(poly_list, shapely=True): + # create index + index = rtree.index.Index(interleaved=shapely) + index.Rtree('bulk', data_gen_rtree(poly_list)) + #for idx, building in enumerate(poly_list): + # index.insert(idx, building.GetEnvelope()) + + return index + + +def search_rtree(test_building, index): + # input test shapely polygon geometry and rtree index + if test_building.geom_type == 'Polygon' or \ + test_building.geom_type == 'MultiPolygon': + fidlist = index.intersection(test_building.bounds) + else: + fidlist = [] + + return fidlist + + +def get_envelope(poly): + + return poly.envelope + +def utm_getZone(longitude): + + return (int(1+(longitude+180.0)/6.0)) + + +def utm_isNorthern(latitude): + + if (latitude < 0.0): + return 0 + else: + return 1 + +def createUTMTransform(polyGeom): + + polyCentroid = polyGeom.centroid + utm_zone = utm_getZone(polyCentroid.x) + is_northern = utm_isNorthern(polyCentroid.y) + if is_northern: + directionIndicator = '+north' + else: + directionIndicator = '+south' + + projectTO_UTM = partial( + pyproj.transform, + pyproj.Proj("+proj=longlat +datum=WGS84 +no_defs"), #Proj(proj='latlong',datum='WGS84') + pyproj.Proj("+proj=utm +zone={} {} +ellps=WGS84 +datum=WGS84 +units=m +no_defs".format(utm_zone, + directionIndicator)) + ) + + projectTO_WGS = partial( + pyproj.transform, + pyproj.Proj("+proj=utm +zone={} {} +ellps=WGS84 +datum=WGS84 +units=m +no_defs".format(utm_zone, + directionIndicator) + ), + pyproj.Proj("+proj=longlat 
+datum=WGS84 +no_defs"), # Proj(proj='latlong',datum='WGS84')
+
+    )
+
+    return projectTO_UTM, projectTO_WGS
+
+
+def getRasterExtent(srcImage):
+
+    poly = Polygon(((srcImage.bounds.left, srcImage.bounds.top),
+                    (srcImage.bounds.right, srcImage.bounds.top),
+                    (srcImage.bounds.right, srcImage.bounds.bottom),
+                    (srcImage.bounds.left, srcImage.bounds.bottom))
+                   )
+
+
+
+    return srcImage.affine, \
+           poly, \
+           srcImage.bounds.left, \
+           srcImage.bounds.top, \
+           srcImage.bounds.right, \
+           srcImage.bounds.bottom
+
+def createPolygonFromCenterPoint(cX,cY, radiusMeters, transform_WGS_To_UTM_Flag=True):
+
+
+    point = Point(cX, cY)
+
+    transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(point)
+    if transform_WGS_To_UTM_Flag:
+        point = shapely.ops.transform(transform_WGS84_To_UTM, point)
+
+    poly = point.buffer(radiusMeters)
+
+    if transform_WGS_To_UTM_Flag:
+        poly = shapely.ops.transform(transform_UTM_To_WGS84, poly)
+
+    return poly
+
+
+def createPolygonFromCorners(left,bottom,right, top):
+    # Create ring
+    poly = Polygon([(left, top),
+                    (right, top),
+                    (right, bottom),
+                    (left, bottom)
+                    ])
+
+    return poly
+
+
+def clipShapeFileGPD(geoDF, outputFileName, polyToCut, minpartialPerc=0.0, shapeLabel='Geo', debug=False):
+    # check if geoDF has origAreaField
+    outGeoJSon = os.path.splitext(outputFileName)[0] + '.geojson'
+    if not os.path.exists(os.path.dirname(outGeoJSon)):
+        os.makedirs(os.path.dirname(outGeoJSon))
+    if debug:
+        print(outGeoJSon)
+
+    if 'origarea' in geoDF.columns:
+        pass
+    else:
+        geoDF['origarea'] = geoDF.area
+
+
+
+    cutGeoDF = geoDF[geoDF.intersection(polyToCut).area/geoDF['origarea'] > minpartialPerc]
+    cutGeoDF['partialDec'] = cutGeoDF.area/cutGeoDF['origarea']
+    cutGeoDF['truncated'] = cutGeoDF['partialDec']==1.0
+
+    ##TODO Verify this works in DockerBuild
+    cutGeoDF.to_file(outGeoJSon, driver='GeoJSON')
+
+def clipShapeFile(shapeSrc, outputFileName, polyToCut, minpartialPerc=0.0, shapeLabel='Geo', debug=False):
+    ## Deprecated
+    
print('Deprecated use clipShapeFileGPD') + source_layer = shapeSrc.GetLayer() + source_srs = source_layer.GetSpatialRef() + # Create the output Layer + + outGeoJSon = os.path.splitext(outputFileName)[0] + '.geojson' + if not os.path.exists(os.path.dirname(outGeoJSon)): + os.makedirs(os.path.dirname(outGeoJSon)) + print(outGeoJSon) + outDriver = ogr.GetDriverByName("geojson") + if os.path.exists(outGeoJSon): + outDriver.DeleteDataSource(outGeoJSon) + + if debug: + outGeoJSonDebug = outputFileName.replace('.tif', 'outline.geojson') + outDriverDebug = ogr.GetDriverByName("geojson") + if os.path.exists(outGeoJSonDebug): + outDriverDebug.DeleteDataSource(outGeoJSonDebug) + outDataSourceDebug = outDriver.CreateDataSource(outGeoJSonDebug) + outLayerDebug = outDataSourceDebug.CreateLayer("groundTruth", source_srs, geom_type=ogr.wkbPolygon) + + outFeatureDebug = ogr.Feature(source_layer.GetLayerDefn()) + outFeatureDebug.SetGeometry(polyToCut) + outLayerDebug.CreateFeature(outFeatureDebug) + + + outDataSource = outDriver.CreateDataSource(outGeoJSon) + outLayer = outDataSource.CreateLayer("groundTruth", source_srs, geom_type=ogr.wkbPolygon) + # Add input Layer Fields to the output Layer + inLayerDefn = source_layer.GetLayerDefn() + for i in range(0, inLayerDefn.GetFieldCount()): + fieldDefn = inLayerDefn.GetFieldDefn(i) + outLayer.CreateField(fieldDefn) + outLayer.CreateField(ogr.FieldDefn("partialBuilding", ogr.OFTReal)) + outLayer.CreateField(ogr.FieldDefn("partialDec", ogr.OFTReal)) + outLayerDefn = outLayer.GetLayerDefn() + source_layer.SetSpatialFilter(polyToCut) + for inFeature in source_layer: + + outFeature = ogr.Feature(outLayerDefn) + + for i in range (0, inLayerDefn.GetFieldCount()): + outFeature.SetField(inLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i)) + + geom = inFeature.GetGeometryRef() + geomNew = geom.Intersection(polyToCut) + partialDec = -1 + if geomNew: + + if geomNew.GetGeometryName()=='POINT': + outFeature.SetField("partialDec", 1) + 
outFeature.SetField("partialBuilding", 1) + else: + + if geom.GetArea() > 0: + partialDec = geomNew.GetArea() / geom.GetArea() + else: + partialDec = 0 + + outFeature.SetField("partialDec", partialDec) + + if geom.GetArea() == geomNew.GetArea(): + outFeature.SetField("partialBuilding", 0) + else: + outFeature.SetField("partialBuilding", 1) + + + else: + outFeature.SetField("partialBuilding", 1) + outFeature.SetField("partialBuilding", 1) + + + outFeature.SetGeometry(geomNew) + if partialDec >= minpartialPerc: + outLayer.CreateFeature(outFeature) + #print ("AddFeature") + + +def cutChipFromMosaicGPD(rasterFileList, shapeFileSrcList, outlineSrc='',outputDirectory='', outputPrefix='clip_', + clipSizeMX=100, clipSizeMY=100, clipOverlap=0.0, minpartialPerc=0.0, createPix=False, + baseName='', + imgIdStart=-1, + parrallelProcess=False, + noBlackSpace=False, + randomClip=-1, + debug=False + ): + + srcImage = rio.open(rasterFileList[0][0]) + geoTrans, poly, ulX, ulY, lrX, lrY = getRasterExtent(srcImage) + # geoTrans[1] w-e pixel resolution + # geoTrans[5] n-s pixel resolution + if outputDirectory=="": + outputDirectory=os.path.dirname(rasterFileList[0][0]) + + rasterFileBaseList = [] + for rasterFile in rasterFileList: + rasterFileBaseList.append(os.path.basename(rasterFile[0])) + + if not createPix: + transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(poly) + poly = shapely.ops.tranform(transform_WGS84_To_UTM, poly) + + minX, minY, maxX, maxY = poly.bounds + + #return poly to WGS84 + if not createPix: + poly = shapely.ops.tranform(transform_UTM_To_WGS84, poly) + + + shapeSrcList = [] + for shapeFileSrc in shapeFileSrcList: + print(shapeFileSrc[1]) + shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]]) + + + if outlineSrc == '': + geomOutline = poly + else: + outline = gpd.read_file(outlineSrc) + geomOutlineBase = outline.geometry[0] + geomOutline = geomOutlineBase.Intersection(poly) + + chipSummaryList = [] + + for rasterFile in 
rasterFileList: + if not os.path.exists(os.path.join(outputDirectory, rasterFile[1])): + os.makedirs(os.path.join(outputDirectory, rasterFile[1])) + idx = 0 + if createPix: + if debug: + print(geoTrans) + + clipSizeMX=clipSizeMX*geoTrans[1] + clipSizeMY=abs(clipSizeMY*geoTrans[5]) + + xInterval = np.arange(minX, maxX, clipSizeMX*(1.0-clipOverlap)) + if debug: + print('minY = {}'.format(minY)) + print('maxY = {}'.format(maxY)) + print('clipsizeMX ={}'.format(clipSizeMX)) + print('clipsizeMY ={}'.format(clipSizeMY)) + + yInterval = np.arange(minY, maxY, clipSizeMY*(1.0-clipOverlap)) + + if debug: + print(xInterval) + print(yInterval) + + for llX in xInterval: + + for llY in yInterval: + uRX = llX+clipSizeMX + uRY = llY+clipSizeMY + + # check if uRX or uRY is outside image + if noBlackSpace: + if uRX > maxX: + uRX = maxX + llX = maxX - clipSizeMX + if uRY > maxY: + uRY = maxY + llY = maxY - clipSizeMY + + polyCut = createPolygonFromCorners(llX, llY, uRX, uRY) + + + if not createPix: + + polyCut.Transform(transform_UTM_To_WGS84) + ## add debug line do cuts + if (polyCut).Intersects(geomOutline): + + if debug: + print("Do it.") + + minXCut = llX + minYCut = llY + maxXCut = uRX + maxYCut = uRY + + if debug: + print('minYCut = {}'.format(minYCut)) + print('maxYCut = {}'.format(maxYCut)) + print('minXCut = {}'.format(minXCut)) + print('maxXCut = {}'.format(maxXCut)) + print('clipsizeMX ={}'.format(clipSizeMX)) + print('clipsizeMY ={}'.format(clipSizeMY)) + + idx = idx+1 + if imgIdStart == -1: + imgId = -1 + else: + imgId = idx + + chipSummary = createclip(outputDirectory, rasterFileList, shapeSrcList, + maxXCut, maxYCut, minYCut, minXCut, + rasterFileBaseList=rasterFileBaseList, + minpartialPerc=minpartialPerc, + outputPrefix=outputPrefix, + createPix=createPix, + rasterPolyEnvelope=poly, + baseName=baseName, + imgId=imgId) + + chipSummaryList.append(chipSummary) + + return chipSummaryList + +def cutChipFromMosaic(rasterFileList, shapeFileSrcList, 
outlineSrc='',outputDirectory='', outputPrefix='clip_', + clipSizeMX=100, clipSizeMY=100, clipOverlap=0.0, minpartialPerc=0.0, createPix=False, + baseName='', + imgIdStart=-1, + parrallelProcess=False, + noBlackSpace=False, + randomClip=-1): + #rasterFileList = [['rasterLocation', 'rasterDescription']] + # i.e rasterFileList = [['/path/to/3band_AOI_1.tif, '3band'], + # ['/path/to/8band_AOI_1.tif, '8band'] + # ] + # open Base Image + #print(rasterFileList[0][0]) + srcImage = gdal.Open(rasterFileList[0][0]) + geoTrans, poly, ulX, ulY, lrX, lrY = getRasterExtent(srcImage) + # geoTrans[1] w-e pixel resolution + # geoTrans[5] n-s pixel resolution + if outputDirectory=="": + outputDirectory=os.path.dirname(rasterFileList[0][0]) + + rasterFileBaseList = [] + for rasterFile in rasterFileList: + rasterFileBaseList.append(os.path.basename(rasterFile[0])) + + if not createPix: + transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(poly) + poly = shapely.ops.tranform(transform_WGS84_To_UTM, poly) + + env = poly.GetEnvelope() + minX = env[0] + minY = env[2] + maxX = env[1] + maxY = env[3] + + #return poly to WGS84 + if not createPix: + poly = shapely.ops.tranform(transform_UTM_To_WGS84, poly) + + + shapeSrcList = [] + for shapeFileSrc in shapeFileSrcList: + print(shapeFileSrc[1]) + shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]]) + + + if outlineSrc == '': + geomOutline = poly + else: + outline = ogr.Open(outlineSrc) + layer = outline.GetLayer() + featureOutLine = layer.GetFeature(0) + geomOutlineBase = featureOutLine.GetGeometryRef() + geomOutline = geomOutlineBase.Intersection(poly) + + chipSummaryList = [] + + for rasterFile in rasterFileList: + if not os.path.exists(os.path.join(outputDirectory, rasterFile[1])): + os.makedirs(os.path.join(outputDirectory, rasterFile[1])) + idx = 0 + if createPix: + print(geoTrans) + clipSizeMX=clipSizeMX*geoTrans[1] + clipSizeMY=abs(clipSizeMY*geoTrans[5]) + + xInterval = np.arange(minX, maxX, 
clipSizeMX*(1.0-clipOverlap)) + print('minY = {}'.format(minY)) + print('maxY = {}'.format(maxY)) + print('clipsizeMX ={}'.format(clipSizeMX)) + print('clipsizeMY ={}'.format(clipSizeMY)) + + yInterval = np.arange(minY, maxY, clipSizeMY*(1.0-clipOverlap)) + print(xInterval) + print(yInterval) + for llX in xInterval: + if parrallelProcess: + for llY in yInterval: + pass + + else: + for llY in yInterval: + uRX = llX+clipSizeMX + uRY = llY+clipSizeMY + + # check if uRX or uRY is outside image + if noBlackSpace: + if uRX > maxX: + uRX = maxX + llX = maxX - clipSizeMX + if uRY > maxY: + uRY = maxY + llY = maxY - clipSizeMY + + + + + polyCut = createPolygonFromCorners(llX, llY, uRX, uRY) + + + + + + + + if not createPix: + + polyCut.Transform(transform_UTM_To_WGS84) + ## add debug line do cuts + if (polyCut).Intersects(geomOutline): + print("Do it.") + #envCut = polyCut.GetEnvelope() + #minXCut = envCut[0] + #minYCut = envCut[2] + #maxXCut = envCut[1] + #maxYCut = envCut[3] + + #debug for real + minXCut = llX + minYCut = llY + maxXCut = uRX + maxYCut = uRY + + #print('minYCut = {}'.format(minYCut)) + #print('maxYCut = {}'.format(maxYCut)) + #print('minXCut = {}'.format(minXCut)) + #print('maxXCut = {}'.format(maxXCut)) + + #print('clipsizeMX ={}'.format(clipSizeMX)) + #print('clipsizeMY ={}'.format(clipSizeMY)) + + + + idx = idx+1 + if imgIdStart == -1: + imgId = -1 + else: + imgId = idx + + chipSummary = createclip(outputDirectory, rasterFileList, shapeSrcList, + maxXCut, maxYCut, minYCut, minXCut, + rasterFileBaseList=rasterFileBaseList, + minpartialPerc=minpartialPerc, + outputPrefix=outputPrefix, + createPix=createPix, + rasterPolyEnvelope=poly, + baseName=baseName, + imgId=imgId) + chipSummaryList.append(chipSummary) + + return chipSummaryList + +def createclip(outputDirectory, rasterFileList, shapeSrcList, + maxXCut, maxYCut, minYCut, minXCut, + rasterFileBaseList=[], + minpartialPerc=0, + outputPrefix='', + createPix=False, + 
rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"), + className='', + baseName='', + imgId=-1): + + #rasterFileList = [['rasterLocation', 'rasterDescription']] + # i.e rasterFileList = [['/path/to/3band_AOI_1.tif, '3band'], + # ['/path/to/8band_AOI_1.tif, '8band'] + # ] + + polyCutWGS = createPolygonFromCorners(minXCut, minYCut, maxXCut, maxYCut) + + + if not rasterFileBaseList: + rasterFileBaseList = [] + for rasterFile in rasterFileList: + rasterFileBaseList.append(os.path.basename(rasterFile[0])) + + if rasterPolyEnvelope == '': + pass + + chipNameList = [] + for rasterFile in rasterFileList: + if className == '': + if imgId==-1: + chipNameList.append(outputPrefix + rasterFile[1] + + "_" + baseName + "_{}_{}.tif".format(minXCut, minYCut)) + else: + chipNameList.append(outputPrefix + rasterFile[1] + + "_" + baseName + "_img{}.tif".format(imgId)) + else: + if imgId==-1: + chipNameList.append(outputPrefix + className + "_" + + rasterFile[1] + "_" + baseName + "_{}_{}.tif".format(minXCut, minYCut)) + else: + chipNameList.append(outputPrefix + className + '_' + + rasterFile[1] + "_" + baseName + "_img{}.tif".format(imgId)) + + # clip raster + + for chipName, rasterFile in zip(chipNameList, rasterFileList): + outputFileName = os.path.join(outputDirectory, rasterFile[1], className, chipName) + ## Clip Image + print(rasterFile) + print(outputFileName) + subprocess.call(["gdalwarp", "-te", "{}".format(minXCut), "{}".format(minYCut), "{}".format(maxXCut), + "{}".format(maxYCut), + '-co', 'PHOTOMETRIC=rgb', + rasterFile[0], outputFileName]) + + baseLayerRasterName = os.path.join(outputDirectory, rasterFileList[0][1], className, chipNameList[0]) + outputFileName = os.path.join(outputDirectory, rasterFileList[0][1], chipNameList[0]) + + + ### Clip poly to cust to Raster Extent + if rasterPolyEnvelope.GetArea() == 0: + srcImage = gdal.Open(rasterFileList[0][0]) + geoTrans, rasterPolyEnvelope, ulX, ulY, lrX, lrY = getRasterExtent(srcImage) + polyVectorCut = 
polyCutWGS.Intersection(rasterPolyEnvelope) + else: + polyVectorCut = polyCutWGS.Intersection(rasterPolyEnvelope) + + # Interate thorough Vector Src List + for shapeSrc in shapeSrcList: + if imgId == -1: + outGeoJson = outputPrefix + shapeSrc[1] + \ + "_" + baseName + "_{}_{}.geojson".format(minXCut, minYCut) + else: + outGeoJson = outputPrefix + shapeSrc[1] + \ + "_" + baseName + "_img{}.geojson".format(imgId) + + outGeoJson = os.path.join(outputDirectory, 'geojson', shapeSrc[1], outGeoJson) + + clipShapeFile(shapeSrc[0], outGeoJson, polyVectorCut, minpartialPerc=minpartialPerc) + + + chipSummary = {'rasterSource': baseLayerRasterName, + 'chipName': chipNameList[0], + 'geoVectorName': outGeoJson, + 'pixVectorName': '' + } + + return chipSummary + +def cutChipFromRasterCenter(rasterFileList, shapeFileSrc, outlineSrc='', + outputDirectory='', outputPrefix='clip_', + clipSizeMeters=50, createPix=False, + classFieldName = 'TYPE', + minpartialPerc=0.1, + ): + #rasterFileList = [['rasterLocation', 'rasterDescription']] + # i.e rasterFileList = [['/path/to/3band_AOI_1.tif, '3band'], + # ['/path/to/8band_AOI_1.tif, '8band'] + # ] + srcImage = gdal.Open(rasterFileList[0][0]) + geoTrans, poly, ulX, ulY, lrX, lrY = getRasterExtent(srcImage) + + if outputDirectory == "": + outputDirectory = os.path.dirname(rasterFileList[0]) + + rasterFileBaseList = [] + for rasterFile in rasterFileList: + rasterFileBaseList.append(os.path.basename(rasterFile[0])) + + transform_WGS84_To_UTM, transform_UTM_To_WGS84, utm_cs = createUTMTransform(poly) + poly.Transform(transform_WGS84_To_UTM) + env = poly.GetEnvelope() + + # return poly to WGS84 + poly.Transform(transform_UTM_To_WGS84) + + shapeSrc = ogr.Open(shapeFileSrc, 0) + if outlineSrc == '': + geomOutline = poly + else: + outline = ogr.Open(outlineSrc) + layer = outline.GetLayer() + featureOutLine = layer.GetFeature(0) + geomOutlineBase = featureOutLine.GetGeometryRef() + geomOutline = geomOutlineBase.Intersection(poly) + + shapeSrcBase = 
ogr.Open(shapeFileSrc, 0) + layerBase = shapeSrcBase.GetLayer() + layerBase.SetSpatialFilter(geomOutline) + for rasterFile in rasterFileList: + if not os.path.exists(os.path.join(outputDirectory, rasterFile[1])): + os.makedirs(os.path.join(outputDirectory, rasterFile[1])) + for feature in layerBase: + featureGeom = feature.GetGeometryRef() + cx, cy, cz = featureGeom.Centroid().GetPoint() + polyCut = createPolygonFromCenterPoint(cx, cy, radiusMeters=clipSizeMeters) + print(classFieldName) + classDescription = feature.GetField(classFieldName) + classDescription = classDescription.replace(" ","") + envCut = polyCut.GetEnvelope() + minXCut = envCut[0] + minYCut = envCut[2] + maxXCut = envCut[1] + maxYCut = envCut[3] + createclip(outputDirectory, rasterFileList, shapeSrc, + maxXCut, maxYCut, minYCut, minXCut, + rasterFileBaseList=rasterFileBaseList, + minpartialPerc=minpartialPerc, + outputPrefix=outputPrefix, + createPix=createPix, + rasterPolyEnvelope=poly, + className=classDescription) + + + +def rotateClip(clipFileName, sourceGeoJson, rotaionList=[0,90,180,275]): + # will add "_{}.ext".formate(rotationList[i] + pass + + + +def createMaskedMosaic(input_raster, output_raster, outline_file): + + subprocess.call(["gdalwarp", "-q", "-cutline", outline_file, "-of", "GTiff", input_raster, output_raster, + '-wo', 'OPTIMIZE_SIZE=YES', + '-co', 'COMPRESS=JPEG', + '-co', 'PHOTOMETRIC=YCBCR', + '-co', 'TILED=YES']) + + + +def explodeGeoPandasFrame(inGDF): + + #This function splits entries with MultiPolygon geometries into Polygon Geometries + + outdf = gpd.GeoDataFrame(columns=inGDF.columns) + for idx, row in inGDF.iterrows(): + if type(row.geometry) == Polygon: + outdf = outdf.append(row,ignore_index=True) + if type(row.geometry) == MultiPolygon: + multdf = gpd.GeoDataFrame(columns=inGDF.columns) + recs = len(row.geometry) + multdf = multdf.append([row]*recs,ignore_index=True) + for geom in range(recs): + multdf.loc[geom,'geometry'] = row.geometry[geom] + multdf.head() + outdf 
= outdf.append(multdf,ignore_index=True) + + if type(row.geometry) == LineString: + outdf = outdf.append(row, ignore_index=True) + + if type(row.geometry) == MultiLineString: + multdf = gpd.GeoDataFrame(columns=inGDF.columns) + recs = len(row.geometry) + multdf = multdf.append([row]*recs,ignore_index=True) + for geom in range(recs): + multdf.loc[geom,'geometry'] = row.geometry[geom] + multdf.head() + outdf = outdf.append(multdf,ignore_index=True) + + + outdf.crs = inGDF.crs + + + return outdf + +def calculateCenterLineFromGeopandasPolygon(inGDF, + centerLineDistanceInput_Meters=5, + simplifyDistanceMeters=5, + projectToUTM=True): + + # project To UTM for GeoSpatial Measurements + if projectToUTM: + tmpGDF = osmnx.project_gdf(inGDF) + else: + tmpGDF = inGDF + + # Explode GeoPandas + tmpGDF1 = explodeGeoPandasFrame(tmpGDF) + tmpGDF1.crs = tmpGDF.crs + gdf_centerline_utm = tmpGDF1 + + + # Loop through Geomertries to calculate Centerline for Each Polygon + listOfGeoms = tmpGDF1['geometry'].values + lineStringList = [] + + for geom in listOfGeoms: + tmpGeom = centerline.Centerline(geom, centerLineDistanceInput_Meters) + lineStringList.append(tmpGeom.createCenterline()) + + gdf_centerline_utm['geometry'] = lineStringList + + lineList = gdf_centerline_utm['geometry'].values + lineSimplifiedList = [] + + for geo in lineList: + + + if geo.type == 'MultiLineString': + + geoNew = shapely.ops.linemerge(geo).simplify(simplifyDistanceMeters, preserve_topology=False) + + else: + + geoNew = geo.simplify(simplifyDistanceMeters, preserve_topology=False) + + lineSimplifiedList.append(geoNew) + + simplifiedGdf_utm = gpd.GeoDataFrame({'geometry': lineSimplifiedList}) + simplifiedGdf_utm.crs = tmpGDF.crs + print (tmpGDF.crs) + + if projectToUTM: + gdf_simple_centerline = simplifiedGdf_utm.to_crs(inGDF.crs) + else: + gdf_simple_centerline = simplifiedGdf_utm + + + return gdf_simple_centerline + + +def calculateCenterLineFromOGR(inputSrcFile, centerLineDistanceInput_Meters=5, 
outputShpFile=''): + + inGDF = gpd.read_file(inputSrcFile) + outGDF = calculateCenterLineFromGeopandasPolygon(inGDF, centerLineDistanceInput_Meters=centerLineDistanceInput_Meters) + + if outputShpFile != '': + outGDF.to_file(outputShpFile) + + + return outGDF + + +def createBufferGeoPandas(inGDF, bufferDistanceMeters=5, bufferRoundness=1, projectToUTM=True): + # Calculate CenterLine + ## Define Buffer Constraints + + + # Transform gdf Roadlines into UTM so that Buffer makes sense + if projectToUTM: + tmpGDF = osmnx.project_gdf(inGDF) + else: + tmpGDF = inGDF + + gdf_utm_buffer = tmpGDF + + # perform Buffer to produce polygons from Line Segments + gdf_utm_buffer['geometry'] = tmpGDF.buffer(bufferDistanceMeters, + bufferRoundness) + + gdf_utm_dissolve = gdf_utm_buffer.dissolve(by='class') + gdf_utm_dissolve.crs = gdf_utm_buffer.crs + + if projectToUTM: + gdf_buffer = gdf_utm_dissolve.to_crs(inGDF.crs) + else: + gdf_buffer = gdf_utm_dissolve + + + return gdf_buffer + + diff --git a/python/spaceNetUtilities/inferenceTools.py b/python/spaceNetUtilities/inferenceTools.py new file mode 100644 index 0000000..e69de29 diff --git a/python/spaceNetUtilities/rasterTools.py b/python/spaceNetUtilities/rasterTools.py new file mode 100644 index 0000000..e69de29 From ef0ae9e2b52516ed6f4a8b23a2a81165d765288e Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Mon, 21 Aug 2017 14:21:58 -0400 Subject: [PATCH 23/26] updates to geopandas support --- configFiles/OSM_Power.json | 83 ++++ content/.DS_Store | Bin 6148 -> 6148 bytes python/createSpaceNetPOI_Round2.py | 9 +- python/createSpaceNetPOI_Round2GPD_clean.py | 355 ++++++++++++++++ python/evaluateScene.py | 12 - python/evaluateScene_Total.py | 357 ++++++++++++++++ python/spaceNetUtilities/__init__.pyc | Bin 177 -> 176 bytes .../createSpaceNetPOI_Round2GPD.py | 380 ++++++++++++++++++ python/spaceNetUtilities/evalGraphTools.pyc | Bin 0 -> 8777 bytes python/spaceNetUtilities/evalTools.pyc | Bin 4332 -> 4332 bytes 
python/spaceNetUtilities/evalToolsGPD.ipynb | 131 ++++++ python/spaceNetUtilities/evalToolsGPD.py | 157 ++++++++ python/spaceNetUtilities/evalToolsGPD.pyc | Bin 0 -> 4120 bytes python/spaceNetUtilities/geoTools.pyc | Bin 27895 -> 31690 bytes python/spaceNetUtilities/geoToolsGPD.py | 94 +++-- python/spaceNetUtilities/geoToolsGPD.pyc | Bin 0 -> 26853 bytes python/spaceNetUtilities/inferenceTools.py | 156 +++++++ python/spaceNetUtilities/labelTools.pyc | Bin 30263 -> 31101 bytes python/spaceNetUtilities/vizTools.pyc | Bin 0 -> 1385 bytes python/testFile.py | 22 + testfile.geojson | 7 + testfile1.geojson | 7 + testfile2.geojson | 7 + 23 files changed, 1733 insertions(+), 44 deletions(-) create mode 100644 python/createSpaceNetPOI_Round2GPD_clean.py create mode 100644 python/evaluateScene_Total.py create mode 100644 python/spaceNetUtilities/createSpaceNetPOI_Round2GPD.py create mode 100644 python/spaceNetUtilities/evalGraphTools.pyc create mode 100644 python/spaceNetUtilities/evalToolsGPD.ipynb create mode 100644 python/spaceNetUtilities/evalToolsGPD.py create mode 100644 python/spaceNetUtilities/evalToolsGPD.pyc create mode 100644 python/spaceNetUtilities/geoToolsGPD.pyc create mode 100644 python/spaceNetUtilities/vizTools.pyc create mode 100644 python/testFile.py create mode 100644 testfile.geojson create mode 100644 testfile1.geojson create mode 100644 testfile2.geojson diff --git a/configFiles/OSM_Power.json b/configFiles/OSM_Power.json index e69de29..5551f26 100644 --- a/configFiles/OSM_Power.json +++ b/configFiles/OSM_Power.json @@ -0,0 +1,83 @@ +{ +"power": + { + "compensator": { + "featureIdNum": 1, + "featureChipScaleM": 200, + "featureBBoxSizeM": 5, + "spaceFeatureName": "power:compensator" + }, + "converter": { + "featureIdNum": 2, + "featureChipScaleM": 200, + "featureBBoxSizeM": 5, + "spaceFeatureName": "power:converter" + }, + "heliostat": { + "featureIdNum": 3, + "featureChipScaleM": 200, + "featureBBoxSizeM": 5, + "spaceFeatureName": "power:heliostat" + 
}, + "insulator": { + "featureIdNum": 4, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10, + "spaceFeatureName": "power:insualtor" + }, + "pole": { + "featureIdNum": 5, + "featureChipScaleM": 200, + "featureBBoxSizeM": 5, + "spaceFeatureName": "power:pole" + }, + "portal": { + "featureIdNum": 6, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10, + "spaceFeatureName": "power:portal" + }, + "catenary_mast": { + "featureIdNum": 7, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10, + "spaceFeatureName": "power:catenary_mast" + }, + "substation": { + "featureIdNum": 8, + "featureChipScaleM": 200, + "featureBBoxSizeM": 25, + "spaceFeatureName": "power:substation" + }, + "switch": { + "featureIdNum": 9, + "featureChipScaleM": 200, + "featureBBoxSizeM": 5, + "spaceFeatureName": "power:switch" + }, + "terminal": { + "featureIdNum": 10, + "featureChipScaleM": 200, + "featureBBoxSizeM": 5, + "spaceFeatureName": "power:terminal" + }, + "tower": { + "featureIdNum": 11, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10, + "spaceFeatureName": "power:tower" + }, + "transformer": { + "featureIdNum": 12, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10, + "spaceFeatureName": "power:transformer" + }, + "unknown": { + "featureIdNum": 13, + "featureChipScaleM": 200, + "featureBBoxSizeM": 10, + "spaceFeatureName": "power:unknown" + } + } +} diff --git a/content/.DS_Store b/content/.DS_Store index 3ef473b468fcfc586bb088bd85915cf28e89f1ba..18f17885bc1e1113e5a170794c21aa459dd3a254 100644 GIT binary patch delta 65 zcmZoMXfc=|#>CJ*u~2NHo+2aD!~pBb1|lqz`5B`&FJnw(o4kQ>?`C!meh#3T%?la7 VGf(ChF=S+zY{MhHIYwj!GXQbX5~ctE delta 108 zcmZoMXfc=|#>B`mu~2NHo+2aj!~p9_j154#Csvn}!IL4MA(X+0L64z?A(J7EA&!ef%pLsv;60: + utm_crs, latlong_crs = gT.createUTMandLatLonCrs(gdfPOITemp.geometry.values[0]) + # "compensator": { + # "featureIdNum": 1, + # "featureChipScaleM": 200, + # "featureBBoxSizeM": 5 + # }, + # project subset to UTM so that actions can be done with regard to meters + gdfPOITempUTM = 
gdfPOITemp.to_crs(utm_crs) + # buffer geometry to create circle around point with x meters + gdfPOITempUTM.geometry=gdfPOITempUTM.buffer(pointOfInterestList[pointOfInterestKey][objectName]['featureBBoxSizeM']) + # calculate the envelope of the circle to create a bounding box for each object + gdfPOITempUTM.geometry=gdfPOITempUTM.envelope + + # reporject to wgs84 (lat long) + gdfPOITemp = gdfPOITempUTM.to_crs(latlong_crs) + # assign a name to spacefeat for future use in the vector: "power:tower" + gdfPOITemp['spacefeat']=pointOfInterestList[pointOfInterestKey][objectName]['spaceFeatureName'] + + # if first key, create final gdf + if firstKey: + gdfFinal = gdfPOITemp + firstKey = False + else: + # else add new to end of gdfFinal + gdfFinal = gdfFinal.concat(gdfPOITemp) + + if outVectorFile != '': + gdfFinal.to_file(outVectorFile) + + return gdfFinal + + +def createProcessedPOIDataGPD(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, + baseName='', + className='', + outputDirectory='', + seperateImageFolders=False, + minPartialToInclue = 0.70, + defaultfeatureChipScaleM=200, + ): + + fieldName = pointOfInterestList.keys()[0] + fieldOfInterestList = pointOfInterestList[fieldName] + chipSummaryList = [] + + + ## determinePixelSize in Meters + print rasterFileList + print rasterFileList[0][0] + with rasterio.open(rasterFileList[0][0]) as src: + geoTransform = src.affine.to_gdal() + + metersPerPix = abs(round(geoTransform[5]*111111,1)) + + ## process pointShapeFile + srcDf = gpd.read_file(srcVectorFile) + + ## check if fieldName is valid otherwise features are detectable: + if fieldName in srcDf.columns: + print("{} is detected, processing srcFile = {}".format(fieldName, srcVectorFile)) + else: + print("Error {} is not a valid column Name, unable to process srcFile = {}".format(fieldName, srcVectorFile)) + print("Error Field name= {} is not in column names = {}". 
format(fieldName, srcDf.columns)) + print("Ensure {} is a column name, unable to process srcFile = {}".format(fieldName, srcVectorFile)) + + return -1 + + utm_crs, latlong_crs = gT.createUTMandLatLonCrs(srcDf.centroid.values[0]) + + ## not sure if needed + #gdf_utm = srcDf.to_crs(utm_crs) + #halfClipSizeXm = round((clipSizeM/metersPerPix)/2) + #gdf_utm.buffer(halfClipSizeXm).envelope + + + + + imgId = 0 + for idx, feature in srcDf.iterrows(): + + geom = feature.geometry.centroid + + + featureName = feature[fieldName] + + if seperateImageFolders: + className = featureName.replace(' ', '') + else: + className = '' + if featureName in fieldOfInterestList.keys(): + clipSize = fieldOfInterestList[featureName]['featureChipScaleM'] + else: + clipSize = defaultfeatureChipScaleM + + + halfClipSizeXm = round((clipSize/metersPerPix)/2) + + + maxXCut = geom.x + halfClipSizeXm*geoTransform[1] + maxYCut = geom.y + abs(halfClipSizeXm*geoTransform[5]) + minYCut = geom.y - abs(halfClipSizeXm*geoTransform[5]) + minXCut = geom.x - halfClipSizeXm*geoTransform[1] + + imgId = imgId + 1 + + chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeFileSrcList, + maxXCut, maxYCut, minYCut, minXCut, + rasterFileBaseList=[], + minpartialPerc=minPartialToInclue, + outputPrefix='', + createPix=False, + rasterPolyEnvelope=shapely.wkt.loads('POLYGON EMPTY'), + className=className, + baseName=baseName, + imgId=imgId) + + chipSummaryList.append(chipSummary) + + return chipSummaryList + + +def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): + + + longMin = -500 + latMin = -500 + if latCutOff == -500: + latMax = 500 + else: + latMax = latCutOff + + if lonCutOff == -500: + longMax = 500 + else: + longMax = lonCutOff + + + outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') + cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson, + '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)] + + subprocess.call(cmd) + 
longMin = -500 + latMin = -500 + if latCutOff == -500: + latMax = 500 + else: + latMin = latCutOff + latMax = 500 + + + if lonCutOff == -500: + longMax = 500 + else: + longMin = lonCutOff + longMax = 500 + + outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') + cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson, + '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)] + subprocess.call(cmd) + + + return outputGeoJsonTrain, outputGeoJsonTest + +def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'): + + + df = gpd.read_file(geoJsonIn, encoding=encoding) + df['y'] = df.geometry.map(lambda p: p.y) + df['x'] = df.geometry.map(lambda p: p.x) + df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True) + df.drop(['x', 'y'], 1, inplace=True) + + if geoJsonOut=='': + geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson') + df.to_file(geoJsonOut, driver='GeoJSON', encoding=encoding) + + return geoJsonOut + +def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8'): + + insidePoly = Polygon([(lonMin, latMin), (lonMin, latMax), (lonMax, latMax), (lonMax, latMin)]) + df = gpd.read_file(geoJson, encoding=encoding) + + outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') + df[~df.intersects(insidePoly)].to_file(outputGeoJsonTrain, driver='GeoJSON', encoding=encoding) + + outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') + df[df.intersects(insidePoly)].to_file(outputGeoJsonTest, driver='GeoJSON', encoding=encoding) + + return outputGeoJsonTrain, outputGeoJsonTest + + + + +def processPointShapeFile(): + + pass + + + +if __name__ == '__main__': + + createOutVectorFile = True + srcVectorFile = '/path/To/PointOfInterestSummary.geojson' + outVectorFile = '/path/To/PolygonOfInterestSummary.geojson' + outputDirectory = '/path/To/processedDataPOI/' + baseName = 'AOI_3_Paris' + featureDescriptionJson = 
'../configFiles/OSM_Power.json' + className = 'POIAll' + seperateImageFolders = True + splitGeoJson = False + + + + + srcVectorFileList = [[srcVectorFile, 'all']] + + # List of Raster Images to Chip and an appropriate label. + # This list will take any type of Raster supported by GDAL + # VRTs are nice becasuse they create a virtual mosaic and allow for images covering a wide area to be considered one + # In this case gdalbuildvrt can be run in a folder of tifs to create the VRT to be handed for processing + rasterFileList = [['/path/To/Pan-VRT.vrt', 'PAN'], + ['/path/To/MUL-VRT.vrt', 'MUL'] + ['/path/To/RGB-PanSharpen-VRT.vrt', 'RGB-PanSharpen'] + ['/path/To/MUL-PanSharpen-VRT.vrt', 'MUL-PanSharpen'] + ] + + + + + ### Define Point of Interest Dictionary + # {'spacenetFeatureName': + # {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping + # 'featureChipScaleM': 200, # Size of chip to create in meters + # 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point + # # indicating in the geojson. This will be used in creating polygons for IOU scoring + # } + + with open(featureDescriptionJson, 'r') as j: + pointOfInterestList = json.load(j) + + + # create Folder Structure to place files into. + + for rasterFile in rasterFileList: + for keyName in pointOfInterestList.keys(): + tmpPath = os.path.join(outputDirectory, rasterFile[1], keyName.replace(' ', '')) + if not os.path.exists(tmpPath): + os.makedirs(tmpPath) + + for objectName in pointOfInterestList[keyName].keys(): + tmpPath = os.path.join(outputDirectory, rasterFile[1], keyName.replace(' ', ''), objectName.replace(" ","")) + if not os.path.exists(tmpPath): + os.makedirs(tmpPath) + + # create Processed Point of Interest Data. 
+ createProcessedPOIDataGPD(srcVectorFile, pointOfInterestList, rasterFileList, srcVectorFileList, + baseName=baseName, + className='', + outputDirectory=outputDirectory, + seperateImageFolders=seperateImageFolders, + minPartialToInclue=0.70) + + + + + diff --git a/python/evaluateScene.py b/python/evaluateScene.py index 6d0bb04..cfbdefc 100644 --- a/python/evaluateScene.py +++ b/python/evaluateScene.py @@ -321,18 +321,6 @@ def combineGeoJsonAndConvertToWGS84(baseName, rasterLocationList, - - - - - - - - - - - - if __name__ == "__main__": parser = argparse.ArgumentParser(description='Evaluate Score for SpaceNet') diff --git a/python/evaluateScene_Total.py b/python/evaluateScene_Total.py new file mode 100644 index 0000000..9072a71 --- /dev/null +++ b/python/evaluateScene_Total.py @@ -0,0 +1,357 @@ +from spaceNetUtilities import evalTools as eT +from spaceNetUtilities import geoToolsGPD as gT +import numpy as np +import csv +import multiprocessing +import time +import argparse +import os +import glob +import geopandas as gpd +from osgeo import ogr, osr, gdal + + +def writeAOISummaryToCSV(resultsDict,csvwriter): + + csvwriter.writerow(['TruthFile', resultsDict['TruthFile']]) + csvwriter.writerow(['ProposalFile', resultsDict['ProposalFile']]) + csvwriter.writerow(['AOI_Name', resultsDict['AOI_Name']]) + csvwriter.writerow(['Summary Results']) + csvwriter.writerow(['F1Score Total', resultsDict['F1ScoreTotal']]) + csvwriter.writerow(['Precision', resultsDict['PrecisionTotal']]) + csvwriter.writerow(['Recall', resultsDict['RecalTotal']]) + csvwriter.writerow(['True Positive Total', resultsDict['TruePositiveTotal']]) + csvwriter.writerow(['False Positive Total', resultsDict['FalsePositiveTotal']]) + csvwriter.writerow(['False Negative Total', resultsDict['FalseNegativeTotal']]) + csvwriter.writerow(['']) + + # resultsDict = {'AOI_Name': aoiName + # 'TruthFile': truth_fp, + # 'ProposalFile': test_fp, + # 'F1ScoreTotal': F1ScoreTotal, + # 'PrecisionTotal': precision, + # 
'RecalTotal': recall, + # 'TruePositiveTotal': true_pos_total, + # 'FalsePositiveTotal': false_pos_total, + # 'FalseNegativeTotal': false_neg_total, + # 'PerImageStatsResultList': result_list, + # 'OutputSummaryFile': resultsOutputFile} + + + + + + + + + + +def evaluateSpaceNetSolution_Total(summaryTruthFile, summaryProposalFile, resultsOutputFile='', + useParallelProcessing=False, minPolygonSize=0, + iouThreshold=0.5, + AOIList=['Total', + 'AOI_1_Rio', + 'AOI_2_Vegas', + 'AOI_3_Paris', + 'AOI_4_Shanghai', + 'AOI_5_Khartoum'], + debug=False + ): + + truth_fp = summaryTruthFile + test_fp = summaryProposalFile + # check for cores available + if useParallelProcessing: + + max_cpu = multiprocessing.cpu_count() + parallel = True + else: + max_cpu = 1 + parallel = False + + # initialize scene counts + true_pos_counts = [] + false_pos_counts = [] + false_neg_counts = [] + + t0 = time.time() + # Start Ingest Of Truth and Test Case + + truthDF = gpd.read_file(truth_fp) + testDF = gpd.read_file(test_fp) + polyFlag = 'poly' + + ## TODO projectToUTM + + truthDF = truthDF.loc[truthDF.area >= minPolygonSize] + + t1 = time.time() + total = t1 - t0 + if debug: + print('time of ingest: ', total) + + + + prop_polysPoly = testDF.geometry.values + sol_polysPoly = truthDF.geometry.values + bad_count = 0 + F1ScoreList = [] + cpu_count = min(multiprocessing.cpu_count(), max_cpu) + if debug: + print('{}'.format(max_cpu)) + p = multiprocessing.Pool(processes=cpu_count) + ResultList = [] + + truthIndex = truthDF.sindex + eval_function_input_list = [['ImageId', prop_polysPoly, sol_polysPoly, truthIndex]] + + + # Calculate Values + t3 = time.time() + if debug: + print('time For DataCreation {}s'.format(t3 - t1)) + + # result_list = p.map(eT.evalfunction, eval_function_input_list) + if parallel == False: + result_list = [] + for eval_input in eval_function_input_list: + if resultsOutputFile != '': + result_list.append(eT.evalfunction(eval_input, + 
resultGeoJsonName=os.path.splitext(resultsOutputFile)[0]+"_"+eval_input[0]+".geojson", + threshold=iouThreshold)) + else: + result_list.append(eT.evalfunction(eval_input, + threshold=iouThreshold)) + else: + result_list = p.map(eT.evalfunction, eval_function_input_list) + + result_listNP = np.asarray([item[0] for item in result_list]) + result_listName = [item[1] for item in result_list] + AOIIndexList = [] + resultsDictList = [] + for AOI in AOIList: + if AOI !='Total': + AOIIndex = [i for i, s in enumerate(result_listName) if AOI in s] + AOIIndexList.append(AOIIndex) + result_sum = np.sum(result_listNP[AOIIndex], axis=0) + else: + AOIIndex = [i for i, s in enumerate(result_listName) if '' in s] + AOIIndexList.append(AOIIndex) + result_sum = np.sum(result_listNP, axis=0) + + + #result_sum = np.sum(result_listNP, axis=0) + true_pos_total = result_sum[1] + false_pos_total = result_sum[2] + false_neg_total = result_sum[3] + if (float(true_pos_total) + float(false_pos_total)) > 0: + precision = float(true_pos_total) / (float(true_pos_total) + float(false_pos_total)) + else: + precision = 0 + + if (float(true_pos_total) + float(false_neg_total)) > 0: + recall = float(true_pos_total) / (float(true_pos_total) + float(false_neg_total)) + else: + recall = 0 + + if (precision + recall) > 0: + F1ScoreTotal = 2.0 * precision * recall / (precision + recall) + else: + F1ScoreTotal = 0 + + resultsDict = {'AOI_Name': AOI, + 'TruthFile': truth_fp, + 'ProposalFile': test_fp, + 'F1ScoreTotal': F1ScoreTotal, + 'PrecisionTotal': precision, + 'RecalTotal': recall, + 'TruePositiveTotal': true_pos_total, + 'FalsePositiveTotal': false_pos_total, + 'FalseNegativeTotal': false_neg_total, + 'PerImageStatsResultList': result_list, + 'OutputSummaryFile': resultsOutputFile} + + resultsDictList.append(resultsDict) + writeResultsToScreen(resultsDict) + + + + + if resultsOutputFile != '': + with open(resultsOutputFile, 'w') as csvFile: + csvwriter = csv.writer(csvFile, delimiter=',') + for 
resultsDict in resultsDictList: + writeAOISummaryToCSV(resultsDict, csvwriter) + + writePerChipToCSV(resultsDictList, csvwriter) + + + + + + + + return resultsDictList + + +def combineGeoJsonAndConvertToWGS84(baseName, rasterLocationList, + AOIList=['Total', + 'AOI_1_Rio', + 'AOI_2_Vegas', + 'AOI_3_Paris', + 'AOI_4_Shanghai', + 'AOI_5_Khartoum'], + removeGeoJsonAfter=True): + srcBaseName = os.path.splitext(baseName)[0] + geoJsonList = glob.glob(srcBaseName+"_*.geojson") + print geoJsonList + + rasterList = [] + for rasterLocation in rasterLocationList: + rasterList.extend(glob.glob(os.path.join(rasterLocation, '*.tif'))) + + + + for AOI in AOIList: + AOIgeoJsonList = [s for i, s in enumerate(geoJsonList) if AOI in s] + AOIimageList = [s for i, s in enumerate(rasterList) if AOI in s] + + #print AOIimageList + outShapeFile = srcBaseName+"_"+AOI+"Summary.shp" + outDriver = ogr.GetDriverByName("ESRI Shapefile") + # Remove output shapefile if it already exists + if os.path.exists(outShapeFile): + outDriver.DeleteDataSource(outShapeFile) + outDataSource = outDriver.CreateDataSource(outShapeFile) + srs = osr.SpatialReference() + srs.ImportFromEPSG(4326) + outLayer = outDataSource.CreateLayer("AOI_Results", srs, geom_type=ogr.wkbPolygon) + + inDataSource = ogr.Open(geoJsonList[0], 0) + inLayer = inDataSource.GetLayer() + # Add input Layer Fields to the output Layer + inLayerDefn = inLayer.GetLayerDefn() + for i in range(0, inLayerDefn.GetFieldCount()): + fieldDefn = inLayerDefn.GetFieldDefn(i) + outLayer.CreateField(fieldDefn) + # Get the output Layer's Feature Definition + outLayerDefn = outLayer.GetLayerDefn() + + inLayerDefn = 0 + inLayer = 0 + inDataSource = 0 + for AOIgeoJson in AOIgeoJsonList: + imageId = AOIgeoJson.replace(srcBaseName+'_', "").replace('.geojson', '') + rasterName = [s for i, s in enumerate(AOIimageList) if imageId in s] + + if len(rasterName)>0: + rasterName = rasterName[0] + #print rasterName + inDataSource = ogr.Open(AOIgeoJson, 0) + inLayer = 
inDataSource.GetLayer() + + targetRaster = gdal.Open(rasterName) + geomTransform = targetRaster.GetGeoTransform() + + for i in range(0, inLayer.GetFeatureCount()): + # Get the input Feature + inFeature = inLayer.GetFeature(i) + # Create output Feature + outFeature = ogr.Feature(outLayerDefn) + # Add field values from input Layer + for i in range(0, outLayerDefn.GetFieldCount()): + outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i)) + # Set geometry as centroid + geom = inFeature.GetGeometryRef() + #print geom.ExportToWkt() + # [GeoWKT, PixWKT]) + geomList = gT.pixelGeomToGeoGeom(geom, rasterName, targetSR='', geomTransform='', breakMultiPolygonPix=False) + #print geomList[0][0].ExportToWkt() + if geomList: + outFeature.SetGeometry(geomList[0][0]) + + # Add new feature to output Layer + outLayer.CreateFeature(outFeature) + outFeature = None + inFeature = None + + + + + if removeGeoJsonAfter: + for f in geoJsonList: + os.remove(f) + + + +if __name__ == "__main__": + + parser = argparse.ArgumentParser(description='Evaluate Score for SpaceNet') + parser.add_argument("summaryTruthFile", + help="The Location of Summary Ground Truth csv File" + "Summary File should have a header = ImageId, BuildingId, polygonPixWKT, polygonGeoPix " + "Format is '{},{},{},{}.format(ImageId, BuildingId, polygonPixWKT, polygonGeoPix)'," + "unless --geoJson flag is set" + "See spaceNet competition details for more information about file format" + ) + parser.add_argument("summaryProposalFile", + help="The Location of summary Propsal csv File" + "Summary File should have a header = ImageId, BuildingId, polygonPixWKT, Confidence " + "followed by values" + "Format is '{},{},{},{}.format(ImageId, BuildingId, polygonPixWKT, Confidence)'" + "unless --geoJson flag is set" + ) + parser.add_argument("--polygonMinimumPixels", + help="The minimum number of pixels a polygon must have to be considered valid" + "The minimum for spacenet round 2 is 20 pixels", + type=int, + 
default=20) + parser.add_argument("--iouThreshold", + help="The IOU threshold for a True Positive" + "Spacenet uses 0.5", + type=float, + default=0.5) + + parser.add_argument("--resultsOutputFile", + help="If you would like summary data outwritten to a file, specify the file", + default='') + parser.add_argument("--geoJson", + help='results Submitted are in geoJson Format', + action='store_true') + + parser.add_argument("--useParallelProcessing", + help='Convert Image from Native format to 8bit', + action='store_true') + + parser.add_argument("--rasterLocation", + help='Image Directory List', + action='append', + default=[] + ) + + args = parser.parse_args() + # load Truth and Test File Locations + AOIList = ['Total', + 'AOI_1_Rio', + 'AOI_2_Vegas', + 'AOI_3_Paris', + 'AOI_4_Shanghai', + 'AOI_5_Khartoum'] + resultsOutputFile = args.resultsOutputFile + summaryDict = evaluateSpaceNetSolution_Total(args.summaryTruthFile, + args.summaryProposalFile, + resultsOutputFile=resultsOutputFile, + processgeoJson=args.geoJson, + useParallelProcessing=args.useParallelProcessing, + minPolygonSize=args.polygonMinimumPixels, + iouThreshold=args.iouThreshold, + AOIList=AOIList) + + + + if resultsOutputFile != '': + combineGeoJsonAndConvertToWGS84(resultsOutputFile, + rasterLocationList=args.rasterLocation) + + + diff --git a/python/spaceNetUtilities/__init__.pyc b/python/spaceNetUtilities/__init__.pyc index ab294b91862b606a6c464f9fd49e326de4ad3d4f..9cead97993059fbb971a869bdd01f0137adc2ad3 100644 GIT binary patch delta 29 lcmdnUxPg(K`7h?<$+07UuC)ydYx+IpR`b;dd1^|iN31R>M delta 30 mcmdnMxRH^a`7}HHn6YY%H0!ou|GLt8kS_1%#8wr8{ diff --git a/python/spaceNetUtilities/createSpaceNetPOI_Round2GPD.py b/python/spaceNetUtilities/createSpaceNetPOI_Round2GPD.py new file mode 100644 index 0000000..1316373 --- /dev/null +++ b/python/spaceNetUtilities/createSpaceNetPOI_Round2GPD.py @@ -0,0 +1,380 @@ +from osgeo import ogr, gdal, osr +from spaceNetUtilities import geoToolsGPD as gT +import json +import 
os +import subprocess +import geopandas as gpd +from geopandas.geoseries import Polygon +import geopandas_osm + +def returnBoundBoxM(tmpGeom, metersRadius=50): + poly = gT.createPolygonFromCenterPoint(tmpGeom.GetX(), tmpGeom.GetY(), + metersRadius) + + polyEnv = poly.envelope + + + + + return polyEnv +def createPolygonShapeFileFromOSM(polyBounds, pointOfInterestList, outVectorFile='', debug=True): + gdfOSM = gpd.GeoDataFrame([]) + for pointOfInterestKey in pointOfInterestList.keys(): + gdfPOITemp = geopandas_osm.osm.query_osm('way', polyBounds, recurse='down', tags='{}'.format(pointOfInterestKey)) + gdfOSM = gdfOSM.concat(gdfPOITemp) + + gdfFinal = createPolygonShapeFileGPD(pointOfInterestList, gdfOSM, outVectorFile=outVectorFile) + + return gdfFinal + + +def createPolygonShapeFileFromShapefile(srcVectorFile, pointOfInterestList, outVectorFile=''): + + gdfOrig = gpd.read_file(srcVectorFile) + gdfFinal = createPolygonShapeFileGPD(pointOfInterestList, gdfOrig, outVectorFile=outVectorFile) + + return gdfFinal + + + +def createPolygonShapeFileGPD(pointOfInterestList, gdfOrig, outVectorFile=''): + + firstKey = True + ## Iterage through keys for valid points of interest, These correspond to OSM Tags such as Power or Places. + for pointOfInterestKey in pointOfInterestList.keys(): + ## Select from Vector File all rows where Point of Interest Columns is not an empty String + gdfKey = gdfOrig[gdfOrig[pointOfInterestKey] != ''] + + # Get list of defined features in the set Tag List, i.e. power=objectName + + objectNameList = pointOfInterestList[pointOfInterestKey].keys() + + # iterate through object names to apply specific actions with regard to the object like bounding box size. 
+ + for objectName in objectNameList: + # select only those objects where the key=object i.e power=tower + gdfPOITemp = gdfKey[gdfKey[pointOfInterestKey] == objectName] + + # "compensator": { + # "featureIdNum": 1, + # "featureChipScaleM": 200, + # "featureBBoxSizeM": 5 + # }, + # project subset to UTM so that actions can be done with regard to meters + gdfPOITempUTM = ox.project_gdf(gdfPOITemp) + # buffer geometry to create circle around point with x meters + gdfPOITempUTM.geometry=gdfPOITempUTM.buffer(pointOfInterestList[pointOfInterestKey][objectName]['featureBBoxSizeM']) + # calculate the envelope of the circle to create a bounding box for each object + gdfPOITempUTM.geometry=gdfPOITempUTM.envelope + + # reporject to wgs84 (lat long) + gdfPOITemp = ox.project_gdf(gdfPOITempUTM, to_latlong=True) + # assign a name to spacefeat for future use in the vector: "power:tower" + gdfPOITemp['spacefeat']=pointOfInterestList[pointOfInterestKey][objectName]['spaceFeatureName'] + + # if first key, create final gdf + if firstKey: + gdfFinal = gdfPOITemp + firstKey = False + else: + # else add new to end of gdfFinal + gdfFinal = gdfFinal.concat(gdfPOITemp) + + if outVectorFile != '': + gdfFinal.to_file(outVectorFile) + + return gdfFinal + +def createPolygonShapeFile(srcVectorFile, outVectorFile, pointOfInterestList): + + ## Turn Point File into PolygonFile + + + srcDS = ogr.Open(srcVectorFile, 0) + inLayer = srcDS.GetLayer() + srs = osr.SpatialReference() + srs.ImportFromEPSG(4326) + outDriver = ogr.GetDriverByName("GeoJSON") + if os.path.exists(outVectorFile): + outDriver.DeleteDataSource(outVectorFile) + + dstDS = outDriver.CreateDataSource(outVectorFile) + + outLayer = dstDS.CreateLayer('polygons', srs, geom_type=ogr.wkbPolygon) + + # Get Attribute names + inLayerDefn = inLayer.GetLayerDefn() + for i in range(0, inLayerDefn.GetFieldCount()): + fieldDefn = inLayerDefn.GetFieldDefn(i) + outLayer.CreateField(fieldDefn) + + outLayerDefn = outLayer.GetLayerDefn() + + # Copy 
Features + for i in range(0, inLayer.GetFeatureCount()): + + inFeature = inLayer.GetFeature(i) + outFeature = ogr.Feature(outLayerDefn) + for i in range(0, outLayerDefn.GetFieldCount()): + outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i)) + # Set geometry as centroid + tmpGeom = inFeature.GetGeometryRef() + poly = returnBoundBoxM(tmpGeom, metersRadius=pointOfInterestList[inFeature.GetField('spaceNetFeature')]['featureBBoxSizeM']) + + outFeature.SetGeometry(poly) + + outLayer.CreateFeature(outFeature) + outFeature = None + inFeature = None + + srcDS = None + dstDS = None + +def createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, + baseName='', + className='', + outputDirectory='', + seperateImageFolders=False, + minPartialToInclue = 0.70, + fieldName='spacefeat'): + + + chipSummaryList = [] + srcDS = srcDS = ogr.Open(srcVectorFile, 0) + srcLayer = srcDS.GetLayer() + + + shapeSrcList = [] + for shapeFileSrc in shapeFileSrcList: + print(shapeFileSrc[1]) + shapeSrcList.append([ogr.Open(shapeFileSrc[0],0), shapeFileSrc[1]]) + + ## determinePixelSize in Meters + print rasterFileList + print rasterFileList[0][0] + srcRaster = gdal.Open(rasterFileList[0][0]) + geoTransform = srcRaster.GetGeoTransform() + metersPerPix = abs(round(geoTransform[5]*111111,1)) + + imgId = 0 + for feature in srcLayer: + + geom = feature.GetGeometryRef() + + if geom.GetGeometryName != 'POINT': + geom = geom.Centroid() + + + spacenetFeatureName = feature.GetField('spaceNetFeature') + if seperateImageFolders: + className = spacenetFeatureName.replace(' ', '') + else: + className = '' + clipSize = pointOfInterestList[spacenetFeatureName]['featureChipScaleM'] + halfClipSizeXm = round((clipSize/metersPerPix)/2) + + xCenter = geom.GetX() + yCenter = geom.GetY() + + maxXCut = xCenter + halfClipSizeXm*geoTransform[1] + maxYCut = yCenter + abs(halfClipSizeXm*geoTransform[5]) + minYCut = yCenter - 
abs(halfClipSizeXm*geoTransform[5]) + minXCut = xCenter - halfClipSizeXm*geoTransform[1] + + imgId = imgId + 1 + + chipSummary = gT.createclip(outputDirectory, rasterFileList, shapeSrcList, + maxXCut, maxYCut, minYCut, minXCut, + rasterFileBaseList=[], + minpartialPerc=minPartialToInclue, + outputPrefix='', + createPix=False, + rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"), + className=className, + baseName=baseName, + imgId=imgId) + + chipSummaryList.append(chipSummary) + + return chipSummaryList + + +def splitVectorFile(geoJson, latCutOff=-500, lonCutOff=-500): + + + longMin = -500 + latMin = -500 + if latCutOff == -500: + latMax = 500 + else: + latMax = latCutOff + + if lonCutOff == -500: + longMax = 500 + else: + longMax = lonCutOff + + + outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') + cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTrain, geoJson, + '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)] + + subprocess.call(cmd) + longMin = -500 + latMin = -500 + if latCutOff == -500: + latMax = 500 + else: + latMin = latCutOff + latMax = 500 + + + if lonCutOff == -500: + longMax = 500 + else: + longMin = lonCutOff + longMax = 500 + + outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') + cmd = ['ogr2ogr', '-f', "GeoJSON", outputGeoJsonTest, geoJson, + '-clipsrc', '{}'.format(longMin), '{}'.format(latMin), '{}'.format(longMax), '{}'.format(latMax)] + subprocess.call(cmd) + + + return outputGeoJsonTrain, outputGeoJsonTest + +def deduplicateGeoJson(geoJsonIn, geoJsonOut='', encoding='UTF-8'): + + + df = gpd.read_file(geoJsonIn, encoding=encoding) + df['y'] = df.geometry.map(lambda p: p.y) + df['x'] = df.geometry.map(lambda p: p.x) + df.drop_duplicates(subset=['spaceNetFeature', 'x', 'y'], keep='first', inplace=True) + df.drop(['x', 'y'], 1, inplace=True) + + if geoJsonOut=='': + geoJsonOut = geoJsonIn.replace('.geojson', 'dedup.geojson') + df.to_file(geoJsonOut, 
driver='GeoJSON', encoding=encoding) + + return geoJsonOut + +def splitVectorFileGPD(geoJson, latMin, latMax, lonMin, lonMax, encoding='UTF-8'): + + insidePoly = Polygon([(lonMin, latMin), (lonMin, latMax), (lonMax, latMax), (lonMax, latMin)]) + df = gpd.read_file(geoJson, encoding=encoding) + + outputGeoJsonTrain = geoJson.replace('.geojson', 'train.geojson') + df[~df.intersects(insidePoly)].to_file(outputGeoJsonTrain, driver='GeoJSON', encoding=encoding) + + outputGeoJsonTest = geoJson.replace('.geojson', 'test.geojson') + df[df.intersects(insidePoly)].to_file(outputGeoJsonTest, driver='GeoJSON', encoding=encoding) + + return outputGeoJsonTrain, outputGeoJsonTest + + + + + + + + +if __name__ == '__main__': + + createOutVectorFile = True + srcVectorFile = '/path/To/PointOfInterestSummary.geojson' + outVectorFile = '/path/To/PolygonOfInterestSummary.geojson' + outputDirectory = '/path/To/processedDataPOI/' + baseName = 'AOI_3_Paris' + featureDescriptionJson = '../configFiles/OSM_Power.json' + className = 'POIAll' + seperateImageFolders = False + splitGeoJson = False + deduplicateGeoJsonFlag = True + + if deduplicateGeoJsonFlag: + srcVectorFile = deduplicateGeoJson(srcVectorFile) + + splitGeoJson_latMin = -22.94 + splitGeoJson_latMax = 90 + splitGeoJson_lonMin = -43.25 + splitGeoJson_lonMax = 180 + + srcVectorFileList = [[srcVectorFile, 'all']] + + #if splitGeoJson: + # srcVectorTrain, srcVectorTest = splitVectorFileGPD(srcVectorFile, + # latMin=splitGeoJson_latMin, + # latMax=splitGeoJson_latMax, + # lonMin=splitGeoJson_lonMin, + # lonMax=splitGeoJson_lonMax, + # ) + + # srcVectorFileList = [ + # [srcVectorTrain, 'train'], + # [srcVectorTest, 'test'] + # ] + + + + + + # List of Raster Images to Chip and an appropriate label. 
+ # This list will take any type of Raster supported by GDAL + # VRTs are nice becasuse they create a virtual mosaic and allow for images covering a wide area to be considered one + # In this case gdalbuildvrt can be run in a folder of tifs to create the VRT to be handed for processing + rasterFileList = [['/path/To/Pan-VRT.vrt', 'PAN'], + ['/path/To/MUL-VRT.vrt', 'MUL'] + ['/path/To/RGB-PanSharpen-VRT.vrt', 'RGB-PanSharpen'] + ['/path/To/MUL-PanSharpen-VRT.vrt', 'MUL-PanSharpen'] + ] + + + + + ### Define Point of Interest Dictionary + # {'spacenetFeatureName': + # {'featureIdNum': '1', # iterative feature id. Used for object name to class number mapping + # 'featureChipScaleM': 200, # Size of chip to create in meters + # 'featureBBoxSizeM': 10 # Feature Bounding Box Assumption. Code will draw an x meter bounding box around the point + # # indicating in the geojson. This will be used in creating polygons for IOU scoring + # } + + with open(featureDescriptionJson, 'r') as j: + pointOfInterestList = json.load(j) + + # create Polygon of Interet from Point of Interest File. This will create bounding boxes of specified size. + for srcVectorFile, folderType in srcVectorFileList: + outVectorFile = srcVectorFile.replace('.geojson', 'poly.shp') + if createOutVectorFile: + createPolygonShapeFileGPD(srcVectorFile, pointOfInterestList, outVectorFile=outVectorFile) + + outputDirectoryTmp = os.path.join(outputDirectory, folderType) + + shapeFileSrcList = [ + [outVectorFile, 'POIAll'] + ] + # create Folder Structure to place files into. 
+ + for rasterFile in rasterFileList: + for keyName in pointOfInterestList.keys(): + tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], keyName.replace(' ', '')) + if not os.path.exists(tmpPath): + os.makedirs(tmpPath) + + for objectName in pointOfInterestList[keyName].keys(): + tmpPath = os.path.join(outputDirectoryTmp, rasterFile[1], keyName.replace(' ', ''), objectName.replace(" ","")) + if not os.path.exists(tmpPath): + os.makedirs(tmpPath) + + # create Processed Point of Interest Data. + createProcessedPOIData(srcVectorFile, pointOfInterestList, rasterFileList, shapeFileSrcList, + baseName=baseName, + className='', + outputDirectory=outputDirectoryTmp, + seperateImageFolders=seperateImageFolders, + minPartialToInclue=0.70) + + + + + diff --git a/python/spaceNetUtilities/evalGraphTools.pyc b/python/spaceNetUtilities/evalGraphTools.pyc new file mode 100644 index 0000000000000000000000000000000000000000..313e7b270b34ccee0f5d36f975b4ac6e9ac6d2c6 GIT binary patch literal 8777 zcmd5>&u<)8a<1+k4n=DCn-n?v*|KDdmT5_ry>v_nt3{;M*c(}~TiTUETNt#an`Bcn z-NSxwsG%`)l7b-D1+vHj$sUr$=9X)GNdAEYK~6aa$stGr1OWmBz9fg_`>JP#lw}0S zMF=_5{a(Fa^{VQ9RrSXEkEzLrOILoYA%nj$ynleN`DZ*Ld>m@nv=~rP%+<_ zyl`Z5URV~17bISks4OqA+LAPY@QmTMnJXu{R z`Mj3ctEL?`;(M|Eu}zwZO=45O9qz|>ueJ~DPL{sfd^O$=o3)WDub>~5ZJ@YjT&H~M zg-~WmWs{UD&V*ZW#+&k;yaG7D)&F3^KFrSS)bzqVfBxxibS5HJR#jJ z2P%XjQN8{6k@F>#JSqL-RA2rH&7#(ugz{4e83QWRHVD=J6s6pol3kBOspe^=96v&t zksdzNveKW``F<|2p;~XYn6LYH(nIgT(tjgddQSTD(wpO;r)A0iiYuU6?Jr1LrWv6? 
z&c2}?*3bg8ETJ&Z!Waq*#oSOJXsF>3!WN&qPJdB)IFsY@)Rp`<98U7ciH>pHaF74X zlzWey6r7Z0cZ}0uyH0OhE0g4}FmwkkJxFLyZtVR^9i zf$i#Pv^%kHLJq~lC}}WP(J?VTCQ9Fxut zf|b+E>TWo0d&innXnT-gLpJkYmXg1f<-XxjDwy7Ya=>ET$kNE*Q1S2IZ?2Llg$s*W zENh94b9`F`?wHtW^V9gi1l*(oP}pu`CkjkpB=AV*18Q023AS^1gv};Sev+FynYBB{ zp7<`RVxw-?5*!9XD=-5CmF@jFe*%3ferYfo)bt`x(DDlE-47&vcw}2`LROD2z z!nMr`0drfrKc#Ao9`Dv`M(r7_sc>zQ)&R0EfMkAxhm4g0)@7&SR`pr-CcSxQ-kD_0 zs{+iY+_DQe$A86Jaw?#YX>G!p@uu7bXWE^1=bZ&_h9fYJ{gdmMIpOr%U!VQ$fBpRH zU)*fa({13(Q*;Yo^E;rxXz_9TV{`~3_YH6TTmc+7xQwscAE%S({%DMj0dbQ{`vx}R z`CQ!FkFSk=S)%JG`~+MA5ExiTyuQPDfDkz$)uERFW&)8w2C#t)l&&7Bm8JUJ6;3#< zpBWkWioYdR*Dyl;4;%%68fcA$P#|>*1$ZS80H#%V4wgzN0r%%+cS8Nif)Mvnozyh| zlP3h=39z(qJr&u71DhV8DnJ~F3J@il!xvRmMsQ$NLb`@HbFu^<#6anRvv)_hJOVp7 zOGg$--94m&M9S|v1ZpraD~sE3k|!m-&C^ZsBV1JPhOW=yMKuUp?w`^&c&?)D6x*N{ z#D(WEI-0@OPh#uPWdHO?rDvpnR=SIh#coe>s@spt>dVhb-`DlwID9tr&r9$8P&{u) z|AO@1;N;c*Md@8AN^jEh?84hz&&k=|8#3W5H$hjRqO0jtY63@9!4uSp6uYemW0f#mMri#;j5Yy7}u&cTXr zKmJP<=zG$?F1`1pcb(?V4OT}ReD9?VvW5-1{r9E+!SLW-f2j*=+U0xFUl)5ydhbi` z1KGmF$Q9OEz#=rZ52c3>Rx1^&ogVN&4P9nfrh(WuUIdc{cQvpFjhsTEz!5M5QnmK+ z-ZvgT4@o}5y+bP4os)WkbBG8px-3sU0+5ACp3?(^Gjo~+wnC|kd^=JFb6EFF=;ha> z|B>`Ql3kn-gq4U6p-!j`!+nX)ON4`g69F@WiNRpqF^9czfYe~w`%TDQ#AP(6u z>g=%@b^oTGq>0_ofTM_N0A&38mz#saKipJbR)lVh2_Ts`ZzIAm))YbDklPLQ_ru!f z8tLpL+dI#~1C3FbjYXDGKv$m!+8-R}j908-kfqT}f}k>fR&Qd74MPu3;Scyl!-mdQ z#7AuHv~vV|YyQ5zH_QrXeyx6mqrNB_AAeCaK31l-l5K-LSEp%Y%`Dfr^1}N1M;|pB zN)4Osop5_g|NiNw@<|;$1OGdThxsjCxyhPfwGpF_nrV5Kdj3u>4wS)(GC)NT$dcA%0RcG10Zk9pk6_T&0!@9k!BkzhJc+Re-work4ja}AhVoa?%XeCsx`P#pJz zs|&Nr7)=FKt7}k9ogY2w!8$wWDKPa$C*O~wdfYYvQyS&7J4iDQ@a6`GhoQ~{t&X(o zJE%$8(Bsh2=f2K^GaFpyq~EfBo>uKvS5beJrEwh+R^e&|yW#$*NHWiagS%t7(7e&t zK_M61!W5S)C>&@<7}|gm!bm837>MQ-N6)>^pB6}_g8KHc)eaDbCLv@70?{&0|E}&J z(j7oR#qAiE>b9=wk_UKG+uc^dY^zw0iA@p(YiJ4H#X}93*`?9bNE0{&32of3H?xK| zZXs{8nl34cDr|;FDNLP+X>~P6?3rDL1Rbr(6c<4Ph>mLZ1-fXl02t-JQ|}Jmg$VZt z_5J#RZkmuygwsv{Bi&`#UB!y8@j{E)YvJf-7{V>NqdF~1}?lT)tmyyI28IcL%F 
zk>$)W7jh?&>&$pd?nU>EQ!dToTR|3d&N+>%ud*}cRGk_8v)9z%zwFFg%|08*Yn4(_Od3k?e%|;A*1;fxT5%Hv4`X+5d-^QPpVoK-75pBDG4Zbr zXHq6dwK`o8ox@9%I+F0VJ@cL9qm0wHjt1UIQg}yDUh~mt=W#pC_xG{xM0Ro+&r6JkW4_oynpo^Ftc^I>M zqzjjHm|g*J7)%h>28%q#JajbU-VYGedi?o>$-O41H&{Q$t9aNpOmv+Vj6l8gDiwF8 zlT1O86}$lEjWMgkX#f#9|BXT0u~*tAcz6ieFmjE%NMj{{+Cpa-$ix zoKZEe>7Vhy`jO@^A^nB!p!mfJs!`t>K*?o#2JRDu2}4bv{5c*Pv;=?1stTN@{r^B@dZRJ;No%O&-=x%tNaD??%)Xsp zQ?-gaNT1pVnrR0Np@Q3de8N^u;6lsfv6(!-*;Mdte>kod)hw6ss zT@@7k1xkkotEJcS<2WUKqm@OSX8e5uin)kK1yMc!XY@JioG4WmJZJvXvg0|GpH@~X IZ_QNx2dxznxc~qF literal 0 HcmV?d00001 diff --git a/python/spaceNetUtilities/evalTools.pyc b/python/spaceNetUtilities/evalTools.pyc index 1c9bf52e7413385c7c46adafa000ddfbdc6fa99d..eefc2fccf7de13ec5cfda57396767a046d3305c8 100644 GIT binary patch delta 139 zcmaE(_(qX~`72b8+-AA*= threshold: + true_pos_count += 1 + truth_index.delete(fidlist[np.argmax(iou_list)], truth_polys[fidlist[np.argmax(iou_list)]].GetEnvelope()) + #del truth_polys[fidlist[np.argmax(iou_list)]] + if resultGeoJsonName: + feature = ogr.Feature(layer.GetLayerDefn()) + feature.SetField('ImageId', imageId) + feature.SetField('BuildingId', fidlist[np.argmax(iou_list)]) + feature.SetField('IOUScore', maxiou) + feature.SetGeometry(test_poly) + + + else: + false_pos_count += 1 + + if resultGeoJsonName: + feature = ogr.Feature(layer.GetLayerDefn()) + feature.SetField('ImageId', imageId) + feature.SetField('BuildingId', -1) + feature.SetField('IOUScore', maxiou) + feature.SetGeometry(test_poly) + + + + + + + else: + false_pos_count += 1 + + if resultGeoJsonName: + feature = ogr.Feature(layer.GetLayerDefn()) + feature.SetField('ImageId', imageId) + feature.SetField('BuildingId', 0) + feature.SetField('IOUScore', 0) + feature.SetGeometry(test_poly) + + if resultGeoJsonName: + layer.CreateFeature(feature) + feature.Destroy() + + if resultGeoJsonName: + datasource.Destroy() + + + false_neg_count = truth_poly_count - true_pos_count + + return true_pos_count, false_pos_count, false_neg_count + 
+ +def evalfunction((image_id, test_polys, truth_polys, truth_index), + resultGeoJsonName=[], + threshold = 0.5): + + + if len(truth_polys)==0: + true_pos_count = 0 + false_pos_count = len(test_polys) + false_neg_count = 0 + else: + true_pos_count, false_pos_count, false_neg_count = score(test_polys, truth_polys.tolist(), + truth_index=truth_index, + resultGeoJsonName=resultGeoJsonName, + imageId=image_id, + threshold=threshold + ) + + + if (true_pos_count > 0): + + precision = float(true_pos_count) / (float(true_pos_count) + float(false_pos_count)) + recall = float(true_pos_count) / (float(true_pos_count) + float(false_neg_count)) + F1score = 2.0 * precision * recall / (precision + recall) + else: + F1score = 0 + return ((F1score, true_pos_count, false_pos_count, false_neg_count), image_id) + + +def create_eval_function_input((image_ids, (prop_polysIdList, prop_polysPoly), (sol_polysIdsList, sol_polysPoly))): + + evalFunctionInput = [] + + + for image_id in image_ids: + test_polys = prop_polysPoly[np.argwhere(prop_polysIdList == image_id).flatten()] + truth_polys = sol_polysPoly[np.argwhere(sol_polysIdsList == image_id).flatten()] + truth_index = gT.create_rtree_from_poly(truth_polys) + evalFunctionInput.append([image_id, test_polys, truth_polys, truth_index]) + + return evalFunctionInput + + diff --git a/python/spaceNetUtilities/evalToolsGPD.pyc b/python/spaceNetUtilities/evalToolsGPD.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d32ab4ee21dc00e706a752367d7ea295371fd4f GIT binary patch literal 4120 zcmcgvO>Y~?5v`daDN!P=Z(DNKAD0D)1Xv|NkO0YMy;(C>wAN~!kqsk)4PwM;lEWR& zP%}+i3doav$|?5*$UXldKO)z}haBq^&^L|B+E82G@uF9w?$5nCqnB&P?M~-U} zRs>}63#;-L6BbB@QsLEd76I(Is1En?T`KlmKB9CU4k&1pF5dgv_SA>$#Q z{BLNm)XdYiB2!ngzlaqXIx-cKZ(NyG5R00}!(sN-6I^qxqEIxJt|4-8Ryq_kvf&Leh*hVEIqD+g{u`Zha zQmN%T3otSes+Pcx;l;*148RM;&`U zHtLBWGl8XIlZ3MLNstA_&Xl$$x1BMxw9BNji5)1JU!kKUG-tYRFAC!*w?EIa;Ji>U zopi|2!NiZ4h 
zcGG+m{c10=yZJcinnPos*(i=|Wb#fscx4WD$LDsCCf^+1H8ASaG>!AUC+&|QMXN;V zgl5X`qY_VjaSI=*>Gok-`eI6v|Zs9F8jh%=z`XC{V7V*>8x=p{`3n5{#e~ zx}2~i(DYDNFvzxF{==CyX^!l*3vf)ekIVDMlNY%7H)Mt_Z%F>wq3u>B zSrNM_!!0r~ADk(ExXx9{R$N^2ur5Dlh`^=4DbqswRw=#13w@2XGu)Op(8a|8;S1~X zC_BQ#-qrzZvL@3t8GeV>fu~fopkelF%Uv0S6VxOQCx5`B(Kx5pJ$>gQ?x>>bx9r`w z+%#Oes>J`;=bf7v2DJZYAE1sjIK>;bKMMM0 zKa3uuZy7H>D`oaWjYj)lK0E5BnTeQ)G$*z0Q51JlVv}g7O>>QlDd&PkA)Bp4O( zHXkE^AlB)Ow4d1(%-l1!okg!q_SyL%1$pPq8G>GJSJ7>o*jUpJY;crLvaV6)Lx$a} zOdhmg9t7uz;?(~2S10rQz#~42OdPgNuQ>jfkDeac4ChxZkBoheh&s@yT&$zwx(-D5 zkr4V=mR17kn;`bl75^3*J$EwJLydwnJwT{h7r7(rlS>V8)L$g8Oq?QnaH&T)#UwKt z`!S}ftA3NIcQq{glJ-L5_JazK`Ibx8; zp-;C;E_Iu1ENNkAyjMXe2D=)Bx)^sf35Du|8V$ok20YGBAuXPs5xwVBv z`8D>1V7(k)W>!Qv+eD(+#6SKpt075|#(VJB4_6f-c zZ1yP1+cnNiH_GQVBZwf5i+lcqoN#%sAZ1C+LaaAQiaHjwR$rd1RM&}CV3Gg)u2&lF zedoTj1Dtgk-H9*W--meeKcFGT;C)9+2~-nBO`&*$r2)A!OTe^W#^rlL`Q8CAEf9i$ zLIJNGR%$4Xiq{f&^ZGCfF%%paP<*h=s_Co3905Q`zkEe#$#V=I>5I`}i~9PzXw+{( z)_*-P2(E-lFAl6ViO)=BZ=&gH0x4bvI=u`fJ&SaI5ks0Z{l}D~Q#BI8|3x+$XX&^o z;P=A=7Um3)mrUjxO;C9n&&6_;Bfu`0D#kEJ2R$ksw4aP8);|FE8r0q^Y;I%!(l}JE zGlx$nqI$pMDH!ZM*>-DQ)47N5igU*;-gtPQlF4X%UbulELdh|I6;SK@cfq{Ooq8qx tC*&QYxjd@&-ov{IJ^t}14JWbrguu@?(KK*58VJRW+cn2?8c*@u{x3fcgTVj* literal 0 HcmV?d00001 diff --git a/python/spaceNetUtilities/geoTools.pyc b/python/spaceNetUtilities/geoTools.pyc index 9ae04e84d4a28d8c85365a5dfaca7648e20de86e..5b804fdcb4cbeacbe4d446c8def42f954357bc59 100644 GIT binary patch delta 12071 zcmcIq3vgUlc|PZ^w0cXc)yuMES+-<*{fy=JGmc}~vSV3xu4KzrY|CrimAuMoSK7UD zBxl7Y#HGBZV9qcFIzY>-fhlPRB?M?1NFXqUriC(7CWZ3qlmZQt=`==8B--BQ3tnWti#f+t5rD~u| zIZ8Pwm#cwtDf?8cLXB0bfl8&>SD<25YAm1z0&1*U4ODxrg(?N1^#;f~PtX7TH zsewALUZi66YOFyGG5rx6$WrjmAY=Jfi~p? zq`h4^)#`c%BUZ^Y%*4N-y6#g0t67e$QBIA_0baGrsgqT_VfE^Ip&D45H?ct`u9GoK zl+!2^*JlTCsiiVtLtaZ0S~6;2qjE5zS+L%uMil>LQAWGB-!gT*NDbVjoEE`-vvQV8 zp+h;XQrM!L6;jx$oRw18rkpk@Y*$XZ6m}?Ql@xX=XSEb|sSGKzM)uyVoRGBcQ4UUq zqX6smg7w}U9zY-H;vWXAl>xvH=&q46*kqHsUZDo|@eB^o6)Hx2YIMJHHmk>UZXNV? 
zsO!aQAai%F2}^I0Rd52}zFkgyKvn_bKzWxMDNxRC!Sx>H>=9fKDrc`0?p4k{{^NP} z3(|+A1^C@9chM=;S{ZOa1{?;*7pUIQJ?8hUjhRBo#HmCy?uIOs+yYjUiBY$NwPZM+ z8crm~LYg%;4JQ+0gOTynh}Tra;fdJvNFwevl%wIqWXz4;)LM$x?r1#H>n5Y|?9?h5 zS%IPRJ?694_SM9r7(Zfm2)~pcySqw_YLx~7wR+4_GnRQvVM}J}%fvYCBZ)gNJg?59 zr%=uKRobsc3)N#v%@nD0kxK4UX|0lfF719wJnzJ(9`vc?D=J-}MvGL`m+LK%-U79Q ztIUnIyt=G9TFkW_5N`(-(i5+!iQoL+m&vWNPfhGo{lKO-QX1^7*%5|CL*Ns*=pkLSNwR)M)+*kOInHKa% z$zeA4w95!&VPb8Q9c*fEoA}!S0%C0k32F)I07B$WyPiN+;nsE|fKXu^Jeu-WZej;< z${~-5a)brguQruM4Z6duE_wy_0K;#yy-QpVI!;$40a_-fJxq8zf{9B^#wZQkA2MRk zVP)Tf#$s5!TwJZY%x{XDGQ0quk{odeztm|E4H{?0NAZ}GjkoO4GX=`kYSbc6g4x08 z4hmphZ9zV8lj%b0F9?#JD%PlUUQn8110nf3DF)7W{Hl>sQY2E+rQHmq0n&t?V(Fo1 zSt|Js1(wTplz1J*D()A=iYc2AmMDZ2^Ga1COxBK@*Dr@$714a{NEg$1c@ov7C%W<$ z=8hRH(u^%j_5aJ3s=psZx&JJ1?hWy_dt5SK^0H_?$NZwCZ8_OgWH8Du=nMe-a;$51 zl=c*Rk`i~#_0rb6$e_ZTF1dLqb#-t&>|U`OQ1JvZnV5__eaT7p$`J^qdA@W{-Fhy# zffMmtggFysl^p}d}InZK#3*B_q! zUe#%>Tg<-dlX``Db9I}pG=En8lR}$1%KT|?V8OK2=1NUKKQTL9^DL+Bs#_W8C&p(9 zL?x^+qjg<=lE6M}o~~Q2+sxPM5=%v{AQW@ry%+(Xtqz3kHbT=*z0Qi!41raGtq&1f2*F|wfws8l$`G{SCc~~QF5t%VNt{}I0OcD?Moam z&OY9lFs+A28{24f?wuSS2Fui%^0vl++`u)o+7A;vZYI_?nx3`<+VP*Q8+|!Pb~Eyl;sKy27@z1=%;5h zQ>>=m$-$i$Zm5|O>F~OXq`Op2MJWjB5;a=Fg)pb{!f7>ACQYR(UB;&KJ~dNbrDiJB zOr^px)8#5%VNQjXX3!W==~6X{mMWDFh@F{u8(f=WHCn+QOxF`5UPC1<4biHmr#6ZS zD8R0XkESQ`4+K+$18udYj#+~KzpI&G9{SnSos;7Qe;dbp^{2b87o6^#*N}g@0+npE z=Fb?MJL4Wb-#UNtJ-Yv<(-+I>PrPK!-zP{5!qwz13^%{wl`}<`ziKbBB1`9SAX`?tkwdE;(X}A%kv?P zXzUOr*CqnYl=3 zeaS8=vTIN@_Pk0BaI$4rn2vR87AVqH>+LUK!VRhyPEfvbT?O?*kDM+<5DrApYSc@t zHCC64om6GE^swmBhLaLTRKedd!G>#ip_4l9lzw;ahth zY=XXq$&+!=$_N}sp{TK0rAk+dC{{aAk(eX@gum9X*U6NI9Zp04;9NLG6+;h(E+8wSk;e+GnOYTZQZ7d14?;J4 zHF~FBQ0ZD6Ev#nhXt=QRP8zauHCn5phMfvc>$6vZY*BU-N{sM zxCluE3`xn0>BXN)M#f{|p@4NL`pdrtO|k}ITJ5yQXI6XrK@x~g1mBn1^y4BFQ|QJ_31jx4~>o%)clwg z)FoDhF4X~Dq-*iFNC$mhtzO!KX4j5qh6T?S>@;tZLcG506|nSe0{pVqMz7tz-K~6y z9S|AsY9doE@=%g}vagweo$Jf;=wjZwvp(=63|&Os{-j{Fsok|k-!S`jy&GY{-|t%E z=gM}qDcaq>oZMvpoS=z-tbK=t9^bu0f51GqdwSq!99YCD|4AS#{1;2!+ARHo;Fkar 
zCP?(p1kPZLH~T8LWZdgFzuFVfA2k(wQvq5{dzNeZSbEaDb#L7+*@JaqMK_Xi2XkB{ z`*^vwsKbr#aJaqu0{Y{#-TSmY$rbY)mcCmz$dJ~%n)jOJ2Y1)Kg^h1zBYqxOkfdAXw;x5 z>d?b=;uDau7hP|)8Sbpn|7xyvzPNM+2Ij@v+{Ydyc-n*yA8G$Oi*p#-l{ic;qj-B< zSDUp*TCFmlIez4YE?N9`x5G%*Y7j$Jz9rE;!{Vr=zlTaKl%-A%nQe!)cee}`_?oc^QD)l(dD#Au!D@UzG$WE z)#y^#S~GZjN$o_`mu^s_tujI@Iw3zZKe+Em?G9gd9FljEn%5IibLjZ-ZPga-JIpui z9K*e83;+6|(0WC;WbIQij=Kr*F_Y5vUi19%4!z&}^!SFri|Fti$rYqXmD$`~zr-^M zs7szEH6Dw)P%-vFbFq8nrn@=rUV?8C93to>5Z0G`S2wFY1SicC-5vg8EFLlc)V*r^ z2^2-uBke&`U`K{+ni#PwqTi3j;hCpk8lph;Np`()_P~iry@Hvk!;dF$R)ROjf3o^i!h28AbW!uJiV;Q{tmm_%^OZ9 z^-1%q(_6}Ra_A`n+w4Ab^k|U98Uo5v9*fSiZ4Y{7+oZ%Z%xo@{6-%4HJQLB4=C1w& zy2&K^)4JEZ+~0DG`iP#bLvAZ`_6aMhjNVpfTF#f0(gooSr(QGLaehQszmo=snFHa$ z&I_+MAHI+Xf|uc4fe*o&jc^f1N=^GkN59{^;bOeurxr3CX*{?VsKy4kaSd>Z8=MML zIT%_7e;F<}{9~;eW|lFh1UH>YZpcTK=^b3z3BL;tun&IMC#`!m*hB zM$X1BYsZDLgcZ6mE3}*)gIw`EL5u(tRrU-)=3&BX1iW8c_~Il>!W{RbB(`#hu9L_R zHN4%+c1B6GE{7)F{;pnogl)XTp)1jG4i%>f)K>O(c2V-k*WKZ1q!fjRdcysX%6ZRoYoEqao?|O_)WPaOjyt0> zfE!qUE$f#E+E^8x^(|I;jLo~M4rDK31QJMdV8_j{|rqmB1r#|aVybf)cbmL|-1hud_s@sF(4(`NI?lJ(-; z!#&|O3eySu&9UR6H@Mn-X{1SyntvX7=@wnGJX)#$#B7RQ!CS@eN8c3qU2gq)0M9EM zGFwJFZkhD_Xnp2mUJ~J;}QEq=#%FU&j!2?qihbfQv@41lg1>L7!rMd zB@%XQW~prQt#l0~Q{D?kAzGsGuFfOk_Cq&1!(JG*n?uRae71ImWg#R2f+@*fE-rH! 
z+O^2kcr4*WaG+D+xD!svk)EOCY(}ptsMC#j_g{iv9U|CfM6fN0O_x|`!!N(B*;nq1 z63HQYC(LKCB0|=Wb20_8WIovj;mLqIkXXX)kWJ*_8?9N^m-p^Lfqef#0tqG>DK@;B zFd2vUkgZH|bzWF6?;(YJD4}m4z6A1iu=ErkB-G@)4=k)F&VUt=j6yC;t9>FM*)?+s zzNsN5gE}ufz?3_)Z6&!EneS3Ok(;ArL5?8p7$C3oP`s4(N)<;IO0sne+L^ZkCgPar zi4UuOWGujrQJ4dC0Tt$=W{|Q2GvmBfil>%x*HIGfghm^ZZdq-Fu>m2Qmv9fjKOwz) zJXRC7A??SL5j;~}jSRX8Sl7hJbcimb$2zI8I3KxSj^JxBaWXgpUiuhW^`qoS^3v6C zY!b08*BipyY9txMw?I$0?{Mi79B;lt<@_IhPb*N}lSHk0w*tD>6Vmu|z2A3L( zBs0jJQx;OuF+>H!(_S?H$3)}t92#z7kOIIH*-x;MCf!4b`T9--+2Qz55fSbiGlseVF=7v zN{lW~<9#G04iUIh*>B=fJR3XNEDOp=@37H`(`~bz(hof&c;J78BJzDt7Jm%u0v6nw z!b*&!Cz6NQ9Rz$0=D>v-REsG%es7pfjfcMJ}r7dVgUJ!&3=*;e~QE+RW8d{0_11-6wy>F5xW`@ zO<_NaAzrFbNXR(H8@xd77$|uiHb0$-Bfhbp;~vj*54yBuZLm3VY5XF=rBcZ?<+g5x>FaoeKrWr>S*piidnXBg+gUDG}ZT^=6aE zF0sn30M+O%v5BD(zd{q{X4XG9WNySwAFGkmTqaj<8Aj!kl5aW1!Ejq|H%QjP$V_f1 z^}m;NeVP;zz7QvdsMuV?UPZ8nppW1T!8ifDTxCB(@J9rb1TPRs!ubmB?w1Q>o>EpRcmkfQ0Cuq58m&SU$w){TulQDI(XtD>A2L#w zp|;g{9H(~E+hm$)oyLiij-AG_OCM>gCZkT$Kaxr7q%)myoKE66buyhaex$C)Da(`q$3ZV2-sSZJlz~D`$hcTA=#DL&#mq*{Gbm)o`AwbM6tX(PNX$w1NIV9@%Us z{p$+z5GTE9u;yr>4MbHR4l`DK+I&S)Us5 zliz;;`cL`QWJnB;y0KK^a>n(8i#wAd z`g|$rJ8>=%AF{6pcGYtmW~$i1SK$lkVqZYl`$C|0Pr)10 zHK>ns!3|wtIF%=nS{WY+?J!a-B1cn9F-eGo;v>>f#-d0GlPf2wAQ7RJ%0cHo_F!I*MnR17W+YP z$z1T!%uakVUk4dROlgMTT>L%#48zWq5JZ$AS`Z)z2te0k+J^vyuMiN9VzBDTi!5{v z3^54dW5in)eFLf<@q?h8uj(!E>d*nQ3sf9IjGl$69=6z*N173oq~Ylky86Rp-m0+BLW-*AJ0ji)AQ}WhI;I(lICS}rWapHH^P;axE#t{ z8XSwd7wlxow(1s+T>*0YaBE+~bxX&h&chd73_ggG7kq^QU7?rg68u);cdf4W%?y<$ zbeItnKLp~%Nz2TS%hu>65=?7R>oUHbbo^2*Z9K2qw({mMBk2xm*uN;R)Pwe)%9DE0 zhFEPsQ&FQIpZRLVNv&(Fzv`&2v&XAe=mPt2)jOpwL)lG7lbQ3W^r#)J?q4jR-L}<) z^(SXKYQ8}M@71=1k6?`HA(0JJXIIwkZ|`M!JEd`t?apN4!^z`V%Xlb2?Nlw7c*%?c^=~Ub?57=nq;p#d{ zyFfChhiRDk$Hq#nFWG-v+IF*5ruD zkl_TmVI&I%;zE=;}^~;Zi2hn(YPqr191Ume$ znt6BmRX+|oo+1sb;~MXoI1d*H zf(e%Fog7Q0-4m(4i*AJOGUAPKa%qx31#zpCeXs4&gO|v2hok}Kz41v|l(BAS(wt(~ zJ~ngNu9!bz`5BfOxrFfHR1za{vj^<;_6;{XqGx53USpFh&y-RV?n}X;ti6BLx@m0D 
z5kLZ91;7yyU@8y_WC-L=Hh|GZpGr4qb`Ee?14!7vNKHP$Ze0V9sHvbdc+EJ@&|Iu0 zPb+MIY*38^dBCBvYhbUM3Q1M5%7*G#ch;|_O4L-TnkrK++DcTm)c$gH!*p0pm8znLmW!J#FYN~R9 zx^vciR%I*E{3*|x?pbT+%!%J@&dG)5e6%&NVI-`w_$wDAAD_~TtTSH4LhJHWy23Ya zW96)kyP}vH}5}84hL1KNzWp9dfq{vMW~{KehZ7L7Fjgp z>&t0Xwa_15UJy`4qS2&(UZDz6sQUk7!tq;(BaJ6akC9#a zJXmw%F6FkDAM*nKN#b?5yJzq56Q;Xozu-ad(e9oT5y4!-)S^CsKxUl*qPLWXvk_5c(_Re`UJN zUxw_D$8m)&K~7k&>wIhMo9jDu&CKczMr$=Qbyur@nu~kmYMOsQmH7q;f!E6$Bjv1< zr81#3om=Bp%fFf3ZVZ?Zv-jjt)y=q=%`kl4IzcWjCb{uR4(oy=d8$O6ql zu=zUc=GM#nEsKPHf$Ky(8Ixcbo<~5Cj z3b>7{hF8xTz^dGP2nNLrZtQvBGyyuKMnHj9B{}5QER;%fiJELC@^lSUYruB4R80eZ zIS>H8Xx@)&WSHro#*vi;L}NNZc%-(>HPIOup68nitK<^pmfzSX*Q(Jv`8+|hYv4GK z1RMu|{w-Jl9xW9}YNvTM!_~fnO@&Bov3Qll*j!F5$?ja8_VV3eF2h}``^K-`Lg=k! zmZ_1d+>*>ooksG^wVoAuP4$hxrjtMzzc>T9gXA5S17`6uG5)@^1%W=yHoOa7hFfsG z8$a)1(Mu?qmq8X^r&h3mxpRhNdygJIz5nQaJ zL=;(K5o*|u&G-~`(3zi(?_Sp<7BoKg8I zG_IQhIn{t0!cLhE;SLbWFY(zO+Z&3T{6QVo1-eErvGMKK&dV${oR4{zxeg-ho-?0y zKOn&;hu+wK%G_#2zRQM-Ad>dQCSBaPBg~i0xP5ZRn&Jgk+3P!M!aqdQZ9xF0b=|j8 z@3o2ho&nefcdiUjvyadI0dXt42rd_Fb7h&r{$F6zC`S6-s5O010F1 zOl-)Fr;^@;Y?u7njb+@y*^n?lvv2QexY_L&c8B%I%;nu$cTn90SAh#G3gXMjK-d>! 
zJV53wvE(Il$-cCw?q;#=onbv;w{%|AtL+y%m*3POT#WG(?&u&+(SE#~JZ|sq%G}h; z`uA4oCvDB%UsV5^+K7hlk@I55R_<#L%N_1GDm|Bc#&+-9TK#c$dYqN`cx3ULMO(m3 zqU{f6KG+x6`h*=iuXQzK2pZlE=9G~c#AKH5_0__MWYq*>~{v+`|Q)w{21qd!}tM({2xr!Ajd z#vPmNJyaF);V8nfgJw=U9`cQc?Du+`^hWz%y&LsCw&~c~os!kSJQZ0^!wW%XZO-=JOE2bTH&^(93kAZ{~MT%;e$XzxDJdbgTW==`+&>oUj;z=Z!XX zED|cj{BzMvEJ;o0LvnuK%K`?`73S>AJymj}nr6z_LK%d!5#(Q{9#yzn;6?OY#)Wn& zIp*5W^#@A?wD}MdbxpbbT7O#auq|iqE^eia`$+D$gJ%vL_zlJw#j-^0e5}sL?{>~X zebSO>b0HS*nzxjF>r70S+mmPS*Om6uXS2<_IErcyUBC)XjE+Xrmv6Vwuj6=l3K`Z0 z``o~b(?iu5MWEw3M&D-s2ZZ^QU)4?_zrdq)9=IBLRzx+*s6~k4nYvan#8JuDM5S3k zv)rE3?=nN&e}Hv(m_`m)r?5|O=dZ`5oSYwA2niP0Jh0eq;PS!YekhODwNtYANVT}B zo>f`-Rsg(C;@J@rtC$sH?Xgd>VUYtNmCU7q$ZB!ZMZJ6vgMEb*ifJ3NQM{BRVZv7~ zP@`&EJyuXKRj-gzISOf0DU9#Q&^ukJ=D52)4o=1CYe|>9$I0K|$Iwwm9K3^^czGwe z`$^~!@BA)jQE>18If=Yu^$wREe>fy1DpGjJx8P+xO_c%>;i@$|!lDP{_syL{FzG^(Dmg#(?Qz-G*7X)8|bc;7V-tuODWx*F@ zpt%Hes0^Z-@Zr{wFrCB`-OnNoqyi2|sK+%$$#} zi>AfCGSqlt9{6s7d2)U+ReXTaoMzB1r$CyCK*yuP!3;9JI|pK0v23Wf7&_O>%u^!8ih3VM1w65STA=Vlf=NahY1BW}9o;WEgMdmzjECQ6*K zj71(S;!HVI%w-lIAPKWo&Qm#@2@;XjY!IiNVe4TMapVqr{X*?Dp+`_bd?Vk_iGu{3 z=2!lXM~P;DO8RHS4f-Rpuao?O~C26ap(2xB7ae3sHUj0sJbXplwZ+Wwyf^1igkV+y0_v~=tz+++itTTYioMC U2zB0XNdAX<{j%QZxw9_tf7=aeSO5S3 diff --git a/python/spaceNetUtilities/geoToolsGPD.py b/python/spaceNetUtilities/geoToolsGPD.py index d39ff9d..f5029bc 100644 --- a/python/spaceNetUtilities/geoToolsGPD.py +++ b/python/spaceNetUtilities/geoToolsGPD.py @@ -147,13 +147,15 @@ def geomGeo2geomPixel(geom, affineObject=[], input_raster='', gdal_geomTransform else: affineObject = af.Affine.from_gdal(gdal_geomTransform) + affineObjectInv = ~affineObject + geomTransform = shapely.affinity.affine_transform(geom, - [affineObject.a, - affineObject.b, - affineObject.d, - affineObject.e, - affineObject.xoff, - affineObject.yoff] + [affineObjectInv.a, + affineObjectInv.b, + affineObjectInv.d, + affineObjectInv.e, + affineObjectInv.xoff, + affineObjectInv.yoff] ) return geomTransform @@ -171,7 +173,7 @@ def 
geomPixel2geomGeo(geom, affineObject=[], input_raster='', gdal_geomTransform else: affineObject = af.Affine.from_gdal(gdal_geomTransform) - affineObject = affineObject.__invert__() + geomTransform = shapely.affinity.affine_transform(geom, [affineObject.a, affineObject.b, @@ -255,28 +257,11 @@ def create_rtreefromdict(buildinglist): return index -def data_gen_rtree(poly_list): - - for i, poly in enumerate(poly_list): - - yield (i, poly.bounds) - - -def create_rtree_from_poly(poly_list, shapely=True): - # create index - index = rtree.index.Index(interleaved=shapely) - index.Rtree('bulk', data_gen_rtree(poly_list)) - #for idx, building in enumerate(poly_list): - # index.insert(idx, building.GetEnvelope()) - - return index - - def search_rtree(test_building, index): # input test shapely polygon geometry and rtree index if test_building.geom_type == 'Polygon' or \ test_building.geom_type == 'MultiPolygon': - fidlist = index.intersection(test_building.bounds) + fidlist = list(index.intersection(test_building.bounds)) else: fidlist = [] @@ -299,6 +284,26 @@ def utm_isNorthern(latitude): else: return 1 +def createUTMandLatLonCrs(polyGeom): + + polyCentroid = polyGeom.centroid + utm_zone = utm_getZone(polyCentroid.x) + is_northern = utm_isNorthern(polyCentroid.y) + if is_northern: + directionIndicator = '+north' + else: + directionIndicator = '+south' + + utm_crs = {'datum': 'NAD83', + 'ellps': 'GRS80', + 'proj': 'utm', + 'zone': utm_zone, + 'units': 'm'} + + latlong_crs = {'init': 'epsg:4326'} + + return utm_crs, latlong_crs + def createUTMTransform(polyGeom): polyCentroid = polyGeom.centroid @@ -316,6 +321,7 @@ def createUTMTransform(polyGeom): directionIndicator)) ) + projectTO_WGS = partial( pyproj.transform, pyproj.Proj("+proj=utm +zone={} {} +ellps=WGS84 +datum=WGS84 +units=m +no_defs".format(utm_zone, @@ -345,11 +351,16 @@ def getRasterExtent(srcImage): srcImage.bounds.right, \ srcImage.bounds.bottom -def createPolygonFromCenterPoint(cX,cY, radiusMeters, 
transform_WGS_To_UTM_Flag=True): +def createPolygonFromCenterPointXY(cX,cY, radiusMeters, transform_WGS_To_UTM_Flag=True): point = Point(cX, cY) + return createPolygonFromCenterPoint(point, radiusMeters, transform_WGS_To_UTM_Flag=True) + +def createPolygonFromCenterPoint(point, radiusMeters, transform_WGS_To_UTM_Flag=True): + + transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(point) if transform_WGS_To_UTM_Flag: point = shapely.ops.tranform(transform_WGS84_To_UTM, point) @@ -362,6 +373,21 @@ def createPolygonFromCenterPoint(cX,cY, radiusMeters, transform_WGS_To_UTM_Flag= return poly +def createPolygonFromCentroidGDF(gdf, radiusMeters, transform_WGS_To_UTM_Flag=True): + + if transform_WGS_To_UTM_Flag: + transform_WGS84_To_UTM, transform_UTM_To_WGS84 = createUTMTransform(gdf.centroid.values[0]) + gdf.to_crs() + point = shapely.ops.tranform(transform_WGS84_To_UTM, point) + + poly = point.Buffer(radiusMeters) + + if transform_WGS_To_UTM_Flag: + poly = shapely.ops.tranform(transform_UTM_To_WGS84, poly) + + return poly + + def createPolygonFromCorners(left,bottom,right, top): # Create ring poly = Polygon((left, top), @@ -752,7 +778,8 @@ def cutChipFromMosaic(rasterFileList, shapeFileSrcList, outlineSrc='',outputDire createPix=createPix, rasterPolyEnvelope=poly, baseName=baseName, - imgId=imgId) + imgId=imgId, + s3Options=[]) chipSummaryList.append(chipSummary) return chipSummaryList @@ -766,7 +793,8 @@ def createclip(outputDirectory, rasterFileList, shapeSrcList, rasterPolyEnvelope=ogr.CreateGeometryFromWkt("POLYGON EMPTY"), className='', baseName='', - imgId=-1): + imgId=-1, + s3Options=[]): #rasterFileList = [['rasterLocation', 'rasterDescription']] # i.e rasterFileList = [['/path/to/3band_AOI_1.tif, '3band'], @@ -808,10 +836,14 @@ def createclip(outputDirectory, rasterFileList, shapeSrcList, ## Clip Image print(rasterFile) print(outputFileName) - subprocess.call(["gdalwarp", "-te", "{}".format(minXCut), "{}".format(minYCut), "{}".format(maxXCut), + 
+ #TODO replace gdalwarp with rasterio and windowed reads + cmd = ["gdalwarp", "-te", "{}".format(minXCut), "{}".format(minYCut), "{}".format(maxXCut), "{}".format(maxYCut), '-co', 'PHOTOMETRIC=rgb', - rasterFile[0], outputFileName]) + rasterFile[0], outputFileName] + cmd.extend(s3Options) + subprocess.call(cmd) baseLayerRasterName = os.path.join(outputDirectory, rasterFileList[0][1], className, chipNameList[0]) outputFileName = os.path.join(outputDirectory, rasterFileList[0][1], chipNameList[0]) @@ -1060,4 +1092,6 @@ def createBufferGeoPandas(inGDF, bufferDistanceMeters=5, bufferRoundness=1, proj return gdf_buffer +#def project_gdfToUTM(gdf, lat=[], lon=[]): + diff --git a/python/spaceNetUtilities/geoToolsGPD.pyc b/python/spaceNetUtilities/geoToolsGPD.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f80857a460bfe1b6606d18cde3f7ff0bcaa5f9b0 GIT binary patch literal 26853 zcmc(oYj9l2b>I8W;6ac80fGbxK6l9FlGr7e+~uyQeNn3=KyoROAa%iAE}^BM;mid% z;5>l27hFKXxMVL?D*2mb=V7}XC$StUi4s>_ieo3T<0$%&swkDONlHoCsiab=N~)BS zl~}eC<@Y~*?_dCuD^*lRmYimH-+rDxea`=!?t3@?y@9R2IPj~tN-q6xkbghQFJ8_& zSLScvs?OB{HyyZI&Q0fBE$^oDF3_`_t9H5RZYxK;$4&QS@m@FGo5i=d=`C5j&rSDb z@vUxpTNdwk)BRa|z)f$@;)8B_D2wlK)58|eyXsC?+vTQrwZ}2yrbp~qm#gk}wLNZn zkAK$fs-v#9*G=#B=^j_z=W6@i^nRc2b=3o|cF;{9^yw|GddStDaMMq?U}Y@py5Ob@ zS^Th@KAgpmxalMAX1(79K~U~L1eHMA%mG@iZj4L0o$P2D~&>}Co@*#_S$d#Y4$XQn| zSmeXj%VBHtC5s=i_{$c5(&DeU@=+`Nsw*?`V@^XoZsUKgBWKKV&ShhM+LfQNzf^h3 zsu+JzSJj<`C)|VWZu%oXX_fLx_W*u+-G~O2wyBXQ@8&=1%BS4>LDr750;k;r81#)S zlkq%fokA!`^n!Z;Tc3Aj+JQb$=B%6Ra^(-(l*e89C7beuE5B@!3$6?uUNN$~YBRlP zIZWy`_khiG$&$OQz&R`MrZ&~o*hkobl^^ln7`xCVT?BLGVl`2!5jXw6bIr-IK##i# zt~V<6#3y?#SzVcH)O|)j8CRF8N#%N@BX=vgS1R>rDrr{gbH1c@zm?qhKy5A*n@Ocu zjZYJACe0`+)Eh~mQje2jwHlRA7H(Fm)xw=-RJ>P+?iZ^|#iY`x7fOwCRLW6DgV%4I zUwn=rac(|vt4_<%%DL6RHBY)#A~}hjy-?y_-tFZ{-6?P0Kgqk-NKB21DDl4`pu=(@ zUaHlK&6PsDP%K6DD0%x#VJ>RS#|!|%G?Z8<*2~4XP%b9Li_Ky!I<<9c!TNzXfjB1Vh(PZQNQl(mkps~vO%mVZ& 
z#Kqb|HHr)GRg$}ftkC6h;oP~xv(FdC%F%3*B`Tb|(Oin2wnh`J&{|332V;c=meDhY z$T_!APWnxCi-z9eX ztvG7Nr_0q!y&To=6qjnJOO3cz`NXA4a@xp086|Hem1-raMDff-@qTpW^ukJVw^4ui z@o7eNqtU3wm#$BoqR?JCsMHo3&15EB(V2AI6xh$hSVVDsgZZ9dPi`Ow3pr=pC3euy zxa;SMY~Zd02Fi=sV6FFau0@>94~ykoGt?HF?{@P&S)?~*O9=>Au@x8(!mO|+3277A%}_s{PsOc|=T$x3u*e_4 zuPS}HR-B9AK{$14Au3g7SF)^AJe;-t9ovISR6dnGD^%D-(JZ2)T)4B6@xR(DG~!bW z#pLd(c!AZAmXon8<4!TQ4K%i4fKNaD$>(lc6BTIW!eK3($dwMQA^{hQy+bYh*fzy1 zkWdRHR%}aLTVUHN)rkETVHCCxvXNLK8-+>CX0bjOnIvUzhXd3#krHlK!#foW5!fb_ z$cP(D%~G_PdqHDJ_k#E&+XxcBv4+Mj$PEYm!Be@d!NFi4R}!s61O03p>?X2q8^FMU z4^cJt}$BmO-m|36)Rf+~OJ6>T>J>

7(tW@FWC2iK4`fFKJ zT|cI&mYS-AIJ!G(e(I-_aO*hi$8%X>odlwMDZW1=DI~FG|DvhM6CZb?gJxq1Zcmy^ zlx#E$aih9qcqLxCQ>(ix>U#3(LsJ1ZjA#@X6z;;5vS?aC}X7 zE0uz;n;*Y1bG@>>A?H%mIDVU9Y@Ewa+m*T*Z`5ZisOnO5+>2UkG!52u7=!8VCdoql z%4u|Zt;U8=R!$|21!1`6DY8yi8}+$Tqn<=@a{BdJqr6m&&b?iU-n-sx+^I&jsic@h zf>6hr<#W%Ued*;h#};bmo`3N}A9}vPGOX*Z8WoWal0tGf>RchMnO}jJ z!CKWs!nuQxkS&&Sh!*0xS6+PK%vlru6^6aBBqTRczE(NDN5ul%R zhV+XPThf!jSIej^#Yy2#R45tNEgV^EXNI~*POi(LGjeFTqu3FT--Oso&DiT34c)XB z5N-=`?Ufm(Z5q&b94C@3Leo&(aKzo>LR4Kb#r593#8g?eQ7p$MKNG1%1~id@+%Z+R z#F>c-Ip@?`(Kd%^xl(iztne<~-)L3)D+okblPC?knuT!lU-6<(jWZHPe> zF>5^UH7Lzd$keB-2IAW+?6+{h!tHLZ$BdR#xoU8ehLy*(5+)0pm4*qwhLEs^T$~j| zx7}WxwVXG6j--y}J0q7uD4N?MSb5}qB8=rL&m@54STfiIMbW}L7M3lHtjp!b?5ssr zhYj@@msg_ZxFDxZ9&0?Iu;)47diSTXhM!Q2zth3_f`u#*`Y+b3% zWEk1Y>ikmFIHP|N@=^8Y$@BuNrw1rLk{ikO=K2v`*&jfnXdxF(n}=3N{4_kYj(HG# zFb^TjLkRN_!aRg94`G{mfQ?;jzzxh3ewfjOFDZCg!7B=0Rq&buQ8<*dCX`Pqd|kmu z6}+L~yaMfuaDre=vkZlh!;1@wju=t_VWVm5FFeNAbvl{Q@G9&^tynb zcVA6zpp!R1^XgPP_|DSoEE`0liR$;GDqB0eLSjrgrW%&V+3cyBSV`lmxlmc2sMI#J zZuo~MQFTfl2wSKz3U8KgN7&69m*)k&&o`F;fSg#_E;opZ@5}WBhjU#yGx{rgfB2&x zl{6Va1aX$FNFJpDZJVzfZIQpYZ%qEOMy$xYtVNf@2&qFMnBO~nAUjWaAN>&Ip#uql`@p?akJXRVpXpKj8GNrYI6gYiVHJw zvQmva70GsElA9)Hy~2Cz#?`i*_3~dV*I0;+)g<#xc;Z>p@B}#dWzj1wL z>c*`r7fk5SR*Q*6FqQ5a90hyU&lm@n8n{)xN2UlSGON%SH>oYKBWtN62D{6~ez#bi zy;&*KjD&4l`GnEpQl%M{6Z2`xY|Y=*10B# z6DTZ}{jeF~)y>=xnxtP5_QFLpvb^r!CNn-j;BawDqm1_U@$Zh@q1=|CWeGn47StxiSUFU#9Sqfbn^3lVO%Y%|gNmdrCvoPs?Xqk-Dgbm|%>Y+k+M! 
z*~7Z02+6b zFAHL0m4}LTHl-@-m`|$dG0Yb~$+HvUrgK^t)~9g7IPjd|)dJi)!})3)ztJwodKONI z3C?}CRp9Rl}VgdjPQdQ)ogYFA*b#G%1!w9+}zQQL1&O5k^zyov~A5 z&&Q&W?Nsk=l+!J|ss1&h^X3FIuW~3Y5b89?C>xd(D=UzTXfwsiSLjdB%sa30!w8Vk z+@MTzPfF>nHYs&7BEn6q6C|+c-gTt)HV^O^9>XM+M1Z{ODTV?|$;(rkT)onR8ne>m} zp#n^oCN3^(S>!lyEXRRh>)OJI*~Mz&D&z6ziKG(5i*5vOI})UF8+yxGhrA*n(Pi1CK|qkx<&Mnb3Os!QK%T>mJu$QZKx@=BYqFhC&w=?H{>j@ir)- z>v*_Z@uOm6m@%PqVg$(4m|&0JDdxpEg)c#enx-Qg=umktz5m=_f}My;DgD0tx;b{Z zM^ZNrTPUsy&k%UzB^(^e$_;f+ZF??S#;@Q{^TF@BS8Dy#iuWSYad=FnKc&(J`Q_oq zeT4fWe1S~w{TCxM$cc+~(E*!=6Sa$6{+v{6nvLv;8;$WLe-omHlh2R*deki0Ok!Om zxKg|mRZT^eqdQA;_JGFVhL269(=SfuB}Zejtzq{OiX?0f(_G-d7_ z%$XO;6-SNnndkY=1pzs-7C{@^(HG)pGMw(xdH(v(`mdv)q zm4IRR0|j_hnsTd-X4e|p*3r;bHQoHKTis?2S<)J!VUGVS-6GPjb3nLV8-Qui+;#z@ z)_}zajT}QFheo>Ht!{VoJEU7k2CD;Zepp#bXy5v_ZO|>M4nC&sp|w_9JKQp!y`6R% zw!_WuvaWJCVUM^LZIdvstxv*;w{wQ(FFF00T_i{@I{4l2L#$G56hW`NAUydddd6+s*G=H{ALCn#$^k zn?I0FfN=$jZ{%(&@1P#G_PEtOZXQRcjkq-`mqm-QjJoDzo^bvN6~lX>W2}FjTeVK) z*%;6ygES2z`SfBvul5R739o6FYyMeI%~($R(Fk3nZWQ{;+Faf1+sqnh*HhoG=GXdS zteOaY`Ks;i>xHbfcGkusO2a;^gV=}O5zz?y-8n5J%e~vL?C#v+?%Yk2yxZBZlkpRT zYo`>2rI?#m8}`59nDx?GFe{bR24znrmDyO91tN^==}}vZOODRjg+H)6=UJa6ZCBk> zp_tv&3;!Vx{6#GZ*z4CWPu}n@xWlR;<%pyHN^u3Zz3f^NoTcimh%lNB*Ngh_A5kFu zT?N0VKw!-Jq|$`l?m53=(pq3;BI4eDq+454cF)+nIL&CG%0aD3!*RP()KNfF@|k`( zQtt2FyOWwNR{tV8Yl#zAxs|eEPeikIQ!7)Eq&&)^xnfiCZKYL0vzO8tUaUmb@;FX& z?@y;C%W1ci)>J9eJZn$2KG!bZ2qRuy(JbjRB^NRst!4xz(`uORp>fd!YG%at&%`A> z%SNo{n$=~m8@+E|3*S-3XsZ#B_XgA(lWMS5P4i7POW`-j34dRKPNKtqs^AX^tZjec zd!}lOE$XHjF&g@xsgSfRjON$UR(4HuPM9{;sM{rcmTtQ!tG&`bTcjr2meS{>Y@9C0 zx>9SEZ8}-K6fLA}u*@h-SFvHvYH@7AM76AfWoQN1=7~I!Fy`x)vuadlr_c*)kbMZID@Ls#V3!K>NTKm$g3%V$T zct-J(KWrA`uzws$=bS)GYjeNhO3M#OpiiY)sTmFdP)5Ptwu7!dq2+!{Y zay?<=13>^P$dugx?1CP?#GF^+i#e1$4jWH~PXsjII_&BvBr4~@5ngc~ajQq9Sky}E zNw@l>DHD$x4jXljy5+Y$btXlflrB572q*w60~#?l#sH1tF9s?E2=F9)xJ0n{Mg6@= zr>&<%iRQi>kf-_m0MufZ3w8`qwYf_UV-^%FNCrm9F1i40vUUjz#d7U;s|RKHV2%LA zMt8|$d`u9kxts&-&9fK>G%mYduhgM51>_;6D@1w*0wZO1mJK0o9SFi=Vb*lCe%xCB 
zYHsxy6<3e@AuoSDAU$#WYe8$wE&oP9{>1HH4O&mT)iL$5r}d0mCDB6+b{lZbgZWjm z4jS6{^BfVCu&IpF1QT4P+#!2R(L+wvY%HTRx_B@z62LaJPP0@!VaD`HW34yyt0x^m zYIUEcXAqM#lR2z(^BYF*C=Vj?^d57qXVp&Yl$vUtR%@;2Y@Yuv54I9*?5$2H1@I;H zTuL|ikkqqj3Xn_cbej5L3a>t;72eZ=1-R=oXxsG%c_OXng|c`|B!M7i#=`crOSg=J z?s%`4jamj0ap(a4;XW8B7x)Cb4eNznfI6@0u0HSH@9k}!ajQqo@pDEa3l_hT5Wvo`)R<0Q!*0|gzmeq+bI;b-Ys55_-xCkipyzgJ4GbM;>u^DtM3#z zN-+0TJBwmu$#|M*6e`K7Sh_-uVMO}&EQ9veXH@F%tj{Pew=+DJlG5|HJ!Q+C=Up(p zN@GgbR4p0@^OY+mv)s~`LMj>B;mU^{a`~!4HrW*k(|dZG%apuGP^rybE|1CW*>R!q zHX^umFA-G*Hq)TRYQ}zk=w+1V@uSnS;j8e=YE5VwTw&xe zXPHrOsrpX(?=Abc+Vtsa^H%u%Y!GV{S#tv0YQV1VWRY7w0ykz6@1oD(pV3Xr+DS|! zfzZx1Z$cRQfj>o9qiV-9F|y_9^}H$n0UUbfZc?F~zz?2cS8or3)&{unDa8C7MD#dku!Xxr`n8wEI((m2}y`h-*c~oUeSWpJ+r#v}UQEJ4v+L|K4 zT>*TI+bRW*9W6cv3DQU>66Az)BNC)Uh6E3HBEbcLEFnRBGU#q`)fZuQyK{2DkAueM|G2%B3Bkp>9iD2=Y`fC^w zyah)5m|Pu98X(5?9-1;}h@uB7kiq5O4rs{2vb=3*XkI!G4aelI6K>2vA%rKDfkM`R zR0awGKBQ7m2n>jeXE#}(mBX$`zllWO2MK9_>!vRjZ(;0+)Lcmv1*-T-o-eE0?M z29V>UDI;RA_j|XrUUa}~o!z}?!(IH79DhZ~o8(W8=^&sJ!U-#Q94D3V7YODIh?xTQ zN?l+eG|m2}ib=hD%^|fFzi@3q=MC}LLFMyHk*@NnE4MCP zn=D+odi};NbNn18*IchE@(ZjxjvtA3#68M`^Bx2jqN3Nuq3N7nw~QQ_5)u#Kk!QzaZEsrVBFez^bKfZ9iqoZd#D)!U3I`LkQ z5AKjQ+8sWw-Xl7`x%@SmZPbO1j2?f5-AKWawj(zxA4r$!Z?(iv~U(#iwow==Y-RXb_znY=F6cUkTM2&-+>FW9+ z(VRSAEQW@K$uH!_&~~pMc}KCd4DKO@k7nN{tV1;|@$clYul##`)~tttxCXHkK`xBo zU6eQ2&yJZucixu=@R&!7(GoD}psN#KpR@ey!dDZw|BCti+8ouJ7cU($7_+vtz_CtV z!UI?bi9MnqUY0f!QL2+JQmVdP944X%kJ&b4F(vs>4w}a0q%j`^i{H-Otguj{JsK1)Hg@LBc(YWS@8K2D8|ur)h%tiGdVI@TQGGjaHY+0-!Kx!|Iv+3XlKP9HW1|A@#&b#LnVdpsDE?DN_+FAZagZB5~r z{xMBt8q$>QdX!;CaH(31W6fv{bA%^o&c>A5yxh+-4QtMXj3#DTzo?GC!8mMcrCOPj zWv({Lqoi}1(B5LQ*4H%oBb2zmriy{>$tkn>kJYGn0b~D?{~XR{t51V8J7Kc;(~QII zkV!19b`;|@kJ&1tR%Y!p#E+BL6PzLTT-Sl%2@XTL^Uv7RA`6qKvpW!Ed8s`^(8SY2R zNmH00k{@4J*IIV3fsLPv9YiByhnE>a*V~=$2efV8>CA#c9{4s?zkReZ)M@XcBoR)X z_U?KmIp=QbP{V}TyGY6GNJX*|A!LUFff8JSKy@Om{R4lg0MCdfV)9n@FuvFJ>BDzb z6k#fx(--qLC$<%5?A~C;OxbI<9=y$I_OotO&=WSv{th{dP~OlNX2yZ69Zc(kcEDj$ 
zJLovPK&SZ=iFO3V2eyC;YljzeW|+2*Hy$n#EY9ig&E@~3jwWt@$0Yk9yZymfk+7W4 z*kufkKp4F!S#Yhxu64u?G72)VP+PJwxh;Z#@0BK{0c=ZNOlA3wn!U}L3WpjH3SyGO z$&9D_Q?kItDXn7`!|-ikg_$#bF9!#T^^*6rfUPR z*EV4oDP%L|aEia*+Y>DQA@;W}aS2Oz3VvjTzbyi1iK;aN=ijQDyvA!*j_I0p?|NVw*)`+vD=PRM1>aU+wvINX zw{^^#Sa0i?qqyzZ_1BMO>ZJL|TsufN(P*N1O+HHK{lu1zjtA`Rk~n-;6Z~UUZW}h? zDdnA3aDc$%cjj(2Z>fZf89tWkC6yXK=CMrUh~Jud(M!wzpB4YlXUy6$x3g)Hi6|~L zD`u&B@2J$M&Um{}C=eQYZeb^4F`||_#IX5{z zGrLEhcjK_y9H5f^`?=`mkGk7dla)>F8Z<&h%~!szKsKsf>=w%t_tIRt-gb*}(hsqy zfY-Hn=yj^lMjZamL#hvLjhc<5h-*OFcr&RgCRNu-q*H{ii;x0kyi%ya>nAug&d*+( z@%jq!BW#FpA;f!Q+DEcKyZGodUnyS9zDy->L|cN z!N#mv@|SK@W@mlQdyQD*xOV-<<*S#cFU(94oV#^lstp|l?8dKMy&hhenmYHf>lZFr z`A#r-YyA9pXh(N9E?>DYq1twlJA9e3gs%|z_Z2SDlHnKtOULI_H171g!ha=*^k%(v zFicne0P8B>)QQU8_y3QzSXw`qI|PU7Pf5(D-QT{d`QONCFB-alS8dWax4)$?Iq7v^ z`NMxFco#jxU;A7hY6A-cUBvd6dGtvsc3luQ;DLD6#~SR@o#33c!hKdaA3UNz-XTHL zWX-l<817_PgL9yUZnq15O55#{WzUw7hLnAe_QXiFswk@_9i{U!oUmM zjD^|vPmaL@CV47zyw@1b*xU2|<7j5y*wO0=3fZM-QSdf>4O9prE^0~24j2DUn!zT&+yAF1aT<0AE+tq9$-g}S>&TI|3 zG$m8xE4^j^xg%b^j5qx&Dgq*WdaKG8`4G$3Ian>m_BrEkF~|5hp1Ll*f5e(_CqB<$SeGL5km{3O(8Z|D*yf>vhn zmFf2ha<<#eSg5T4ECl{>sfml$b5z*kmR}HdoVd-U4QvPhjtlV4ygDuR9VZ^_HC#08 zTyFwNnmi|_9ta_;Of$4Y3~~QkJ`$3PqYi!bO;6t zm^J_tNib}8b&Lb5Jd}^CMguYTSFrfEF*xip5>5o3Fp#sFGxiOf&uj;6&K3tDL$Rv? 
zdAbJcd&A=@C<$BQH9Ai2bylsUM;%Jq&hW^|u2ytBxvgfT%I*Chv(Z(< zzItjKF6JY&RlW@z#wxV7BY%secS3wx!ut?O-~Sb=Pk;}Kd~))#ed^1<4`&}~$y&<_ zq>rSwviC+C`k;$8sJ;@zZZr~E^!fB_VY4%9Go1mB8f~g{d2LqUF*ekzS@@5eo9o~L z4Tdvnbqj$WD%%>gl75za=-9H^Ome{3F!-0dkG4qZ=qYunJZn8`Ke8G#_9W^U*Jh$4 zF#ryn(h@HdM}BR!2s(d<9{w|fxISFnb2Jsh>;N$A3V&H*|egB&kl zE6!ViN4rLHJ94{uuEX;Y${N^`gb)Lplz4;4I{AQNMl7JK%*xh2L-l$So2_v!+caib z+l4Q^QzdFohWx)1{1>48cd(4p#1|nF!ufY!)VJ3c;wx^t2(4Ph4k$Er*FbD&Y$-mH zOJ{RmV3Fo|%z|9I6g~m@+TZmU#ts2>ix(ICKD0L_G__=nE$BRhTSiRs(%pb<1FCoI9yP>iv+frwZ&w2 zrr$)<)uSvc>(WG?GJToN+(U)HcA00X zkVnpVw*!;*ho<&`PX;Zl_`^{3<2|48eQytZQaQ4!sqFW;m(ZHM39mHssM1;q?{ls+ z{583k$+1uVnQkysy}F{;ChWz4P|O(qimGZ38y}yNyB3jR??G|{@)0G3mMU2>+r0Z9 zRZIdnw*L=>0kfp=s6y6OM@dr)n))oS-X;2<2-w#Q!UeKIIs8N8&#x+JVDf-sQd6N| zQYax9o>lO11y>Z9iknibqJZN!7k);;&noyi1)o>&1qHvX;4KBeso?J^_=7Trn7}8t(`Tkw~yZg5H9qAkF>lxlRyl-Ut z=+XZEzAb&*`nvkM`+EC!^&RTl(cjxY*gx2}ZFu+ab$uQr-~Q9T+L`YErR)j5ar@K% amcHYlF9!5~WVEMmYu{1j_kU=x|NjEFI}sHC literal 0 HcmV?d00001 diff --git a/python/spaceNetUtilities/inferenceTools.py b/python/spaceNetUtilities/inferenceTools.py index e69de29..9c99ecf 100644 --- a/python/spaceNetUtilities/inferenceTools.py +++ b/python/spaceNetUtilities/inferenceTools.py @@ -0,0 +1,156 @@ +import numpy as np +import rasterio +from rasterio import Affine +from rasterio.warp import reproject, Resampling +import types + + +def get_8chan_SpectralClip(datapathPSRGB, datapathPSMUL, bs_rgb, bs_mul, imageid=[], debug=False): + im = [] + + with rasterio.open(datapathPSMUL, 'r') as f: + # values = f.read().astype(np.float32) + usechannels = [5, 3, 2, 2, 3, 6, 7, 8] + for chan_i in usechannels: + values = f.read(chan_i).astype(np.float32) + min_val = np.min(values) + max_val = np.max(values) + mean_val = np.mean(values) + values = np.clip(values, min_val, max_val) + values = (values - min_val) / (max_val - min_val) + im.append(values) + + if debug: + print(values.shape) + print(np.min(values)) + print(np.max(values)) + if debug: + print('im[0] shape:') + 
print(len(im)) + im = np.array(im) # (ch, w, h) + + if debug: + print(im.shape) + + return im + + +def get_8chan_SpectralClipAll(datapathPSRGB, datapathPSMUL, bs_rgb, bs_mul, imageid=[], debug=False): + im = [] + + with rasterio.open(datapathPSRGB, 'r') as f: + usechannels = [1, 2, 3] + for chan_i in usechannels: + values = f.read(chan_i).astype(np.float32) + min_val = np.min(values) + max_val = np.max(values) + values = np.clip(values, min_val, max_val) + values = (values - min_val) / (max_val - min_val) + values = values - np.mean(values) + im.append(values) + + with rasterio.open(datapathPSMUL, 'r') as f: + + usechannels = [2, 3, 6, 7, 8] + for chan_i in usechannels: + values = f.read(chan_i).astype(np.float32) + min_val = np.min(values) + max_val = np.max(values) + values = np.clip(values, min_val, max_val) + values = (values - min_val) / (max_val - min_val) + values = values - np.mean(values) + im.append(values) + + if debug: + print(values.shape) + print(np.min(values)) + print(np.max(values)) + + if debug: + print('im[0] shape:') + print(len(im)) + im = np.array(im) # (ch, w, h) + + if debug: + print(im.shape) + + return im + + +def resampleImage(src, array, spatialScaleFactor): + # arr = src.read() + newarr = np.empty(shape=(array.shape[0], # same number of bands + round(array.shape[1] * spatialScaleFactor), # 150% resolution + round(array.shape[2] * spatialScaleFactor))) + + # adjust the new affine transform to the 150% smaller cell size + src_transform = src.affine + dst_transform = Affine(src_transform.a / spatialScaleFactor, src_transform.b, src_transform.c, + src_transform.d, src_transform.e / spatialScaleFactor, src_transform.f) + + reproject( + array, newarr, + src_transform=src_transform, + dst_transform=dst_transform, + src_crs=src.crs, + dst_crs=src.crs, + resample=Resampling.bilinear) + + return newarr + + +def imageSlicer(image, strideInPixels, windowSize=[256, 256], debug=False, immean=np.array([])): + # slide a window across the image + for y 
in range(0, image.shape[1], strideInPixels): + for x in range(0, image.shape[2], strideInPixels): + # yield the current window + if debug: + print(image[:, y:y + windowSize[1], x:x + windowSize[0]].shape) + + if y + windowSize[1] > image.shape[1]: + y = image.shape[1] - windowSize[1] + + if x + windowSize[0] > image.shape[2]: + x = image.shape[2] - windowSize[0] + + if immean.size == 0: + yield (image[:, y:y + windowSize[1], x:x + windowSize[0]]) + else: + yield (image[:, y:y + windowSize[1], x:x + windowSize[0]] - immean) + + +def imageCombiner(listOfImages, image, strideInPixels, windowSize=[256, 256], debug=False): + if debug: + print(image.shape) + newImage = np.empty(shape=(1, + image.shape[1], + image.shape[2])) + + newImageCount = np.empty(shape=(image.shape[0], + image.shape[1], + image.shape[2])) + if isinstance(listOfImages, types.GeneratorType): + listOfImages = list(listOfImages) + idx = 0 + for y in range(0, image.shape[1], strideInPixels): + for x in range(0, image.shape[2], strideInPixels): + # yield the current window + + if y + windowSize[1] > image.shape[1]: + y = image.shape[1] - windowSize[1] + + if x + windowSize[0] > image.shape[2]: + x = image.shape[2] - windowSize[0] + + newImage[:, y:y + windowSize[1], x:x + windowSize[0]] += listOfImages[idx] + + newImageCount[:, y:y + windowSize[1], x:x + windowSize[0]] += 1 + + idx += 1 + + return newImage, newImageCount + + + + + diff --git a/python/spaceNetUtilities/labelTools.pyc b/python/spaceNetUtilities/labelTools.pyc index a5d7411ef3b3798caf87f81f1c0357cffa89de27..c9dda54a418111891cdb06b0bd4694b24aee3ce1 100644 GIT binary patch delta 10020 zcmbtac~qR&b$@TzG;9NieIGUrLPCJ7(PANi0Ig{Ige(a!2+cR3837~Be8`eBlR1eL zJ82u+@^c(J^_JLa(k6|aL+UhboYbyQ|46fViL=O2yePI?$MKrlJvnjO-|u}hfB-3{ zr@;K?zWeUm?z`W+_r8yBzbQX@PgbP7pOe{ezVeVyBz|)6-Gxu&$#WHN#H+-3wHT}x z(iFmUh_MTYq?DF$1GS*L|p=pHavOtDdmHO2K8 zCW09>>uOABsW_W02HS+$qMNnPYG#kuv{0+%x>bi3#)@0@5?3aRD|GRyWU);bSBb-B zyB@=vtmx1ctCPi*y0}J|S81`eN&YI$Unfkj=3gz$)tXo@%r% 
zd^bg2tDYF>Nb4Ow>F-yEJC>WrT7< zg?iHINv|YXrQUHCHcWS6G?3XyXd*NdmJ!^_li66blw=FQgO3f7OQ+^hPiB_Owy6u5 zwJBNI6t5z*sztdElq?2mk-5ch^$(x+hkN}a6NXEDFZZ7Gs|jUYm_^$t69&_ zO|G8B^w4@fR~SEUEKR+gS0cAeW#p$jq)+88?oaP#KbzFy#dYPKBsUQD5?q8lLcY3p z@wq%aKEm)5MhK&-v#`cfL$VfNEdq=MCj7m_*03c}7`eh!Za7JBmggJ*qt^6pa8ONN^A?ouZWm9yHc>a>73rI_@`)QD%%VPM9Es2**`dNl8SZRRB(P60bcJl5Z+^P1uOFexgHv_Cxz^>4Q-`K$tAFth; z2ewUE{!xF}*sjjG*T_os19x-V-?Kg)5DEK+%!u&_%Urh^v4+NI=@Y}_ek+BX@JZtv zs=ls$&T=}`_v`ZIAEutE%W}x0>O#X~(Dj{-opM4w+t?tB)Q62NNi+5|)un4|38+0y z5qVxc)zp^$3v_P$QvIQ6&77X8s7!U=l3clX>g!A1mz!yU#?ypn2+sl{)K;eP9Lql? zJWu#Jfy&fo9~d7V^$!J1;{~#RrCx2W>(izxP3&u zMD{C$pz2>*CSO$}OKUd1PB|Lus6Vtl5}GjnlM-(beo4@ae3R7g2yX$dV80D54f5)# zZ7nN#48)cXH`zK$w$!VNSi@lpMj7E|xB4O{jn`Fb>(Y*yMZJs)%L(vLOpg1*!$G=(ooOw+tq!-|B;Qvrw1(tcYR8Hd^S0rI6>fRM)JH3>maU&bGmk!-=REM>3!{rb zD>eRt@R#b4uSK0}A5Q-wiwD(zx7X&;#4fw^9(iu+;g0(pC8-i~=ZPDdw2n_&d&2&a zz$wG04z7A2?=^Ntg&Y4tIIl9ijYXF(+xUnzLDlVTOxKGnQe)n7`L?>nJ0Vlm2j14Q z8jN7nau}-UWZ2pniVO#Qa2TDdzbt>FF0NiOPZ7t~dURB$=Z23FYxo(%+ zg+52oC4S!7=;IN?2a`JM!6)*pQ$4h9T@xalD7?N?M03O$M@G!Wr#6@-q7HFJidY_r zSiY*+Qye#Q=zvEvM67@v#}2WR!l!f@J zoQ+~-!opC94l8t*;{N*#wKJPw-E@AfqnW)N?Xc zEuz&Tj>xqvhS;`I#1P&>7l_G$b*wa-L|G8JhHcf0D5Bf~$Tnyy15~4?T%ejXwFJ}> z5ruJi#IY0+{&q?f!cC&Y%n(H}kBFkdl0>PQBpC5hR-#Q|Y-xfu!)Xywv}zHh98^;! 
z913lhf`{HTRf0jWuL+elbD^68hq6T$T}c^xg8C)1IT zIq{m~fCY=iqq>r;+sZ|(RYY4wutHd{Ogts_^Lqft!QO*N0peS>@_`6D%~c|{LReM$ zn9@SAHW6(TK^QUizgMmq%Lc< zFwIZKRwX6ZX-Th$dgEI3>UpB8bu(mD>oo!0LF4FgwFtrj*08-bxa-k|bd#NfPFOkvE8AsUrOQG|n8_U5ouk7aK)%qX-Hi!vcP4!5+oQ?X)yO>Tv)M2(bU#mqByOPUBWoDNsqwoaI zM99RoklAf_0Ci#HF5NpU1T!Oci4(t1!`}gO6V_<q|xGw&1ORa7?yCg#Ea`YhfLr*NWNm$rQ?9(RBAl4(I&{p>>RqRPBc@l%Y(2=0uXZ4H?MH>>W~VeLLk^_3=J;t4;-dW08QFkU&7|gS?KLNBqOqWZ1vW+%-8Kz#}Tu z1AQB%OTE~4o&1bizqz{1_!hVkPPuykbJ?_ips!Q+riM26t4BBQluxNYZ7$hx5fUDz zm3C0A?`d9Qv7LZVBJXEUI_0<1>7LLevgte3zAbh2Pq2!YC_SNZMDu=IVrO7t=kO^z z2??sZx2#T2WDU=6Y0Aew+qv$I;Q$(ROI7VFX`N1S4fdwfTjNPm+X-6;=Lr8p*sJz# zUA*GneS}?vJ%n2cw-IKtYQ^H`2zL?2)Dv6VSKLdoo3M!xAZQJ7 zYI^yn#>X}X#>hg)<8e&k5#l;MvAZt@%3u+PR;ZZ>T!u~Yhrqe%U5o45E8 zcEz0^4@}tKP}2cbx_w8sUQMEfI=X%9wV#BTF~klJ6NU*#*mVXfPW9kM!ynf4=>$y^ z_US7+*Sx+70!+P{u$nTy{7{MwwHG?y-w%2J0*Ht1iE`l!FUMHa~%h!@H-nJry1 zPiFDgQ2;u}kptlz@UmrtBQHg*-&rR;Q$stik#e7UY}e*uAEu?R$Bte$NAwV3!kv;##LoBN-hMW+T6BJ_Ya-^2|AgeU98Wa*PNK_#Mav~q9VTy>*%r4cE1f!I7 zIvsM);~*zPqdKHM=tff|>>W+r;Gr{3l_bZBCk7Zz<$Q7E(sq!H0t>|m6jd)Q#01Ff zbXLk73u~;wM3GsQ>M*TVZ#s$yv#~&aVM+YLBKd`7ok`PMe5yI#y!CEA7KKBLj55Jok@g_npRMn(E<6Hu{!PGA=t+gt({j?ljeF5IK; zFqRyA7mu3Jhez7jOxQx0_TTyhw$I}2sIYx>$Vz;VO#5v5wK*;u9=lQQQ2G0cXhHw!To(k5&P%rCy z1dEfTj*&V^>P7+`tZ@@5+v~FU6pJxJH;Z#T91k$#X4ZZ{ z>K0O}WuPqnSyHzWZX?`II7gUvbjBSl77-NTPQvF1cM<6DjL(z0o1lHp{h;*KG<+3~ z*oAzY@erjS&LMu8@HYVUsexsA{|Msp2Y4I3-L!8?oY8MOuuUIFg~%z8xeo1@oHKr@ ztqwjV;f&_EqD)7TqX2#-bLx}(ccfI^f-dyV>zjPz(6?ycY1~1}`&k_m<`n^~)sV>5;xLx%&q>#La@CCwO6Z9(<1}dI<7VlG!9x6@O zp8HGc$A?Or-Q<3S@KwU!68?@*N6>8=NPSJY26ts&R*xO(7lZk7zj|ZvUpgP4ypQl8 z;by?hiwqBTLm2VAK`w2ZuX#h{o1NI@k56SU6(KKRiC)-;k>zhtHXiDPTdi^ zG#EqKi|4v^P&|paX^khH6?4ZT?}zI1*VoJ$(4$IjD3cGV_8b1KosWi&!-KfV*Th(8 z*t9SGwQh0X2;;|Gk5>Qv$}?0yufMMk)yi*7{b=YNB!%jGN8D9w(JkXeTQlvYbe*#SU7-53xoT}{Me5Bu&x*M_ zrdmck^OXCokp}rk^~;e*A)qY`nRt6@{0tI0$HHrsA+_jeX=+OrkvgM_l z{%`u=)PBuLg0NxUtL$Y#oK~r7Pw@MHMm?GA>~iWcUZPTFH1jHpeA+yV*lb!o#>=dr zR*Y8&7YS4eE6CSKc$je)iOo@#=DYggnpD 
zIQSLxkvCQl@4@@8(NH+xkJP{jBV4&aN>C(=WwFCYyDY?q8{dOZ89)%GXuMD_DiBf>n9^zZ);!IN_uSW{F@nLtpFQM@=b{V5;4XY2vdgo-^6`aGtpH!ha&)+_DGXU?^Dt_WIu|M`OHBm(`1hPL zMDkv|^olMtI6}CI@F~JS6MjNq zykNXd>O(>AE^=*lr8_fS Ki=0``%>M-nX#FMt delta 9354 zcmb7K3v^q>nVu`hPuZ5@`2C1qv7wC+=M(#zzAO@kzz}Uq$CEU zqeGJdg-{;T(gR%{WofsCQ<^|K;F!9lnt_@2Ge$QK3ew z)L@k|ETyb0H8Nj~R;$5kJMB;-HEOU%F%NRB8mvvpb!xCqS=q|UQKtG=xl!b~A}^$zunLqlPs(_dRVbe#NiGsom-00!t5{hj>hv5n zxL6%RQ%WVPOr1_sgU!k+S5}2+Dn-+xtSWU{slis|Yg5*IWmWS78C6k({-7Im>U6dm zOe|4Wy;!$P6=*ox+bDJm1<@=trAe$il(krlmrgNmo@Lx3#>>P8-D{QpEKeodB-v>v z6Utg5=`LlpOVuktHMo`oy35n8f9ZI{naSDUJ>eVmM#pbVdm_y^pxbjgGIkuldc;4hug~0I6p#Nf z^KN5lJq*nS01qdTSxhJ)loAREWrT7<1));kk?mPiMR7hL>I8TrM}5Ol(;M}N%!wM! z2HG?d7805WErdG#Np?fgVv5ZG556`;W;D~IZ_g<=I>sN#sY!F@(VmYq+w{wM_m>nx zjOGIRe9?aIHNNmh-@%~i)@SnX&S<4smtO38(x}lNy4Ed{_SR7=&6s$$H0WXt__n*! z*gXD{JHugE`n7`ojPXV;xLCq|Dy0D72*E*^-Ub;B_~X(OLElK| zsLvds%_t#A2oa7FjsZOMUNoB!J{~>1BQ$c28DVCWa9m-~%@fS!bn*pH^!p=G^Ca`e z^yZQ#_q7l&=DV|ng-%ww!#{jv#COlQQF=ofdUjLA>I}Qt`k9JFmvYWU*~vpbOrc2B z7rt~X>I+8gd6>^!4*kg^SNHpazFpCPWjuTI%F5pvqk3`Geq+D>Y*mj@ zr+-|vz&T|y>OWS!Fx%yqFU)Zb?AYNS>j{Uw*K9-w|7m<(b;2nB93Oxvsr&{3UuAxe zutL95Q$N&0@iss-9S}L}g|F|RP7}V!j2yt;us`aPs_oJ0Wo{pH=KuymL7&;r6m7Tn z?%o`X`VRTR=2m@q?Fyqxf1!5K+(((80f>Z$hpdQsR{vY=eB)dCx3%pvyVI;cQ&(dA zdHkL_r^E2;uPyjTc>9Wm)ka8vxnY4(tbfq3I5mHN*HD)svnZg;8Y9MQ`qsuJ8L!|m z=BxV2#uYQG_UOwOmKsIl(S^S^dg%@G1;Wn=F9IU;QjYl&)8_~;6Mjyh6QfxG|ETwn zZ^&<%|3URH^fwpP4axkRoZN3SM`pOp)YqB(Pr@63Xd2*J|4}&(eMh%8Rj+-QS)9VjEb~3c=KF+K3DWD|Qu;5#hk%RC>i3%#7^}zKi{-7Uh z4H+Nmg0{BVWAK%>TH~tmC)-vTtv9ok5^QFK0qB)EF+I&&m^`D~+Z!F`m-Oy-E8{_G z_Up&mYuslcUbw5xdS^$8p~w9l_c}__F~AaSL&u{>k4JZeeFyzxrlqTv-|zl4YowRW z-wIWHHTJ3skH?1+vvC6rCq>4&;Q2 z7pmAilCdLMD)OR=fjWsCeqW5h#GEXD08N@lV+8b=O9k>s(y(Og+F@WMwRk3&RX)qi z)S~X0+QbEo!7`G@$4;xgcICE%47dR_@Q6c&Z!uy@1@e`JJ{PD{;q6(CZWk_LY@C?x;pHU%Y%2gD@1s9_i zhFF5LR;DcNENL7|A**V7mQ0gm)_loIps^@6Q5||yoh&o9$%y4*Qmr%MV8t~mUP-ze 
zuM$6i48t4MDu%2&RTe0bc~z6@c|Gfh*Q!{hXuy&eBx{XTr8EuHBXupcB=s2G+ei?4%PqkP`+-Q9I90Zk$;*q*=sFeXQGxX;DQw`r)PkYM zhjG)|t~%r0D%OqT{0=abfrt}09QNUS9XwQXUEKUwU%qi&_G{4k$^P_DHr8wBrRNIB zU*QU+`h5qZLbLDe9ZL77(PpT3o6)Vm*;}js+`HCr>r46$7&q&?`{t+nv0RjXzOO&s z{0B;vo3`!wA%ti?z#9yPqCD6K2fU*;arJD3-X?TByP0niItchCPoz8x3mvylbQ?m) zaiII6{{E)A`X`vh^V^2dD43+rw%FzmZu5@WO!$bNv$-oH$y|FjH}2tKKn_|bd7d)Y zlPR*}_ThUSr5?h>@yg`ofE=yNCn;?uY$DuAP^`ez-`m`^(j1^-7hyMHJK;9M?S#oQ z-4~d=i|{4Fs9v+Bz3pC#lS~&JN(s03kYP&y$3{nb{Ubhc-Y5FLEe~aya?~i+qgy+R zNx8yuZGP=WVlyZGVu`e!^q={r(%>PAKJw za}bl{c-W^;Y%`4#{q(lhS+uyd0L79gq1~Y!-pH_bWY6{ue?kqBT!6~SH);)+(`97f zuSnlAu&s&`{v2+H%P7EKsZk2aBY(}>wSB&B-##xt*Kiqmjy&Y#8E!-G+g@i>k6*Wa zk71blXFGa}uVy3pgu@p;b9i!jP#X>fPx!*o$MvH-t1CI5_iyjtF3i0Wi3#R@&Y?fp z*!>*u= zg{S{Tcslra9_u~;PDGxbND*}e3UFV6PkKpMT2vBWcp*o1Cf;|?Ip@Y1$5g^}g6$fD} zlHIpVyoaRd%lMNAHiMsR`==b#FI{|i?9ayDy$EYN_xYFFF}eZ~7+z*bMhlbi7{iEo zY(F1#TO{n0HTufS=DXd9#I=aT739{tJaziaWeYn##{4sTF^SDS!Y0B*95y#28A*dl zODLNBJ*b^~8jLNveNS;>E3NwpTnZAOBQ9897e`Q^_~b~uHb-f~Fl+`X?IP?ZTt;{s z8J-H}9wEr-`l7+u3}K76Z>G$ntl=1evDge#l0!LfaZD~z`=an9B|8k$?q8U^mThEagX zl`)>Ve`{L#O)!*UklR1=b4bcPmU*)tInc7+zP+Rp->EK=A-I(~xkTp%YJQ%;{chTQ z0;hP_%h*d2%`k7MHK`tPrduYQ2FV0k6mawDY4 zYf=x!MwyRugwiR^Y`s1*R5f!~YSteNmEg*9&Q%Y&A7G&=mFw3>8dgh9tkdNEjqNIW zNm#*gY@RDToFcOxM%@#-+*>{UiO0M>#!)@){ZYG63wS~WaA_#qX(Z&e?2APih6Eq_ zuk4MC)cyM5tLvv%c73Ktw+z=9*NqPhf8;QR_1c5A<=v>#kFV{R88fI}-+M4;>EtY0 z2fOn!7@;JrC3TO7X;b|v(?+))sYN#oJp!;6el`j*4b?RaKzLh1V6|_~hu)kwV9FX}Wdf zf%8rMruBcFXV|`%*ir~K%y_9by2bHIntpcl`=8>V9M;6uVV=i9=F4=;q;Fnhk}K1b z#S)Xl_3zA}U(BBqULn-L1b0Q+8LFKxF9x3jtu5j9$g^)ISQ9RDv+x6| zm<_&EDp|OkWflk@vr2er-U&hnPX(MRfZj?70N2BcPOm4M5<;l)RU3X%p_yeA4NJ6)9IQ$ zi{D0-o+5zF8rD#Hk`wVqgl7q_5`IB=gD|lH$u<`m_8+|_3L>NMnw?Y3JVB#Lf)-+O zJu?!Vb@q!ic`7HP`7X16L-+(q2~=U}+pf^@@UU-^sF^p?=qAF=gj)z_08s`)Y#es6 zp@?0scuLL*@zIc;bNmnG@37df37`iLp0)$ks3z${2)wW-Dv$A`BC*BiumvF5!8?BG&y8r9Tl|2HI|x zQrW@uDoS#l@hwWiuzx^FsLqVZAsYdAA$yug%$=Mo|BZ~vXeQE($Pb$G)Yx3(A^qxD 
uZJOK2)&DcLtuy!WY*(5w{V#WmtIw6`a%N|_dR-aWIj*d1XSQ>y&Hn?73T%l0 diff --git a/python/spaceNetUtilities/vizTools.pyc b/python/spaceNetUtilities/vizTools.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d864af382a91557cc7f7fcbbac440ae7178193f8 GIT binary patch literal 1385 zcmcgs%Wl&^6uo0NP3oo(P*Gb12niP1&@7NxAVhhI3JHoZMWPW><;2r4VaB#R6VxDk zRrn@;2eDwok`I7$#|0v>C&`S@ea^k-juZY^iR|a=ha)=sEZ}($%PMRseu6ZS5i}5F z0vZHlLK=i5*cOmpph=4ci{2Gtn+6?^L()rRA{s=0T$gFEOz8rpElLs4rjN+ft6^=%!lHy&R4{`J|fA(p+3NJlggR1uvDsu!q2@d2F@)h&-hj~A&zfr7T{wnqU_$D@$8t2TGAu~{$8 zX*drn&x~7j+dXkIx%60ffoX%bP2 zV9*hYdY!6u45$kZ_Z73RTD)P=m^Y#?yUGu330UhLQ?Glxt*^_O0U<4|;rc9QSO?(PO3M6O)W(%su&5hSTF?Sw2gO zn4^6%TvwO)dpef6Z^bLPxsy^_ubKS@Cq`vnb7GQ{#`Ie=V+|K!{NCW1DU%FRLY2}o(M~{;C@@c=Axwm=tuetwT zeK^Q-ZFk_obQd~zv8U5_&#$PxijBmY=!$L-i6}TPF5`PXye=ZK6x!@^q@@U2fKLMVCBP>!KU}0$JiLegFUf literal 0 HcmV?d00001 diff --git a/python/testFile.py b/python/testFile.py new file mode 100644 index 0000000..3d01856 --- /dev/null +++ b/python/testFile.py @@ -0,0 +1,22 @@ +import sys +sys.path.extend(['/Users/dlindenbaum/cosmiQGit/spaceNetUtilities_DaveL/python']) +from spaceNetUtilities import evalToolsGPD as eT + +import geopandas as gpd + +srcTruthVector = '/Users/dlindenbaum/dataStorage/spaceNetPrivate_Truth/AOI_2_Vegas/srcData/vectorData/Vegas_Footprints.shp' +srcTestVector = '/Users/dlindenbaum/dataStorage/xd_Vegas/15OCT22183656-S2AS_R1C7-056155973080_01_P001.shp' + + +truthDF = gpd.read_file(srcTruthVector) +testDF = gpd.read_file(srcTestVector) + +prop_polysPoly = testDF.geometry.values +sol_polysPoly = truthDF.geometry.values + + +truthIndex = truthDF.sindex + +eval_function_input_list = [['ImageId', prop_polysPoly, sol_polysPoly, truthIndex]] +print('Starting Eval') +resultList = eT.evalfunction(eval_function_input_list[0]) diff --git a/testfile.geojson b/testfile.geojson new file mode 100644 index 0000000..8ef1210 --- /dev/null +++ b/testfile.geojson @@ -0,0 +1,7 @@ +{ +"type": "FeatureCollection", 
+"features": [ +{ "type": "Feature", "properties": { "df1": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 0.0, 0.0 ], [ 2.0, 0.0 ], [ 2.0, 2.0 ], [ 0.0, 2.0 ], [ 0.0, 0.0 ] ] ] } }, +{ "type": "Feature", "properties": { "df1": 2 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 2.0, 2.0 ], [ 4.0, 2.0 ], [ 4.0, 4.0 ], [ 2.0, 4.0 ], [ 2.0, 2.0 ] ] ] } } +] +} diff --git a/testfile1.geojson b/testfile1.geojson new file mode 100644 index 0000000..8ef1210 --- /dev/null +++ b/testfile1.geojson @@ -0,0 +1,7 @@ +{ +"type": "FeatureCollection", +"features": [ +{ "type": "Feature", "properties": { "df1": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 0.0, 0.0 ], [ 2.0, 0.0 ], [ 2.0, 2.0 ], [ 0.0, 2.0 ], [ 0.0, 0.0 ] ] ] } }, +{ "type": "Feature", "properties": { "df1": 2 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 2.0, 2.0 ], [ 4.0, 2.0 ], [ 4.0, 4.0 ], [ 2.0, 4.0 ], [ 2.0, 2.0 ] ] ] } } +] +} diff --git a/testfile2.geojson b/testfile2.geojson new file mode 100644 index 0000000..8ef1210 --- /dev/null +++ b/testfile2.geojson @@ -0,0 +1,7 @@ +{ +"type": "FeatureCollection", +"features": [ +{ "type": "Feature", "properties": { "df1": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 0.0, 0.0 ], [ 2.0, 0.0 ], [ 2.0, 2.0 ], [ 0.0, 2.0 ], [ 0.0, 0.0 ] ] ] } }, +{ "type": "Feature", "properties": { "df1": 2 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 2.0, 2.0 ], [ 4.0, 2.0 ], [ 4.0, 4.0 ], [ 2.0, 4.0 ], [ 2.0, 2.0 ] ] ] } } +] +} From 707c8ed9ad178bb32a0e6868fcc2e1c4ebedf4c4 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Sun, 10 Sep 2017 11:18:28 -0400 Subject: [PATCH 24/26] modified files for python3 compatiability and fiona/rasterio use --- python/createSpaceNetPOI_Round2GPD_clean.py | 7 +- python/evaluateScene.py | 2 +- python/evaluateScene_Total.py | 20 +- python/spaceNetUtilities/evalTools.py | 4 +- python/spaceNetUtilities/evalToolsGPD.ipynb | 375 ++++++++++++++++++-- python/spaceNetUtilities/evalToolsGPD.py 
| 96 +++-- python/spaceNetUtilities/evalToolsGPD.pyc | Bin 4120 -> 4104 bytes python/spaceNetUtilities/geoToolsGPD.py | 13 +- python/spaceNetUtilities/geoToolsGPD.pyc | Bin 26853 -> 26415 bytes 9 files changed, 437 insertions(+), 80 deletions(-) diff --git a/python/createSpaceNetPOI_Round2GPD_clean.py b/python/createSpaceNetPOI_Round2GPD_clean.py index 7f99fad..5219e19 100644 --- a/python/createSpaceNetPOI_Round2GPD_clean.py +++ b/python/createSpaceNetPOI_Round2GPD_clean.py @@ -123,6 +123,7 @@ def createProcessedPOIDataGPD(srcVectorFile, pointOfInterestList, rasterFileList seperateImageFolders=False, minPartialToInclue = 0.70, defaultfeatureChipScaleM=200, + verbose=False ): fieldName = pointOfInterestList.keys()[0] @@ -131,8 +132,10 @@ def createProcessedPOIDataGPD(srcVectorFile, pointOfInterestList, rasterFileList ## determinePixelSize in Meters - print rasterFileList - print rasterFileList[0][0] + if verbose: + print(rasterFileList) + print(rasterFileList[0][0]) + with rasterio.open(rasterFileList[0][0]) as src: geoTransform = src.affine.to_gdal() diff --git a/python/evaluateScene.py b/python/evaluateScene.py index cfbdefc..58ba1f3 100644 --- a/python/evaluateScene.py +++ b/python/evaluateScene.py @@ -241,7 +241,7 @@ def combineGeoJsonAndConvertToWGS84(baseName, rasterLocationList, removeGeoJsonAfter=True): srcBaseName = os.path.splitext(baseName)[0] geoJsonList = glob.glob(srcBaseName+"_*.geojson") - print geoJsonList + print(geoJsonList) rasterList = [] for rasterLocation in rasterLocationList: diff --git a/python/evaluateScene_Total.py b/python/evaluateScene_Total.py index 9072a71..e449a6a 100644 --- a/python/evaluateScene_Total.py +++ b/python/evaluateScene_Total.py @@ -41,10 +41,22 @@ def writeAOISummaryToCSV(resultsDict,csvwriter): +def writeResultsToScreen(resultsDict): + print('AOI of Interest', resultsDict['AOI_Name']) + print('True_Pos_Total', resultsDict['TruePositiveTotal']) + print('False_Pos_Total', resultsDict['FalsePositiveTotal']) + 
print('False_Neg_Total', resultsDict['FalseNegativeTotal']) + print('F1ScoreTotal', resultsDict['F1ScoreTotal']) - +def writePerChipToCSV(resultsDictList, csvwriter): + resultsDict = resultsDictList[0] + csvwriter.writerow(['ImageId', 'F1Score', 'True Positive Count', 'False Positive Count', 'False Negative Count']) + for result in resultsDict['PerImageStatsResultList']: + tmpList = [result[1]] + tmpList.extend(result[0]) + csvwriter.writerow(tmpList) def evaluateSpaceNetSolution_Total(summaryTruthFile, summaryProposalFile, resultsOutputFile='', useParallelProcessing=False, minPolygonSize=0, @@ -201,10 +213,12 @@ def combineGeoJsonAndConvertToWGS84(baseName, rasterLocationList, 'AOI_3_Paris', 'AOI_4_Shanghai', 'AOI_5_Khartoum'], - removeGeoJsonAfter=True): + removeGeoJsonAfter=True, + verbose=False): srcBaseName = os.path.splitext(baseName)[0] geoJsonList = glob.glob(srcBaseName+"_*.geojson") - print geoJsonList + if verbose: + print(geoJsonList) rasterList = [] for rasterLocation in rasterLocationList: diff --git a/python/spaceNetUtilities/evalTools.py b/python/spaceNetUtilities/evalTools.py index 68cb183..f4e591d 100644 --- a/python/spaceNetUtilities/evalTools.py +++ b/python/spaceNetUtilities/evalTools.py @@ -129,7 +129,7 @@ def score(test_polys, truth_polys, threshold=0.5, truth_index=[], return true_pos_count, false_pos_count, false_neg_count -def evalfunction((image_id, test_polys, truth_polys, truth_index), +def evalfunction(image_id, test_polys, truth_polys, truth_index, resultGeoJsonName=[], threshold = 0.5): @@ -157,7 +157,7 @@ def evalfunction((image_id, test_polys, truth_polys, truth_index), return ((F1score, true_pos_count, false_pos_count, false_neg_count), image_id) -def create_eval_function_input((image_ids, (prop_polysIdList, prop_polysPoly), (sol_polysIdsList, sol_polysPoly))): +def create_eval_function_input(image_ids, prop_polysIdList, prop_polysPoly, sol_polysIdsList, sol_polysPoly): evalFunctionInput = [] diff --git 
a/python/spaceNetUtilities/evalToolsGPD.ipynb b/python/spaceNetUtilities/evalToolsGPD.ipynb index 77dfe02..d3065b1 100644 --- a/python/spaceNetUtilities/evalToolsGPD.ipynb +++ b/python/spaceNetUtilities/evalToolsGPD.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 8, + "execution_count": 1, "metadata": { "collapsed": true }, @@ -10,71 +10,128 @@ "source": [ "import sys\n", "sys.path.extend(['/Users/dlindenbaum/cosmiQGit/spaceNetUtilities_DaveL/python'])\n", - "from spaceNetUtilities import evalToolsGPD as eT\n", - "from spaceNetUtilities import geoToolsGPD as gT" + "#from spaceNetUtilities import evalToolsGPD as eT\n", + "#from spaceNetUtilities import geoToolsGPD as gT" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 2, "metadata": { "collapsed": true }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:Fiona:PROJ data files not located, PROJ_LIB not set\n" + ] + } + ], "source": [ "import geopandas as gpd\n", - "\n", - "srcTruthVector = '/Users/dlindenbaum/dataStorage/spaceNetPrivate_Truth/AOI_2_Vegas/srcData/vectorData/Vegas_Footprints.shp'\n", - "srcTestVector = '/Users/dlindenbaum/dataStorage/xd_Vegas/15OCT22183656-S2AS_R1C7-056155973080_01_P001.shp'" + "#srcTruthVector = '/Users/dlindenbaum/dataStorage/spaceNetPrivate_Truth/AOI_2_Vegas/srcData/vectorData/Vegas_Footprints.shp'\n", + "srcTestVector1 = '/Users/dlindenbaum/dataStorage/xd_Vegas/15OCT22183656-S2AS_R1C2-056155973080_01_P001.shp'\n", + "srcTestVector2 = '/Users/dlindenbaum/dataStorage/xd_Vegas/15OCT22183656-S2AS_R1C3-056155973080_01_P001.shp'\n", + "srcTileIndex = '/Users/dlindenbaum/dataStorage/xd_Vegas/mul-pansharpen.shp'" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:Fiona:PROJ data files not located, PROJ_LIB not set\n" + ] + }, + { + 
"name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:Fiona:PROJ data files not located, PROJ_LIB not set\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:Fiona:PROJ data files not located, PROJ_LIB not set\n" + ] + } + ], "source": [ - "truthDF = gpd.read_file(srcTruthVector)\n", - "testDF = gpd.read_file(srcTestVector)" + "#truthDF = gpd.read_file(srcTruthVector)\n", + "testDF1 = gpd.read_file(srcTestVector1)\n", + "testDF2 = gpd.read_file(srcTestVector2)\n", + "srcTileDF = gpd.read_file(srcTileIndex)" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ - "prop_polysPoly = testDF.geometry.values\n", - "sol_polysPoly = truthDF.geometry.values" + "testIndex1 = testDF1.sindex\n", + "testIndex2 = testDF2.sindex" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 38, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "truthIndex = truthDF.sindex" + "srcTindex1 = srcTileDF.loc[srcTileDF.contains(testDF1.geometry.values[0].centroid)]\n", + "srcTindex2 = srcTileDF.loc[srcTileDF.contains(testDF2.geometry.values[0].centroid)]" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ - "eval_function_input_list = [['ImageId', prop_polysPoly, sol_polysPoly, truthIndex]]" + "testDF1['location'] = srcTindex1['location'].values[0]\n", + "testDF2['location'] = srcTindex2['location'].values[0]\n", + "testDF1['index1'] = testDF1.index\n", + "testDF2['index2'] = testDF2.index" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Edge\ntest1 Length of Matchs = 0\ntest2 Length of Matchs = 0\nEdge\ntest1 Length of Matchs = 0\ntest2 Length of Matchs = 36\nEdge\ntest1 Length of Matchs = 0\ntest2 Length of Matchs = 45\nEdge\ntest1 Length of Matchs = 41\ntest2 Length 
of Matchs = 29\n" + ] + } + ], "source": [ - "resultList = eT.evalfunction(eval_function_input_list[0])" + "from shapely.geometry import LineString\n", + "coordsList = list(srcTindex2.geometry.values[0].exterior.coords)\n", + "lineStringList = []\n", + "for idx in range(1, len(coordsList)):\n", + " lineStringList.append(LineString([coordsList[idx-1], coordsList[idx]]))\n", + "\n", + "pixelDimension = 0.0000027\n", + "numPixel = 5\n", + "\n", + "for lineboundary in lineStringList:\n", + " print(\"Edge\")\n", + " print('test1 Length of Matchs = {}'.format(len(list(testIndex1.intersection(lineboundary.buffer(numPixel*pixelDimension).bounds)))))\n", + " print('test2 Length of Matchs = {}'.format(len(list(testIndex2.intersection(lineboundary.buffer(numPixel*pixelDimension).bounds)))))" ] }, { @@ -85,7 +142,7 @@ { "data": { "text/plain": [ - "True" + "'LINESTRING (-115.2632808 36.24184080000634, -115.2632808 36.2639592)'" ] }, "execution_count": 8, @@ -94,7 +151,279 @@ } ], "source": [ - "prop_polysPoly[0].is_valid" + "lineStringList[3].wkt" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "lineboundary = lineStringList[3]\n", + "numPixel = 2\n", + "testDF1Intersection = testDF1.iloc[list(testIndex1.intersection(lineboundary.buffer(numPixel*pixelDimension).bounds))]\n", + "testDF2Intersection = testDF2.iloc[list(testIndex2.intersection(lineboundary.buffer(numPixel*pixelDimension).bounds))]\n", + "testDF1Intersection.geometry = testDF1Intersection.buffer(numPixel*pixelDimension)\n", + "testDF2Intersection.geometry = testDF2Intersection.buffer(numPixel*pixelDimension)" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "res_union = gpd.overlay(testDF1Intersection, testDF2Intersection, how='union')\n", + "res_intersection = gpd.overlay(testDF1Intersection, testDF2Intersection, how='intersection')" + ] + }, + { + "cell_type": "code", + 
"execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "union shape (23, 5)\n" + ] + }, + { + "data": { + "text/plain": [ + "1 2.145807e-09\n5 8.390497e-10\n15 8.123570e-10\n21 2.279853e-10\n24 9.676290e-11\ndtype: float64" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "print(\"union shape {}\".format(res_union[(res_union['raster_val']==1.0) & (res_union['raster_val_2']==1.0)].shape))\n", + "res_union[(res_union['raster_val']==1.0) & (res_union['raster_val_2']==1.0)].head().area" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "intersection shape (23, 5)\n" + ] + }, + { + "data": { + "text/plain": [ + "0 2.145807e-09\n1 8.390497e-10\n2 8.123570e-10\n3 2.279853e-10\n4 9.676290e-11\ndtype: float64" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "print(\"intersection shape {}\".format(res_intersection[(res_intersection['raster_val']==1.0) & (res_intersection['raster_val_2']==1.0)].shape))\n", + "res_intersection[(res_intersection['raster_val']==1.0) & (res_intersection['raster_val_2']==1.0)].head().area" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "

\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
geometrylocationlocation_2raster_valraster_val_2
0POLYGON ((-115.2633455998505 36.2419426698028,...15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIFNone1.0NaN
1POLYGON ((-115.2632862000001 36.24283980000606...15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIF15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIF1.01.0
2POLYGON ((-115.2632821499251 36.24284502199291...15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIFNone1.0NaN
3POLYGON ((-115.2632821499251 36.24284502199291...None15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIFNaN1.0
4POLYGON ((-115.2632800697965 36.243042300006, ...None15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIFNaN1.0
\n", + "
" + ], + "text/plain": [ + "
\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
geometrylocationlocation_2raster_valraster_val_2
0POLYGON ((-115.2633455998505 36.2419426698028,...15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIFNone1.0NaN
1POLYGON ((-115.2632862000001 36.24283980000606...15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIF15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIF1.01.0
2POLYGON ((-115.2632821499251 36.24284502199291...15OCT22183656-S2AS_R1C2-056155973080_01_P001.TIFNone1.0NaN
3POLYGON ((-115.2632821499251 36.24284502199291...None15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIFNaN1.0
4POLYGON ((-115.2632800697965 36.243042300006, ...None15OCT22183656-S2AS_R1C3-056155973080_01_P001.TIFNaN1.0
\n", + "
" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "res_union.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Using matplotlib backend: MacOSX\n" + ] + } + ], + "source": [ + "%matplotlib auto" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "res_union[(res_union['raster_val']==1.0) & (res_union['raster_val_2']==1.0)].head().plot()" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 37, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "res_union.plot()" ] }, { diff --git a/python/spaceNetUtilities/evalToolsGPD.py b/python/spaceNetUtilities/evalToolsGPD.py index 5b652ea..c8630a8 100644 --- a/python/spaceNetUtilities/evalToolsGPD.py +++ b/python/spaceNetUtilities/evalToolsGPD.py @@ -1,5 +1,7 @@ import numpy as np import geoToolsGPD as gT +from shapely.geometry import mapping, Polygon +import fiona import os from tqdm import tqdm def iou(test_poly, truth_polys, truth_index=[]): @@ -18,7 +20,7 @@ def iou(test_poly, truth_polys, truth_index=[]): if intersection_result.geom_type == 'Polygon' or \ intersection_result.geom_type == 'MultiPolygon': intersection_area = intersection_result.area - union_area = test_poly.union(truth_polys[fid]).ara + union_area = test_poly.union(truth_polys[fid]).area iou_list.append(intersection_area / union_area) else: @@ -27,12 +29,39 @@ def iou(test_poly, truth_polys, truth_index=[]): return iou_list, fidlistArray +def write_geojson(geojson_name, + feature_list, + output_crs={'init': 'epsg:4326'}, + output_schema={'geometry': 
'Polygon', + 'properties': {'ImageId': 'str', + 'IOUScore': 'float:15.5', + 'BuildingId': 'int'} + }, + output_driver='GeoJSON' + ): + with fiona.open(geojson_name,'w', + driver=output_driver, + crs=output_crs, + schema=output_schema) as sink: + + for feature in feature_list: + sink.write(feature) + + + def score(test_polys, truth_polys, threshold=0.5, truth_index=[], resultGeoJsonName = [], imageId = []): # Define internal functions + # + output_schema = {'geometry': 'Polygon', + 'properties': {'ImageId': 'str', + 'IOUScore': 'float:15.5', + 'BuildingId': 'int'} + } + output_crs = {'init': 'epsg:4326'} # Find detections using threshold/argmax/IoU for test polygons true_pos_count = 0 @@ -43,17 +72,7 @@ def score(test_polys, truth_polys, threshold=0.5, truth_index=[], if not imageId: imageId = os.path.basename(os.path.splitext(resultGeoJsonName)[0]) - driver = ogr.GetDriverByName('geojson') - if os.path.exists(resultGeoJsonName): - driver.DeleteDataSource(resultGeoJsonName) - datasource = driver.CreateDataSource(resultGeoJsonName) - layer = datasource.CreateLayer('buildings', geom_type=ogr.wkbPolygon) - field_name = ogr.FieldDefn("ImageId", ogr.OFTString) - field_name.SetWidth(75) - layer.CreateField(field_name) - layer.CreateField(ogr.FieldDefn("BuildingId", ogr.OFTInteger)) - layer.CreateField(ogr.FieldDefn("IOUScore", ogr.OFTReal)) - + feature_list = [] for test_poly in tqdm(test_polys): if truth_polys: @@ -65,25 +84,30 @@ def score(test_polys, truth_polys, threshold=0.5, truth_index=[], if maxiou >= threshold: true_pos_count += 1 - truth_index.delete(fidlist[np.argmax(iou_list)], truth_polys[fidlist[np.argmax(iou_list)]].GetEnvelope()) + truth_index.delete(fidlist[np.argmax(iou_list)], truth_polys[fidlist[np.argmax(iou_list)]].bounds) #del truth_polys[fidlist[np.argmax(iou_list)]] - if resultGeoJsonName: - feature = ogr.Feature(layer.GetLayerDefn()) - feature.SetField('ImageId', imageId) - feature.SetField('BuildingId', fidlist[np.argmax(iou_list)]) - 
feature.SetField('IOUScore', maxiou) - feature.SetGeometry(test_poly) + + feature = {'geometry': mapping(test_poly), + 'properties': {'ImageId': imageId, + 'IOUScore': maxiou, + 'BuildingId': fidlist[np.argmax(iou_list)] + } + } + else: false_pos_count += 1 - if resultGeoJsonName: - feature = ogr.Feature(layer.GetLayerDefn()) - feature.SetField('ImageId', imageId) - feature.SetField('BuildingId', -1) - feature.SetField('IOUScore', maxiou) - feature.SetGeometry(test_poly) + feature = {'geometry': mapping(test_poly), + 'properties': {'ImageId': imageId, + 'IOUScore': maxiou, + 'BuildingId': -1 + } + } + + + @@ -92,20 +116,18 @@ def score(test_polys, truth_polys, threshold=0.5, truth_index=[], else: false_pos_count += 1 + feature = {'geometry': mapping(test_poly), + 'properties': {'ImageId': imageId, + 'IOUScore': 0, + 'BuildingId': 0 + } + } - if resultGeoJsonName: - feature = ogr.Feature(layer.GetLayerDefn()) - feature.SetField('ImageId', imageId) - feature.SetField('BuildingId', 0) - feature.SetField('IOUScore', 0) - feature.SetGeometry(test_poly) - if resultGeoJsonName: - layer.CreateFeature(feature) - feature.Destroy() + feature_list.append(feature) if resultGeoJsonName: - datasource.Destroy() + write_geojson(resultGeoJsonName, feature_list) false_neg_count = truth_poly_count - true_pos_count @@ -113,7 +135,7 @@ def score(test_polys, truth_polys, threshold=0.5, truth_index=[], return true_pos_count, false_pos_count, false_neg_count -def evalfunction((image_id, test_polys, truth_polys, truth_index), +def evalfunction_GPD(image_id, test_polys, truth_polys, truth_index, resultGeoJsonName=[], threshold = 0.5): @@ -141,7 +163,7 @@ def evalfunction((image_id, test_polys, truth_polys, truth_index), return ((F1score, true_pos_count, false_pos_count, false_neg_count), image_id) -def create_eval_function_input((image_ids, (prop_polysIdList, prop_polysPoly), (sol_polysIdsList, sol_polysPoly))): +def create_eval_function_input(image_ids, prop_polysIdList, prop_polysPoly, 
sol_polysIdsList, sol_polysPoly): evalFunctionInput = [] diff --git a/python/spaceNetUtilities/evalToolsGPD.pyc b/python/spaceNetUtilities/evalToolsGPD.pyc index 0d32ab4ee21dc00e706a752367d7ea295371fd4f..50fba47329e38a64c730eb4c48e9f36c9e8cd394 100644 GIT binary patch delta 410 zcmbQC(4oM_{F#^Qy81@87)D0+$;pfvjGU9tFcxffVTxj8RNp+GnV(VoJ!6RwLk$B% zGb4jb>_f&{UWOV*hN67T5>QkFxIdz6ctWBz^XsFiq%M@kg4z{Qw=*q z3MYekEjt59@+MF+MC2w@UMd5ogu&zpHa|vz&CA)^85zYU>vL#wu`w_(B;}Xpr4&z2 z=g1Y|1#(L`K}1GTYH>z>PD&8p{E`aB>IdeMZC08C<@M8hjuTYY<@r zBCLQ!IuL_^g9Zzb43Ys6?vw9ws~UpUfs~YR0Ex`p#Prno%#C!t5{nWy+b~5jGHPv}&dkp! z{)w?fh@pmop_!4vCH4tpEiXe2BSTU74YgibHN+vt7CW@3W z72aX0VP{C;WH7H~X8=ju0ZQmke!{9Ma)&7|l>wWi51T)u@a8#e?Tn1#lT|o0`GMv# zxTlu5=9Q)9e8urgHNF#exJugwW&)&ijle znbJB0MIa zRv;B1i-W+X*@0O0Ai{mKGEWa9qyFR#yzY$NlYj7*3;2MB~ zvY$QY+;h*p_x!%UyYE~SXMQEhomX?S^ERB=6{hQd`TDy{e^TLd!sNmXKUXvXpnQl;k2_rj=wF8_P(R zv$33H1sf|!E@opTNk1F6kqofW#>FeS)GD^C*q&U?PBlC2Bx^YPc9QzUb%Z_E^TzL( z7ir+gnj3o!l5FIEJ*AeX=mQW8OlLzjQ96%8YyW@BsXzGMs6VN7D&`O5uB5|)Mg$!u8gZy^`0EOhWVtmWX5zr0HIX#c zCI8B7D^rJ}acLe@C4oo1*H9u`gBXW)I?;|iD1s(&wq_))`6+!h*{qMyW)Xy7!D!SF< z!HeR!8f-iv2GmE5w&z(SeiJyZUTb}+IZ=5~h1O4qS@q`n zqhiL6ZJ5doTt+I-e-{=P`3qM9W>sEqRq6lk5hIYF`nY4!q~+08k7;d+kDIYX(lj7K z;|CfRB&0{W%^_=S)HE();O3!>D{xN(e*@kJt^%JP57r03-+_N<1lwT!6ZjCg2K4+3 zto;(U|v(;{u}rYKte+R$KX0(Wvd_emUeM77QnwL9qybNA2+fPc5^P^0djy` zAP>k_sm)bIUYG?yAyA~Dwr>s=7!`0A1G8AyuioCA7D?N@%aiGP6_IH*zGF+nNtkHw zJXw^&MPGP3hoVX2iu&`8hRHV(*B>&90e(>2;Xi3NScMq(hnvQ`o!dk?c2Wl1N&|dT zgkwXg)b^nRrZIzZy!A>w3r{yl$wX{oR*udI4@Z-jur->pj3z8_R=s?8 z?WVbII}ywPG69!{m8)l>TE7{#5~I4N@s^ITh7w~|kD0hDm56iG;x3j$maRUw!`X_v z!O{h&G79BIaShz^M8(YWi1gj);L0baGc{~obOo$asn@{?x`d@2d8#)Rq{C>IesG0-dY zUG?g{!}@Zr+Vy2uD<&^f)4Qg{8QTnvI9#mHr`21Z-|8q7s%Fo3#cTE(d#(tvTU{7z zDaWwo#lc4P%WrNp&sz3iulE7lbSW!K zLq=1nL~PtNYEYb)L0C^4k;`2XN zctX|fKjRt1Bwp%i^@sf}yCwWSOo;++cl5Kg60FGVZIS*D4l3YL*EC%c+dE{$eT^g&X2r1A-CnI*s}$T0`mqQU#L1 zTvCOy%sf&vM9e1@6tRF*k%(GS#Ud7x3W-=mDlDRoREdbiWRI9-N+sw+vgAzB^`d8q 
zUP7u&yqA(PTQ4W%S|K}FHqH_eOT*oy=a8zDkC`%CrkI@&1g48nRWv$fgrbfA>l{^0 zqgfPvSSo4i6p(OU>TM!bLkIY791@4QG@41#<#&xHp?R{lDU~TwEsai~=p&~7CXgwS zDNY^rWRO}cn{FockZk%V3QJo0~cPaOn_lAdq?oq$b%X8ghnR}U6xW$1jK5ln+=e)u^sOzR( z<fX?P(r+CwPcqg%>pvxoMYkC*lsS0C;1&>kE$9DYYE}l%=1CGRQwVf5oAv=-kw|lNu`JIRVgt4by-kwo9!T}6c+C0C*wgSPPd*oN$P+{L5C&JXxzf%0XGP)MdU9Cm zVTxgDXiCTWgQD3%$p*Ri*r~p4&dz>Y4i`D;P;sqK64RTCzd9|$97q~TR$KAE9dH?;rF?~-Cweh15X)0s|GlzFO}ZluzO*q!}*6`88?R8!}^i3 zrW_6VZQwh=clF`2BV4O1Xa9xw>8$cA{6+oS^0)bh0ve(VrYj2_i}#~CwXfC5hIXR922t+IrNbwSl&&JSSxA#fac z&J2Hx54ttgmpy#g{aek-4Bo99Y76+MdVTH6z{_wt4_t6h*B;^gSD{}6E&`W;*Y(Cl zW&Cr!chPk-ZB2PfzgIVvPr29Xe&yqf`i*5BIX{A5J}@0PuWK73X6Kt5iu3*i1J;qF z=}pCx)&>2|hLHbR44l(v8}{=jdO_oYfUI~8)*Cw2*yev7@`Qe^v5r61e{VeLvxbdD zAAfi!-_%7-NBFw?Qquun_+$9VG>AZu!94(^$rH^?ZQISS{HzS0JnhKz}SAE$TXY9fX5)RNGDqhMkKchFir$A zfpI&=yh*SHfXP4>Z~_BUppcf84V42-1*QSA|6HhheIc(lJ2_YHMg! z5iG?3s%eFw!axa73d{s%0cF5!pd6?GBEU_|ox}Qx@}xfAme0H0^KAj2{|d|(_0?4? zE6zcp#N+y+0Jo@<UBA^HYQfvd`XlfrAbnj4B}Y<-5I#VT3As8`3}T6Re|vnFy~=j%fwXl^ z&tLmMw`}Kr5&y5Y#y8WT)3+^_wjRWC?;?d6P(h#um9`_8B9@=z@uejmO zA2QoIXKOWosvEWz@)o^eYb0o;kq^F)tt#M*L1ZdahkIV4D-v@Jh~Xm zz?k^sJJs5wZ+C<_sl%PqxkJ}=eviZYH=V!m_h7)9TQdyXO1*F|5F!#42@%1z Date: Sun, 10 Sep 2017 14:26:58 -0400 Subject: [PATCH 25/26] added cw_environment.yml for conda install --- cw_environment.yml | 59 +++++++++++++++++++++++++++++++++ python/mbtilesConvertScratch.py | 0 2 files changed, 59 insertions(+) create mode 100644 cw_environment.yml create mode 100644 python/mbtilesConvertScratch.py diff --git a/cw_environment.yml b/cw_environment.yml new file mode 100644 index 0000000..6132d63 --- /dev/null +++ b/cw_environment.yml @@ -0,0 +1,59 @@ +name: cw_environment +channels: +- conda-forge +- defaults +dependencies: +- cython +- libgdal=2.2.1 +- numpy +- opencv=3.2.0=np113py36_blas_openblas_204 +- pip=9.0.1=py36_0 +- pyproj +- pytables +- python=3.6.2=0 +- python-dateutil=2.6.1=py36_0 +- pytz=2017.2=py36_0 +- pywavelets=0.5.2=np113py36_0 +- pyyaml=3.12=py36_1 +- 
readline=6.2=0 +- requests=2.18.4=py36_1 +- scikit-image +- scikit-learn +- scipy +- setuptools=36.2.2=py36_0 +- six=1.10.0=py36_1 +- sortedcontainers=1.5.7=py36_0 +- sqlite=3.13.0=1 +- tblib=1.3.2=py36_0 +- tk=8.5.19=2 +- toolz=0.8.2=py36_0 +- tornado=4.5.1=py36_0 +- urllib3=1.22=py36_0 +- wheel=0.29.0=py36_0 +- x264=20131217=3 +- xerces-c=3.1.4=3 +- xz=5.2.3=0 +- yaml=0.1.6=0 +- zict=0.1.2=py36_0 +- zlib=1.2.11=0 +- pip: + - affine==2.1.0 + - boto3==1.4.6 + - botocore==1.6.8 + - click-plugins==1.0.3 + - cligj==0.4.0 + - descartes==1.1.0 + - docutils==0.14 + - fiona==1.7.9.post1 + - geopandas==0.2.1 + - jmespath==0.9.3 + - keras==1.1.0 + - munch==2.2.0 + - pathlib + - rasterio==1.0a9 + - s3transfer==0.1.10 + - shapely==1.6.0 + - snuggs==1.4.1 + - tables==3.4.2 + - theano==0.9.0 + - tqdm \ No newline at end of file diff --git a/python/mbtilesConvertScratch.py b/python/mbtilesConvertScratch.py new file mode 100644 index 0000000..e69de29 From cd89e6f434957162f9d89164f78a1f76f9df8364 Mon Sep 17 00:00:00 2001 From: dlindenbaum Date: Sun, 10 Sep 2017 15:06:25 -0400 Subject: [PATCH 26/26] created osmtools.py --- python/spaceNetUtilities/osmtools.py | 52 ++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 python/spaceNetUtilities/osmtools.py diff --git a/python/spaceNetUtilities/osmtools.py b/python/spaceNetUtilities/osmtools.py new file mode 100644 index 0000000..519156c --- /dev/null +++ b/python/spaceNetUtilities/osmtools.py @@ -0,0 +1,52 @@ +import geopandas as gpd +import geopandas_osm +from osmnx import core +from shapely.geometry import Polygon + + +def createPolygonShapeFileFromOSM(polyBounds, pointOfInterestList, outVectorFile='', debug=True): + + gdfOSM = gpd.GeoDataFrame([]) + for pointOfInterestKey in pointOfInterestList.keys(): + + gdfPOITemp = core.osm_net_download(polygon=polyBounds, + infrastructure='node["{}"]'.format(pointOfInterestKey) + ) + gdfOSM.append(gdfPOITemp) + + gdfPOITemp = core.osm_net_download(polygon=polyBounds, 
+ infrastructure='way["{}"]'.format(pointOfInterestKey) + ) + + gdfOSM.append(gdfPOITemp) + + #gdfPOITemp = geopandas_osm.osm.query_osm('way', polyBounds, recurse='down', + # tags='{}'.format(pointOfInterestKey)) + + gdfOSM.append(gdfPOITemp) + + #gdfPOITemp = geopandas_osm.osm.query_osm('node', polyBounds, recurse='down', + # tags='{}'.format(pointOfInterestKey)) + + gdfOSM.append(gdfPOITemp) + + gdfOSM.drop_duplicates(subset=['id'], keep='first', inplace=True) + + ## convert closed LineStrings to Polygon + geomList = [] + for geom in gdfOSM.geometry.values: + if geom.is_closed & geom.geom_type == "LineString": + geom = Polygon(geom) + else: + geom + geomList.append(geom) + + gdfOSM.geometry = geomList + + gdfOSM = gdfOSM[(gdfOSM.geom_type=='Point') or (gdfOSM.geom_type=='Polygon')] + ## + + + return gdfOSM + + #gdfFinal = createPolygonShapeFileGPD(pointOfInterestList, gdfOSM, outVectorFile=outVectorFile)