Commit 975095e6 authored by Mario Chirinos

sedema update

parent 369dd3c5
#!/bin/sh
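# Driver script (intended usage inferred from the calls below):
#   sh <this_script> <regions.csv> <config.json>
# It creates ./data, builds one sub-directory per CSV row via
# GI_createDirectories.py, and runs GI_L2ASCL_AreaProcessing.sh in every
# sub-directory that does not yet contain scl_data.json. It assumes the
# GI_*.py / GI_*.sh tools are on PATH.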
CSV=$1
CFG=$2
if [ ! -d "data" ]; then
    mkdir data
fi
cd data || exit 1
GI_createDirectories.py ../"$CSV" ../"$CFG"
for d in */ ; do
    echo "PROCESSING $d ..."
    if [ ! -f "$(pwd)/$d/scl_data.json" ]; then
        GI_L2ASCL_AreaProcessing.sh "$(pwd)/$d" 0
    else
        echo "PASSING $d"
    fi
done
#!/usr/bin/python
# -*- coding: utf-8 -*-
import osgeo.ogr as ogr
import osgeo.osr as osr
import numpy as np
import gdal
import sys
import os
import json
from collections import Counter
#from collections import OrderedDict
########### read the configuration file ################
dirname = os.path.dirname(__file__)
configfile = os.path.join(dirname, '../config/config.json')
with open(configfile, 'r') as f:
    config = json.load(f)
#print(config['PATHS']['PATH_GEOSENTINEL'])
SENTINEL_PATH = config['PATHS']['PATH_GEOSENTINEL']
###########################################################
sys.path.append(SENTINEL_PATH)
from geosentinel import rasterWkt
CLASS_MAP = {0:0, 1:1, 2:2, 3:3, 4:4, 5:5, 6:6, 7:7, 8:8, 9:8, 10:8, 11:9}
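# Note (added): CLASS_MAP collapses the Sentinel-2 L2A SCL classes so that the
# three cloud-probability classes (8, 9, 10) are counted as a single class 8,
# and snow/ice (11) becomes 9; classes 0-7 keep their values. The class
# meanings follow the standard Sentinel-2 scene-classification legend.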
#-------------------------------------------------------------------------------
def L2ASCLtoDict(filename, wkt, wktsub):
    '''
    L2ASCLtoDict: count pixels per (remapped) SCL class for the scene and for
    the area with the wktsub polygon excluded.
    '''
    classMap = lambda x: [CLASS_MAP[xx] for xx in x]
    inputImage = gdal.Open(filename)
    rows, cols, geotransform = inputImage.RasterYSize, inputImage.RasterXSize, inputImage.GetGeoTransform()
    data = inputImage.GetRasterBand(1).ReadAsArray(0, 0, cols, rows)
    data1 = np.bitwise_and(data, rasterWkt.getPolygonArray(inputImage, wkt))
    count1 = Counter([])
    for r in data:
        count1 = count1 + Counter(classMap(r))
    sclDict1 = {str(k): v for k, v in dict(count1).items()}
    if wktsub != "":
        data2 = np.bitwise_and(data1, np.invert(rasterWkt.getPolygonArray(inputImage, wktsub)))
        count2 = Counter([])
        for r in data2:
            count2 = count2 + Counter(classMap(r))
        sclDict2 = {str(k): v for k, v in dict(count2).items()}
    else:
        sclDict2 = sclDict1
    return {"areaTotal": sclDict1, "areaNoConservacion": sclDict2}
#-------------------------------------------------------------------------------
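# Example of the JSON this script prints (keys are remapped SCL classes as
# strings, values are pixel counts; the numbers below are illustrative only):
#   {"areaNoConservacion": {"0": 1200, "4": 350, "5": 80},
#    "areaTotal": {"0": 1500, "4": 400, "5": 100}}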
def main(argv):
    if len(sys.argv) != 3:
        print("Usage: " + argv[0] + " <SCL image (GeoTIFF)> <cfg JSON>")
    else:
        with open(argv[2]) as jsonFile:
            cfg = json.load(jsonFile)
        dataDict = L2ASCLtoDict(argv[1], cfg['wkt'], cfg["wkt2"])
        print(json.dumps(dataDict, sort_keys=True))
#-------------------------------------------------------------------------------
if __name__ == "__main__":
    main(sys.argv)
#!/bin/sh
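# Runs GI_L2ASCLtoJSON.py over every .tif in DIR with GNU parallel, writing one
# <scene>.json per raster. Usage (inferred): sh <this_script> <dir> <cfg.json> [jobs]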
DIR=$1
CFG=$2
JOBS=${3:-1}
cd "$DIR" || exit 1
ls *.tif | parallel --jobs "$JOBS" 'GI_L2ASCLtoJSON.py {} '"$CFG"' > $(echo {} | cut -d"." -f1).json'
#!/bin/sh
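# Same pattern as above, but runs GI_getComponentsMorphology.py on every .tif.
# Usage (inferred): sh <this_script> <dir> <cfg.json> [jobs]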
DIR=$1
CFG=$2
JOBS=${3:-1}
cd "$DIR" || exit 1
ls *.tif | parallel --jobs "$JOBS" 'GI_getComponentsMorphology.py {} '"$CFG"' > $(echo {} | cut -d"." -f1).json'
@@ -14,16 +14,17 @@ def createDirectories(datafile, cfgfile, level1="CVE_ENT", level2="CVE_MUN"):
         data = csv.DictReader(csvfile, delimiter=',')
         for row in data:
-            mun = row[level1]
-            ent = row[level2]
-            dirname = ent+mun
+            mun = row[level2]
+            ent = row[level1]
+            dirname = ent.zfill(2)+"-"+mun.zfill(3)
             print(ent)
 #           for k in cfg.keys():
 #               p[k]=cfg[k]
             print(os.getcwd())
             cfg["projectDir"]=os.getcwd()+"/"+dirname+"/"
-            cfg["endDate"]=datetime.datetime.now().strftime("%Y%m%d")
+#           cfg["endDate"]=datetime.datetime.now().strftime("%Y%m%d")
             cfg["wkt"]=row["WKT"]
+            cfg["wkt2"]=row["WKT2"]
             if not os.path.exists(dirname):
                 os.mkdir(dirname)
             with open(dirname+'/findProducts.json', 'w') as outfile:
@@ -31,11 +32,14 @@ def createDirectories(datafile, cfgfile, level1="CVE_ENT", level2="CVE_MUN"):
 #===============================================================================
 def main(argv):
-    if len(sys.argv) != 3:
-        print(len(sys.argv))
-        print ("Usage: "+argv[0] + " <csv> <cfg>")
-    else:
-        createDirectories(argv[1], argv[2])
+    if len(sys.argv) != 3 and len(sys.argv) != 5:
+        print(len(sys.argv))
+        print ("Usage: "+argv[0] + " <csv> <cfg>")
+    elif(len(sys.argv) == 3 ):
+        createDirectories(argv[1], argv[2])
+    else:
+        createDirectories(argv[1], argv[2], argv[3], argv[4])
 if __name__ == "__main__":
......
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import os
import json
import csv
import gdal
import numpy as np
import osgeo.ogr as ogr
import osgeo.osr as osr
sys.path.append("/home/geoint/GeoSentinel")
from geosentinel import rasterWkt
from skimage import measure
AREAm = 60*60 #m²
AREAkm = AREAm/1000000#km²
DISTANCE = 60#m
#===============================================================================
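# Note (added): the per-class dictionary built below appears to hold, per SCL
# class i: area_tot (polygon area, km2), area_verde_tot (class pixels * pixel
# area, km2), nAreas (connected components), gf1 (components per km2 of class
# area), gdv (class area / polygon area), tp (mean component area), fp (mean
# perimeter/area of the components) and alp (mean major/minor axis ratio).
# This reading is inferred from the code, not from project documentation.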
def getComponentsMorphology(image_filename, wkt, wktsub):
    inputImage = gdal.Open(image_filename)
    rows, cols, geotransform = inputImage.RasterYSize, inputImage.RasterXSize, inputImage.GetGeoTransform()
    data = inputImage.GetRasterBand(1).ReadAsArray(0, 0, cols, rows)
    data1 = np.bitwise_and(data, rasterWkt.getPolygonArray(inputImage, wkt))
    if wktsub != "":
        data2 = np.bitwise_and(data1, np.invert(rasterWkt.getPolygonArray(inputImage, wktsub)))
    else:
        data2 = data1
    geometry = ogr.CreateGeometryFromWkt(wkt)
    inSpatialRef = osr.SpatialReference()
    inSpatialRef.ImportFromEPSG(4326)
    outSpatialRef = osr.SpatialReference()
    outSpatialRef.ImportFromEPSG(32514)
    coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
    geometry.Transform(coordTrans)
    wktArea = geometry.GetArea()
    classDict = {}
    for i in range(12):
        myDict = dict()
        dataC = data2 == i
        myDict["area_tot"] = wktArea/1000000
        myDict["area_verde_tot"] = np.count_nonzero(dataC)
        if myDict["area_verde_tot"] == 0.0:
            continue
        myDict["area_verde_tot"] = myDict["area_verde_tot"]*AREAkm
        components_labels = measure.label(dataC)
        myDict["nAreas"] = int(np.max(components_labels))
        properties = measure.regionprops(components_labels)
        myDict["gf1"] = 1/(myDict["area_verde_tot"]/myDict["nAreas"])
#       myDict["gf2"]
        myDict["gdv"] = myDict["area_verde_tot"]/myDict["area_tot"]
        myDict["vp"] = myDict["area_verde_tot"]
        myDict["tp"] = myDict["area_verde_tot"]/myDict["nAreas"]
        myDict["fp"] = sum([p["perimeter"]/p["area"] for p in properties])/myDict["nAreas"]
        myDict["alp"] = sum([0 if p["minor_axis_length"] == 0 else p["major_axis_length"]/p["minor_axis_length"] for p in properties])/myDict["nAreas"]
        classDict[str(i)] = myDict
#   print(myDict)
#   print(classDict)
    return classDict
#===============================================================================
def main(argv):
    if len(sys.argv) != 3:
        print("Usage: " + argv[0] + " <image.tif> <cfg.json>")
    else:
        with open(argv[2]) as jsonFile:
            cfg = json.load(jsonFile)
        dataDict = getComponentsMorphology(argv[1], cfg["wkt"], cfg["wkt2"])
        print(json.dumps(dataDict, sort_keys=True))
if __name__ == "__main__":
    main(sys.argv)
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import os
import json
import csv
import gdal
import numpy as np
import osgeo.ogr as ogr
import osgeo.osr as osr
sys.path.append("/home/mchc/git/GeoSentinel")
from geosentinel import rasterWkt
from skimage import measure
AREAm = 60*60 #m²
AREAkm = AREAm/1000000#km²
DISTANCE = 60#m
#===============================================================================
def getComponentsMorphology(image_filename, wktlist):
    inputImage = gdal.Open(image_filename)
    rows, cols, geotransform = inputImage.RasterYSize, inputImage.RasterXSize, inputImage.GetGeoTransform()
    data = inputImage.GetRasterBand(1).ReadAsArray(0, 0, cols, rows)
    areaList = []
    for key, value in wktlist.items():
        print(key)
        dataMask = np.bitwise_and(data, rasterWkt.getPolygonArray(inputImage, value))
        geometry = ogr.CreateGeometryFromWkt(value)
        inSpatialRef = osr.SpatialReference()
        inSpatialRef.ImportFromEPSG(4326)
        outSpatialRef = osr.SpatialReference()
        outSpatialRef.ImportFromEPSG(32514)
        coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
        geometry.Transform(coordTrans)
        wktArea = geometry.GetArea()
        print(key, wktArea/1000000)
        myDict = dict()
        myDict["clave"] = key
        dataC = dataMask != 0
        myDict["area_tot"] = wktArea/1000000
        myDict["area_verde_tot"] = np.count_nonzero(dataC)
        if myDict["area_verde_tot"] == 0.0:
            continue
        myDict["area_verde_tot"] = myDict["area_verde_tot"]*AREAkm
        components_labels = measure.label(dataC)
        myDict["nAreas"] = int(np.max(components_labels))
        properties = measure.regionprops(components_labels)
        myDict["gf1"] = 1/(myDict["area_verde_tot"]/myDict["nAreas"])
##      myDict["gf2"]
        myDict["gdv"] = myDict["area_verde_tot"]/myDict["area_tot"]
        myDict["vp"] = myDict["area_verde_tot"]
        myDict["tp"] = myDict["area_verde_tot"]/myDict["nAreas"]
        myDict["fp"] = sum([p["perimeter"]/p["area"] for p in properties])/myDict["nAreas"]
##      print("axis", [p["minor_axis_length"] for p in properties])
        myDict["alp"] = sum([0 if p["minor_axis_length"] == 0 else p["major_axis_length"]/p["minor_axis_length"] for p in properties])/myDict["nAreas"]
#       classDict[str(i)]=myDict
        areaList.append(myDict)
        print(myDict)
    with open('morphology.csv', 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=areaList[0].keys())
        writer.writeheader()
        for i in areaList:
            writer.writerow(i)
##  print(classDict)
#   return classDict
#===============================================================================
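# Note (added): main() expects a CSV with at least the columns CVEGEO and WKT
# (one polygon per row, WKT assumed to be in EPSG:4326); results are written to
# morphology.csv in the current directory.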
def main(argv):
    if len(sys.argv) != 3:
        print("Usage: " + argv[0] + " <reference_image.tif> <wkt_list.csv>")
    else:
        with open(argv[2]) as csvfile:
            reader = csv.DictReader(csvfile)
            areasDic = {row["CVEGEO"]: row["WKT"] for row in reader}
        dataDict = getComponentsMorphology(argv[1], areasDic)
if __name__ == "__main__":
    main(sys.argv)
@@ -8,32 +8,58 @@ import csv
 def getMinMax(clase):
     polylist = []
+    morphList = []
     for item in os.listdir("."):
         if os.path.isdir(item):
             print(item)
             with open(os.getcwd()+"/"+item+"/scl_data.json") as json_file:
                 dataSCL = json.load(json_file)
+            with open(os.getcwd()+"/"+item+"/scl_morph.json") as json_file:
+                dataMorph = json.load(json_file)
             with open(os.getcwd()+"/"+item+"/findProducts.json") as json_file:
                 dataWKT = json.load(json_file)
             wkt = dataWKT["wkt"]
-            values = []
-            for k, v in dataSCL.items():
-                values.append(v[clase])
-            minval = min(values)
-            maxval = max(values)
-            polylist.append({"clave": item, "wkt":wkt, "min":minval, "max":maxval})
+            polyDict = {"clave": item, "wkt":wkt}
+            cDict = {k:v["areaTotal"][clase] for k, v in dataSCL.items()}
+            maxkey = max(cDict, key=cDict.get)
+            maxval = dataSCL[maxkey]["areaTotal"][clase]
+            polyDict["areaTotal"] = maxval
+            cDict = {k:v["areaNoConservacion"][clase] for k, v in dataSCL.items() if clase in v["areaNoConservacion"]}
+            if len(cDict) > 0 and clase in dataSCL[maxkey]["areaNoConservacion"]:
+                maxkey = max(cDict, key=cDict.get)
+                maxval = dataSCL[maxkey]["areaNoConservacion"][clase]
+            else:
+                maxval = 0
+            polyDict["areaNoConservacion"] = maxval
+            polylist.append(polyDict)
+            tmpDict = dataMorph[maxkey][clase]
+            tmpDict["wkt"] = wkt
+            tmpDict["clave"] = item
+            morphList.append(dataMorph[maxkey][clase])
     with open('minmax_'+clase+'.csv', 'w') as outfile:
         writer = csv.DictWriter(outfile, fieldnames=polylist[0].keys())
         writer.writeheader()
         for item in polylist:
             writer.writerow(item)
 #       json.dump(polylist, outfile)
+    with open('morphology_'+clase+'.csv', 'w') as outfile:
+        writer = csv.DictWriter(outfile, fieldnames=morphList[0].keys())
+        writer.writeheader()
+        for item in morphList:
+            writer.writerow(item)
 #===============================================================================
 def main(argv):
......
#!/usr/bin/python
# -*- coding: utf-8 -*-
#"POLYGON((-89.65513229370117 21.048938357786966,-89.62852478027342 21.047816903874505,-89.6261215209961 21.03339745836918,-89.6465492248535 21.022822311328852,-89.65873718261717 21.039325621609095,-89.65513229370117 21.048938357786966))"
import osgeo.ogr as ogr
import osgeo.osr as osr
import numpy as np
import gdal
import sys
from collections import Counter
#-------------------------------------------------------------------------------
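# Note (added): getPolygonArray returns a uint8 array on the same grid as
# `image`, with 255 inside the polygons of shp_filename and 0 elsewhere
# (burn_values=[255] rasterized over a zero-initialised in-memory raster).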
def getPolygonArray(image, shp_filename):
    rows = image.RasterYSize
    cols = image.RasterXSize
    geotransform = image.GetGeoTransform()
    projection = image.GetProjection()
    sr = osr.SpatialReference(wkt=projection)
    sr4326 = osr.SpatialReference()
    sr4326.ImportFromEPSG(4326)
    driverMEM = gdal.GetDriverByName('MEM')
    driverMemory = ogr.GetDriverByName('Memory')
    driverTiff = gdal.GetDriverByName('GTiff')
    driverShape = ogr.GetDriverByName("ESRI Shapefile")
    #RASTER
    target_ds = driverMEM.Create('raster.tif', cols, rows, 1, gdal.GDT_Byte)  # MEM
    target_ds.GetRasterBand(1).SetNoDataValue(0)
    target_ds.GetRasterBand(1).WriteArray(np.zeros((rows, cols), dtype=np.uint8))
    target_ds.SetProjection(projection)
    target_ds.SetGeoTransform(geotransform)
    target_ds.GetRasterBand(1).FlushCache()
    #VECTOR
    dataSource = driverShape.Open(shp_filename, 0)
    # Check to see if shapefile is found.
    if dataSource is None:
        print('Could not open', shp_filename)
    else:
        print('Opened', shp_filename)
        layer = dataSource.GetLayer()
        featureCount = layer.GetFeatureCount()
        print("Number of features in", shp_filename, featureCount)
#   rast_ogr_ds = driverMemory.CreateDataSource('myshape.shp')
#   rast_mem_lyr = rast_ogr_ds.CreateLayer('POLYGON', sr4326, ogr.wkbPolygon)
#   feat = ogr.Feature(rast_mem_lyr.GetLayerDefn())
#   feat.SetGeometry(ogr.CreateGeometryFromWkt(wkt_geom))
#   rast_mem_lyr.CreateFeature(feat)
    err = gdal.RasterizeLayer(target_ds, [1], layer, burn_values=[255])
    if err != 0:
        print('got non-zero result code from RasterizeLayer:', err)
        return None
    target_ds.GetRasterBand(1).FlushCache()
    np_polygon = target_ds.GetRasterBand(1).ReadAsArray(0, 0, cols, rows)
    count = Counter([])
#   for r in np_polygon:
#       count = count + Counter(r)
#   colorDict = {str(k):v for k,v in dict(count).items()}
#   print(colorDict)
    target_ds = None
    return np_polygon
#-------------------------------------------------------------------------------
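# Example invocation (argument order taken from main() below; file names are
# placeholders): python rasterWkt.py polygons.shp reference.tif mask.tif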
def rasterWkt(shpfile, tifffile, outputfile):
    '''
    Draw the polygon(s) in shpfile into an image on the same grid as tifffile.
    '''
    inputImage = gdal.Open(tifffile)
    rows = inputImage.RasterYSize
    cols = inputImage.RasterXSize
    geotransform = inputImage.GetGeoTransform()
    projection = inputImage.GetProjection()
    sr = osr.SpatialReference()
    sr.ImportFromWkt(projection)
    driverTiff = gdal.GetDriverByName('GTiff')
    output = driverTiff.Create(outputfile, cols, rows, inputImage.RasterCount, gdal.GDT_Byte)
    np_polygon = getPolygonArray(inputImage, shpfile)
    for b in range(1, output.RasterCount+1):
        np_input = inputImage.GetRasterBand(b).ReadAsArray(0, 0, cols, rows)
        output.GetRasterBand(b).SetNoDataValue(0)
        output.GetRasterBand(b).WriteArray(np_polygon)  # np.bitwise_and(np_polygon, np_input)
        output.GetRasterBand(b).FlushCache()
    output.SetGeoTransform(geotransform)
    output.SetProjection(projection)
    #Close files
    polygonRaster = None
    output = None
    inputImage = None
def main(argv):
    rasterWkt(argv[1], argv[2], argv[3])
if __name__ == "__main__":
    main(sys.argv)
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import os
import csv
import json
from datetime import date, datetime
sys.path.append("/home/mchc/git/GeoSentinel")
from geosentinel import findProducts
csv.field_size_limit(sys.maxsize)
#===============================================================================
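# Note (added): the settings JSON is expected to provide at least startDate
# (YYYYMMDD), projectDir and productLevel; wkt and endDate are filled in per
# region below. findProducts.findSentinelProducts may require further keys not
# visible in this file.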
def greenIndexAnalisis(csvfile, cfg):
    today = date.today()
    startDate = datetime.strptime(cfg["startDate"], '%Y%m%d')
    regionList = {}
    with open(csvfile) as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            regionList[row["CVE_ENT"]] = row["WKT"]
    os.chdir(cfg["projectDir"])
    if not os.path.isdir(cfg["productLevel"]):
        os.mkdir(cfg["productLevel"])
    if not os.path.isdir("out"):
        os.mkdir("out")
    os.chdir("out")
    for y in range(today.year, startDate.year-1, -1):
        year = str(y)
        if not os.path.isdir(year):
            os.mkdir(year)
        os.chdir(year)
        for key, value in regionList.items():
            settings = cfg
            settings["wkt"] = value
            settings["endDate"] = date.today().strftime("%Y%m%d")
            findProducts.findSentinelProducts(settings)
        os.chdir("..")  # return to "out" before processing the next year
#===============================================================================
def main(argv):
    if len(argv) != 3:
        print("Usage: " + argv[0] + " <regionList.csv> <settings.json>")
    else:
        with open(argv[2]) as json_file:
            cfg = json.load(json_file)
        greenIndexAnalisis(argv[1], cfg)
if __name__ == "__main__":
    main(sys.argv)
#!/bin/sh
greenIndexAnalisis.py data/00ent.csv data/settings.json
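# data/00ent.csv is expected to provide CVE_ENT and WKT columns, as read by
# greenIndexAnalisis.py; data/settings.json is the settings JSON described there.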