Mario Chirinos Colunga / greenIndex / Commits / 975095e6
Commit 975095e6, authored Dec 10, 2020 by Mario Chirinos
sedema update
parent 369dd3c5
Showing 13 changed files with 850 additions and 27 deletions (+850 / -27)
scripts/GI_runAnalysis.sh (+0 / -18)
scripts/SEDEMA/GI_L2ASCL_AreaProcessing.sh (+341 / -0)
scripts/SEDEMA/GI_L2ASCLtoJSON.py (+70 / -0)
scripts/SEDEMA/GI_SCLimageListToJSON.sh (+9 / -0)
scripts/SEDEMA/GI_SCLmorphImageListToJSON.sh (+9 / -0)
scripts/SEDEMA/GI_createDirectories.py (+13 / -9)
scripts/SEDEMA/GI_getComponentsMorphology.py (+79 / -0)
scripts/SEDEMA/GI_getComponentsMorphology_csv.py (+89 / -0)
scripts/SEDEMA/GI_getMinMax.py (+73 / -0)
scripts/SEDEMA/GI_rasterShp.py (+107 / -0)
scripts/SEDEMA/geoJsonToJsonWKT.py (+0 / -0)
scripts/greenIndexAnalisis.py (+57 / -0)
scripts/runAnalisis.sh (+3 / -0)
scripts/GI_runAnalysis.sh (deleted, mode 100755 → 0) @ 369dd3c5

#!/bin/sh
CSV=$1
CFG=$2
if [ ! -d "data" ]; then
    mkdir data
fi
cd data
GI_createDirectories.py ../$CSV ../$CFG
for d in $(ls -d */) ; do
    echo "PROCESSING " $d ...
    if [ ! -f $(pwd)/"$d/scl_data.json" ]; then
        GI_L2ASCL_AreaProcessing.sh $(pwd)/"$d" 0
    else
        echo "PASSING " $d
    fi
done
scripts/GI_L2ASCL_AreaProcessing.sh → scripts/SEDEMA/GI_L2ASCL_AreaProcessing.sh (renamed) @ 975095e6
@@ -19,219 +19,236 @@ echo $wkt
echo $USERDIR
cd $USERDIR
-#0.- Create Shape File From WKT
-echo "${RED}Creating Shape file...${NC}"
-if [ -d "myshape" ]; then
-    rm -r myshape
-fi
-wktToShape.py $USERDIR"findProducts.json" "myshape"
+##0.- Create Shape File From WKT
+#echo "${RED}Creating Shape file...${NC}"
+#if [ -d "myshape" ]; then
+# rm -r myshape
+#fi
+#wktToShape.py $USERDIR"findProducts.json" "myshape"
-#1.- Link L2A products
-echo ${RED}"Linking Products..."${NC}
-if [ ! -d "L2A" ]; then
-    mkdir L2A
-fi
-cd L2A
-    rm *
-cd ..
-yes yes | findProducts.py $USERDIR"findProducts.json"
+##1.- Link L2A products
+#echo ${RED}"Linking Products..."${NC}
+#if [ ! -d "L2A" ]; then
+# mkdir L2A
+#fi
+#cd L2A
+# rm *
+#cd ..
+#yes yes | findProducts.py $USERDIR"findProducts.json"
-#2.- Extract Images
-echo ${RED}"Extracting JP2 Images..."${NC}
-if [ ! -d "jp2" ]; then
-    mkdir jp2
-fi
-if [ $BYTILE -ne "0" ]; then
-    echo "BY TILE"
-    L2AProductListExtractData.sh $USERDIR"L2A/" $USERDIR"jp2/" 4 1
-else
-    echo "BY DATE"
-    L2AProductListExtractData.sh $USERDIR"L2A/" $USERDIR"jp2/" 4 0
-fi
+##2.- Extract Images
+#echo ${RED}"Extracting JP2 Images..."${NC}
+#if [ ! -d "jp2" ]; then
+# mkdir jp2
+#fi
+#if [ $BYTILE -ne "0" ]; then
+# echo "BY TILE"
+# L2AProductListExtractData.sh $USERDIR"L2A/" $USERDIR"jp2/" 4 1
+#else
+# echo "BY DATE"
+# L2AProductListExtractData.sh $USERDIR"L2A/" $USERDIR"jp2/" 4 0
+#fi
-#3.- Merge Images
-echo ${RED}"\nMerging Images..."${NC}
-if [ $BYTILE -ne "0" ]; then
-    echo "BY TILE"
-    cd $USERDIR"jp2/"
-    if [ ! -d "../out" ]; then
-        mkdir "../out"
-    fi
-# ls -d */ | parallel -q --jobs 1 mergeImagesByDirectory.sh $USERDIR"jp2/"{} "$wtk" 4
-    for i in $(ls -d */) ; do
-        if [ ! -d "../out/"$i ]; then
-            mkdir "../out/"$i
-        fi
-        mergeImagesByDirectory.sh $USERDIR"jp2/"$i ../../../out/$i "$wkt"
-    done
-else
-    echo "BY DATE"
-    cd $USERDIR"jp2/"
-    if [ ! -d "../out" ]; then
-        mkdir "../out"
-    fi
-    mergeImagesByDirectory.sh $USERDIR"jp2/" $USERDIR"out/" "$wkt" 4
-fi
+##3.- Merge Images
+#echo ${RED}"\nMerging Images..."${NC}
+#if [ $BYTILE -ne "0" ]; then
+# echo "BY TILE"
+# cd $USERDIR"jp2/"
+# if [ ! -d "../out" ]; then
+# mkdir "../out"
+# fi
+## ls -d */ | parallel -q --jobs 1 mergeImagesByDirectory.sh $USERDIR"jp2/"{} "$wtk" 4
+# for i in $(ls -d */) ; do
+# if [ ! -d "../out/"$i ]; then
+# mkdir "../out/"$i
+# fi
+# mergeImagesByDirectory.sh $USERDIR"jp2/"$i ../../../out/$i "$wkt"
+# done
+#else
+# echo "BY DATE"
+# cd $USERDIR"jp2/"
+# if [ ! -d "../out" ]; then
+# mkdir "../out"
+# fi
+# mergeImagesByDirectory.sh $USERDIR"jp2/" $USERDIR"out/" "$wkt" 4
+#fi
-#4.- Mask raster with shape file
-echo ${RED}"\nApplying Mask..."${NC}
-if [ $BYTILE -ne "0" ]; then
-    echo "BY TILE"
-    cd $USERDIR"out/"
-    for d in $(ls -d */) ; do
-        cd $d
-        if [ ! -d "mask" ]; then
-            mkdir mask
-        fi
-        for i in $(ls *.tif) ; do
-            if [ ! -e mask/$i ]; then
-                gdalwarp -cutline $USERDIR"myshape/wkt.shp" -crop_to_cutline -dstalpha $i mask/$i
-            else
-                echo "Passing " $i
-            fi
-        done
-        cd ..
-    done
-else
-    echo "BY DATE"
-    cd $USERDIR"out/"
-    if [ ! -d "mask" ]; then
-        mkdir mask
-    fi
-    for i in $(ls *.tif) ; do
-        if [ ! -e mask/$i ]; then
-            gdalwarp -cutline ../myshape/wkt.shp -crop_to_cutline -dstalpha $i mask/$i
-        else
-            echo "Passing " $i
-        fi
-    done
-fi
+##4.- Mask raster with shape file
+#echo ${RED}"\nApplying Mask..."${NC}
+#if [ $BYTILE -ne "0" ]; then
+# echo "BY TILE"
+# cd $USERDIR"out/"
+# for d in $(ls -d */) ; do
+# cd $d
+# if [ ! -d "mask" ]; then
+# mkdir mask
+# fi
+# for i in $(ls *.tif) ; do
+# if [ ! -e mask/$i ]; then
+# gdalwarp -cutline $USERDIR"myshape/wkt.shp" -crop_to_cutline -dstalpha $i mask/$i
+# else
+# echo "Passing " $i
+# fi
+# done
+# cd ..
+# done
+#else
+# echo "BY DATE"
+# cd $USERDIR"out/"
+# if [ ! -d "mask" ]; then
+# mkdir mask
+# fi
+# for i in $(ls *.tif) ; do
+# if [ ! -e mask/$i ]; then
+# gdalwarp -cutline ../myshape/wkt.shp -crop_to_cutline -dstalpha $i mask/$i
+# else
+# echo "Passing " $i
+# fi
+# done
+#fi
-#5.- Delete images with few data
-echo ${RED}"\nDeleting Images..."${NC}
-if [ $BYTILE -ne "0" ]; then
-    echo "BY TILE"
-    cd $USERDIR"out/"
-    for i in $(ls -d */) ; do
-        cd $i"mask"
-        echo $i
-        if [ ! -d "SCL" ]; then
-            mkdir SCL
-        fi
-        cp *SCL_60m.tif SCL
-        cd SCL
-        ls *.tif | parallel --jobs 4 imageMissingData.py {} $USERDIR"findproducts.json" 70
-        if [ ! -d "../TCI" ]; then
-            mkdir ../TCI
-        fi
-        for scl in $(ls *.tif)
-        do
-            fileprefix=$(echo $scl | (cut -d"_" -f1))
-            cp ../$fileprefix"_TCI_10m.tif" ../TCI/$fileprefix"_TCI_10m.tif"
-        done
-        cd $USERDIR"out/"
-    done
-else
-    echo "BY DATE"
-    cd $USERDIR"out/mask/"
-    if [ ! -d "SCL" ]; then
-        mkdir SCL
-    fi
-    cp *SCL_60m.tif SCL
-    cd SCL
-    ls *.tif | parallel --jobs 4 imageMissingData.py {} $USERDIR"findproducts.json" 70
-    if [ ! -d "../TCI" ]; then
-        mkdir ../TCI
-    fi
-    for scl in $(ls *.tif)
-    do
-        fileprefix=$(echo $scl | (cut -d"_" -f1))
-        cp ../$fileprefix"_TCI_10m.tif" ../TCI/$fileprefix"_TCI_10m.tif"
-        cd ../TCI
-    done
-fi
+##5.- Delete images with few data
+#echo ${RED}"\nDeleting Images..."${NC}
+#if [ $BYTILE -ne "0" ]; then
+# echo "BY TILE"
+# cd $USERDIR"out/"
+# for i in $(ls -d */) ; do
+# cd $i"mask"
+# echo $i
+# if [ ! -d "SCL" ]; then
+# mkdir SCL
+# fi
+# cp *SCL_60m.tif SCL
+# cd SCL
+# ls *.tif | parallel --jobs 4 imageMissingData.py {} $USERDIR"findproducts.json" 70
+# if [ ! -d "../TCI" ]; then
+# mkdir ../TCI
+# fi
+# for scl in $(ls *.tif)
+# do
+# fileprefix=$(echo $scl | (cut -d"_" -f1))
+# cp ../$fileprefix"_TCI_10m.tif" ../TCI/$fileprefix"_TCI_10m.tif"
+# done
+# cd $USERDIR"out/"
+# done
+#else
+# echo "BY DATE"
+# cd $USERDIR"out/mask/"
+# if [ ! -d "SCL" ]; then
+# mkdir SCL
+# fi
+# cp *SCL_60m.tif SCL
+# cd SCL
+# ls *.tif | parallel --jobs 4 imageMissingData.py {} $USERDIR"findProducts.json" 70
+# if [ ! -d "../TCI" ]; then
+# mkdir ../TCI
+# fi
+# for scl in $(ls *.tif)
+# do
+# fileprefix=$(echo $scl | (cut -d"_" -f1))
+# cp ../$fileprefix"_TCI_10m.tif" ../TCI/$fileprefix"_TCI_10m.tif"
+# cd ../TCI
+# done
+#fi
-#6.-Creating Thumbnails
-#echo ${RED}"Creating Thumbnails"${NC}
-#if [ $BYTILE -ne "0" ]; then
-# echo "BY TILE"
-# cd $USERDIR"out/"
-# for i in $(ls -d */) ; do
-# cd $i"mask"
-# if [ ! -d "SCL/thumbnails" ]; then
-# mkdir SCL/thumbnails
-# fi
-# if [ ! -d "TCI/thumbnails" ]; then
-# mkdir TCI/thumbnails
-# fi
-# cd TCI
-# mogrify -format jpg -quality 100 *.tif
-# mogrify -path ./thumbnails -resize 800x600! *.jpg
-# cd thumbnails
-# ffmpeg -pattern_type glob -i "*.jpg" -r 1 -vcodec libx264 $USERDIR"video.mp4"
-# cd $USERDIR"out/"
-# done
-#else
-# echo "BY DATE"
-# cd $USERDIR"out/mask/"
-# if [ ! -d "SCL/thumbnails" ]; then
-# mkdir SCL/thumbnails
-# fi
-# if [ ! -d "TCI/thumbnails" ]; then
-# mkdir TCI/thumbnails
-# fi
-# cd TCI
-# mogrify -format jpg -quality 100 *.tif
-# mogrify -path ./thumbnails -resize 10% *.jpg
-#fi
+##6.-Creating Thumbnails
+##echo ${RED}"Creating Thumbnails"${NC}
+##if [ $BYTILE -ne "0" ]; then
+## echo "BY TILE"
+## cd $USERDIR"out/"
+## for i in $(ls -d */) ; do
+## cd $i"mask"
+## if [ ! -d "SCL/thumbnails" ]; then
+## mkdir SCL/thumbnails
+## fi
+## if [ ! -d "TCI/thumbnails" ]; then
+## mkdir TCI/thumbnails
+## fi
+## cd TCI
+## mogrify -format jpg -quality 100 *.tif
+## mogrify -path ./thumbnails -resize 800x600! *.jpg
+## cd thumbnails
+## ffmpeg -pattern_type glob -i "*.jpg" -r 1 -vcodec libx264 $USERDIR"video.mp4"
+## cd $USERDIR"out/"
+## done
+##else
+## echo "BY DATE"
+## cd $USERDIR"out/mask/"
+## if [ ! -d "SCL/thumbnails" ]; then
+## mkdir SCL/thumbnails
+## fi
+## if [ ! -d "TCI/thumbnails" ]; then
+## mkdir TCI/thumbnails
+## fi
+## cd TCI
+## mogrify -format jpg -quality 100 *.tif
+## mogrify -path ./thumbnails -resize 10% *.jpg
+##fi
-#7.-Extract SCL information
-echo ${RED}"Extracting SCL information..."${NC}
+##7.-Extract SCL information
+#echo ${RED}"Extracting SCL information..."${NC}
+#if [ $BYTILE -ne "0" ]; then
+# echo "BY TILE"
+# cd $USERDIR"out/"
+# for i in $(ls -d */) ; do
+# cd ../out/$i"mask/SCL"
+# if [ ! -d "json" ]; then
+# mkdir json
+# fi
+# SCLimageListToJSON.sh $USERDIR"out/"$i"mask/SCL/" $USERDIR"findProducts.json" 6
+# mv *.json json
+# cd json
+# tile=$(echo $i | cut -d "/" -f1)
+# mergeL2ASCL_JSON.py $USERDIR"out/"$i"mask/SCL/json/" > $USERDIR$tile"_sclData.json"
+# echo $tile
+# echo "_sclData.json"
+# cd $USERDIR"out/"
+# done
+#else
+# echo "BY DATE"
+# cd $USERDIR"out/mask/SCL/"
+# if [ ! -d "json" ]; then
+# mkdir json
+# fi
+# GI_SCLimageListToJSON.sh $USERDIR"out/mask/SCL/" $USERDIR"findProducts.json" 6
+# mv *.json json
+# cd json
+# mergeL2ASCL_JSON.py $USERDIR"out/mask/SCL/json/" > $USERDIR"scl_data.json"
+#fi
+#8.-Extract SCL morphology
+echo ${RED}"Extracting SCL morphology..."${NC}
if [ $BYTILE -ne "0" ]; then
    echo "BY TILE"
    cd $USERDIR"out/"
    for i in $(ls -d */) ; do
        cd ../out/$i"mask/SCL"
        if [ ! -d "json" ]; then
            mkdir json
        fi
        SCLimageListToJSON.sh $USERDIR"out/"$i"mask/SCL/" $USERDIR"findProducts.json" 6
        mv *.json json
        cd json
        tile=$(echo $i | cut -d "/" -f1)
        mergeL2ASCL_JSON.py $USERDIR"out/"$i"mask/SCL/json/" > $USERDIR$tile"_sclData.json"
        echo $tile
        echo "_sclData.json"
        cd $USERDIR"out/"
    done
else
    echo "BY DATE"
    cd $USERDIR"out/mask/SCL/"
-    if [ ! -d "json" ]; then
-        mkdir json
-    fi
-    SCLimageListToJSON.sh $USERDIR"out/mask/SCL/" $USERDIR"findProducts.json" 6
-    mv *.json json
-    cd json
-    mergeL2ASCL_JSON.py $USERDIR"out/mask/SCL/json/" > $USERDIR"scl_data.json"
+    if [ ! -d "morph" ]; then
+        mkdir morph
+    fi
+    GI_SCLmorphImageListToJSON.sh $USERDIR"out/mask/SCL/" $USERDIR"findProducts.json" 6
+    mv *.json morph
+    cd morph
+    mergeL2ASCL_JSON.py $USERDIR"out/mask/SCL/morph/" > $USERDIR"scl_morph.json"
fi
exit
##8.- Split SCL
...
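For orientation, a hedged example of how this stage is driven; the directory path is an illustrative placeholder, and the two positional arguments are presumably bound to USERDIR and BYTILE in the part of the script above this hunk. The deleted GI_runAnalysis.sh above called it the same way, with 0 selecting the BY DATE branch.

# Illustrative only: process one region directory in BY DATE mode.
GI_L2ASCL_AreaProcessing.sh /home/user/greenIndex/data/09-012/ 0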
scripts/SEDEMA/GI_L2ASCLtoJSON.py (new file, mode 0 → 100755) @ 975095e6
#!/usr/bin/python
# -*- coding: utf-8 -*-
import osgeo.ogr as ogr
import osgeo.osr as osr
import numpy as np
import gdal
import sys
import os
import json
from collections import Counter
#from collections import OrderedDict

########### read configuration file ################
dirname = os.path.dirname(__file__)
configfile = os.path.join(dirname, '../config/config.json')
with open(configfile, 'r') as f:
    config = json.load(f)
#print(config['PATHS']['PATH_GEOSENTINEL'])
SENTINEL_PATH = config['PATHS']['PATH_GEOSENTINEL']
###########################################################
sys.path.append(SENTINEL_PATH)
from geosentinel import rasterWkt

CLASS_MAP = {0:0, 1:1, 2:2, 3:3, 4:4, 5:5, 6:6, 7:7, 8:8, 9:8, 10:8, 11:9}
#-------------------------------------------------------------------------------
def L2ASCLtoDict(filename, wkt, wktsub):
    '''
    L2ASCLtoDict
    '''
    classMap = lambda x: [CLASS_MAP[xx] for xx in x]
    inputImage = gdal.Open(filename)
    rows, cols, geotransform = inputImage.RasterYSize, inputImage.RasterXSize, inputImage.GetGeoTransform()
    data = inputImage.GetRasterBand(1).ReadAsArray(0, 0, cols, rows)
    data1 = np.bitwise_and(data, rasterWkt.getPolygonArray(inputImage, wkt))
    count1 = Counter([])
    for r in data:
        count1 = count1 + Counter(classMap(r))
    sclDict1 = {str(k): v for k, v in dict(count1).items()}
    if wktsub != "":
        data2 = np.bitwise_and(data1, np.invert(rasterWkt.getPolygonArray(inputImage, wktsub)))
        count2 = Counter([])
        for r in data2:
            count2 = count2 + Counter(classMap(r))
        sclDict2 = {str(k): v for k, v in dict(count2).items()}
    else:
        sclDict2 = sclDict1
    return {"areaTotal": sclDict1, "areaNoConservacion": sclDict2}
#-------------------------------------------------------------------------------
def main(argv):
    if len(sys.argv) != 3:
        print("Usage: " + argv[0] + "<JSON SCL> <JSON Cfg>")
    else:
        jsonFile = open(argv[2]).read()
        cfg = json.loads(jsonFile)
        dataDict = L2ASCLtoDict(argv[1], cfg['wkt'], cfg["wkt2"])
        print(json.dumps(dataDict, sort_keys=True))
#-------------------------------------------------------------------------------
if __name__ == "__main__":
    main(sys.argv)
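A hedged usage sketch (file names are placeholders, not part of the commit): despite the usage string, the first argument is an SCL GeoTIFF opened with GDAL and the second is a JSON configuration providing the "wkt" and "wkt2" polygons; the per-class pixel counts are printed as JSON on stdout.

# Illustrative invocation; output is redirected to a JSON file, as the wrapper script below does.
GI_L2ASCLtoJSON.py 20201210_SCL_60m.tif findProducts.json > 20201210_SCL_60m.json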
scripts/SEDEMA/GI_SCLimageListToJSON.sh (new file, mode 0 → 100755) @ 975095e6
#!/bin/sh
DIR=$1
CGF=$2
JOBS=${3:-1}
cd $DIR
ls *.tif | parallel --jobs $JOBS 'GI_L2ASCLtoJSON.py '{} $2' > $(echo '{}' | cut -d"." -f1).json'
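A hedged example call (paths are placeholders): the script expects a directory of .tif files, a findProducts.json configuration, and an optional GNU parallel job count that defaults to 1. GI_SCLmorphImageListToJSON.sh below follows the same convention but runs GI_getComponentsMorphology.py instead.

# Illustrative only: write one <image>.json per SCL GeoTIFF, using 4 parallel jobs.
GI_SCLimageListToJSON.sh /data/09-012/out/mask/SCL/ /data/09-012/findProducts.json 4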
scripts/SEDEMA/GI_SCLmorphImageListToJSON.sh (new file, mode 0 → 100755) @ 975095e6
#!/bin/sh
DIR=$1
CGF=$2
JOBS=${3:-1}
cd $DIR
ls *.tif | parallel --jobs $JOBS 'GI_getComponentsMorphology.py '{} $2' > $(echo '{}' | cut -d"." -f1).json'
scripts/GI_createDirectories.py → scripts/SEDEMA/GI_createDirectories.py (renamed) @ 975095e6
@@ -14,16 +14,17 @@ def createDirectories(datafile, cfgfile, level1="CVE_ENT", level2="CVE_MUN"):
        data = csv.DictReader(csvfile, delimiter=',')
        for row in data:
-            mun = row[level1]
-            ent = row[level2]
-            dirname = ent + mun
+            mun = row[level2]
+            ent = row[level1]
+            dirname = ent.zfill(2) + "-" + mun.zfill(3)
            print(ent)
#            for k in cfg.keys():
#                p[k]=cfg[k]
            print(os.getcwd())
            cfg["projectDir"] = os.getcwd() + "/" + dirname + "/"
-            cfg["endDate"] = datetime.datetime.now().strftime("%Y%m%d")
+#            cfg["endDate"]=datetime.datetime.now().strftime("%Y%m%d")
            cfg["wkt"] = row["WKT"]
+            cfg["wkt2"] = row["WKT2"]
            if not os.path.exists(dirname):
                os.mkdir(dirname)
            with open(dirname + '/findProducts.json', 'w') as outfile:
...
@@ -31,11 +32,14 @@ def createDirectories(datafile, cfgfile, level1="CVE_ENT", level2="CVE_MUN"):
#===============================================================================
def main(argv):
-    if len(sys.argv) != 3:
+    if len(sys.argv) != 3 and len(sys.argv) != 5:
        print(len(sys.argv))
        print("Usage: " + argv[0] + " <csv> <cfg>")
-    else:
+    elif (len(sys.argv) == 3):
        createDirectories(argv[1], argv[2])
+    else:
+        createDirectories(argv[1], argv[2], argv[3], argv[4])
if __name__ == "__main__":
...
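A hedged example of the new optional-argument form (file names are placeholders; the column defaults CVE_ENT and CVE_MUN come from the function signature shown in the hunk header): with five arguments, the third and fourth override the level1/level2 CSV columns used to build the per-region directory names.

# Two-argument form, using the default CVE_ENT / CVE_MUN columns.
GI_createDirectories.py regions.csv settings.json
# Five-argument form, overriding the column names (illustrative values).
GI_createDirectories.py regions.csv settings.json CVE_ENT CVE_MUN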
scripts/SEDEMA/GI_getComponentsMorphology.py (new file, mode 0 → 100755) @ 975095e6
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import os
import json
import csv
import gdal
import numpy as np
import osgeo.ogr as ogr, osr
sys.path.append("/home/geoint/GeoSentinel")
from geosentinel import rasterWkt
from skimage import measure

AREAm = 60*60          #m²
AREAkm = AREAm/1000000 #km²
DISTANCE = 60          #m
#===============================================================================
def getComponentsMorphology(image_filename, wkt, wktsub):
    inputImage = gdal.Open(image_filename)
    rows, cols, geotransform = inputImage.RasterYSize, inputImage.RasterXSize, inputImage.GetGeoTransform()
    data = inputImage.GetRasterBand(1).ReadAsArray(0, 0, cols, rows)
    data1 = np.bitwise_and(data, rasterWkt.getPolygonArray(inputImage, wkt))
    if wktsub != "":
        data2 = np.bitwise_and(data1, np.invert(rasterWkt.getPolygonArray(inputImage, wktsub)))
    else:
        data2 = data1

    geometry = ogr.CreateGeometryFromWkt(wkt)
    inSpatialRef = osr.SpatialReference()
    inSpatialRef.ImportFromEPSG(4326)
    outSpatialRef = osr.SpatialReference()
    outSpatialRef.ImportFromEPSG(32514)
    coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
    geometry.Transform(coordTrans)
    wktArea = geometry.GetArea()

    classDict = {}
    for i in range(12):
        myDict = dict()
        dataC = data2 == i
        myDict["area_tot"] = wktArea/1000000
        myDict["area_verde_tot"] = np.count_nonzero(dataC)
        if myDict["area_verde_tot"] == 0.0:
            continue
        myDict["area_verde_tot"] = myDict["area_verde_tot"]*AREAkm
        components_labels = measure.label(dataC)
        myDict["nAreas"] = int(np.max(components_labels))
        properties = measure.regionprops(components_labels)
        myDict["gf1"] = 1/(myDict["area_verde_tot"]/myDict["nAreas"])
#        myDict["gf2"]
        myDict["gdv"] = myDict["area_verde_tot"]/myDict["area_tot"]
        myDict["vp"] = myDict["area_verde_tot"]
        myDict["tp"] = myDict["area_verde_tot"]/myDict["nAreas"]
        myDict["fp"] = sum([p["perimeter"]/p["area"] for p in properties])/myDict["nAreas"]
        myDict["alp"] = sum([0 if p["minor_axis_length"] == 0 else p["major_axis_length"]/p["minor_axis_length"] for p in properties])/myDict["nAreas"]
        classDict[str(i)] = myDict
#        print(myDict)
#    print(classDict)
    return classDict
#===============================================================================
def main(argv):
    if len(sys.argv) != 3:
        print(argv[0] + " <image.tif> <cfg.json>")
    else:
        with open(argv[2]) as jsonFile:
            cfg = json.load(jsonFile)
        dataDict = getComponentsMorphology(argv[1], cfg["wkt"], cfg["wkt2"])
        print(json.dumps(dataDict, sort_keys=True))

if __name__ == "__main__":
    main(sys.argv)
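A hedged invocation sketch (file names are placeholders): the script expects a masked SCL GeoTIFF and a JSON configuration containing "wkt" and "wkt2", and prints the per-class morphology dictionary as JSON on stdout; note the GeoSentinel path is hard-coded to /home/geoint/GeoSentinel in this copy.

# Illustrative only.
GI_getComponentsMorphology.py 20201210_SCL_60m.tif findProducts.json > 20201210_morph.json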
scripts/SEDEMA/GI_getComponentsMorphology_csv.py (new file, mode 0 → 100755) @ 975095e6
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import os
import json
import csv
import gdal
import numpy as np
import osgeo.ogr as ogr, osr
sys.path.append("/home/mchc/git/GeoSentinel")
from geosentinel import rasterWkt
from skimage import measure

AREAm = 60*60          #m²
AREAkm = AREAm/1000000 #km²
DISTANCE = 60          #m
#===============================================================================
def getComponentsMorphology(image_filename, wktlist):
    inputImage = gdal.Open(image_filename)
    rows, cols, geotransform = inputImage.RasterYSize, inputImage.RasterXSize, inputImage.GetGeoTransform()
    data = inputImage.GetRasterBand(1).ReadAsArray(0, 0, cols, rows)
    areaList = []
    for key, value in wktlist.items():
        print(key)
        dataMask = np.bitwise_and(data, rasterWkt.getPolygonArray(inputImage, value))
        geometry = ogr.CreateGeometryFromWkt(value)
        inSpatialRef = osr.SpatialReference()
        inSpatialRef.ImportFromEPSG(4326)
        outSpatialRef = osr.SpatialReference()
        outSpatialRef.ImportFromEPSG(32514)
        coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
        geometry.Transform(coordTrans)
        wktArea = geometry.GetArea()
        print(key, wktArea/1000000)
        myDict = dict()
        myDict["clave"] = key
        dataC = dataMask != 0  #==i
        myDict["area_tot"] = wktArea/1000000
        myDict["area_verde_tot"] = np.count_nonzero(dataC)
        if myDict["area_verde_tot"] == 0.0:
            continue
        myDict["area_verde_tot"] = myDict["area_verde_tot"]*AREAkm
        components_labels = measure.label(dataC)
        myDict["nAreas"] = int(np.max(components_labels))
        properties = measure.regionprops(components_labels)
        myDict["gf1"] = 1/(myDict["area_verde_tot"]/myDict["nAreas"])
##        myDict["gf2"]
        myDict["gdv"] = myDict["area_verde_tot"]/myDict["area_tot"]
        myDict["vp"] = myDict["area_verde_tot"]
        myDict["tp"] = myDict["area_verde_tot"]/myDict["nAreas"]
        myDict["fp"] = sum([p["perimeter"]/p["area"] for p in properties])/myDict["nAreas"]
##        print("axis", [p["minor_axis_length"] for p in properties])
        myDict["alp"] = sum([0 if p["minor_axis_length"] == 0 else p["major_axis_length"]/p["minor_axis_length"] for p in properties])/myDict["nAreas"]
#        classDict[str(i)]=myDict
        areaList.append(myDict)
        print(myDict)
    with open('morphology.csv', 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=areaList[0].keys())
        writer.writeheader()
        for i in areaList:
            writer.writerow(i)
##    print(classDict)
#    return classDict
#===============================================================================
def main(argv):
    if len(sys.argv) != 3:
        print("Usage: " + argv[0] + " <reference_image.tif> <wkt_list.csv>")
    else:
        with open(argv[2]) as csvfile:
            reader = csv.DictReader(csvfile)
            areasDic = {row["CVEGEO"]: row["WKT"] for row in reader}
        dataDict = getComponentsMorphology(argv[1], areasDic)

if __name__ == "__main__":
    main(sys.argv)
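A hedged invocation sketch (file names are placeholders): the CSV must provide CVEGEO and WKT columns, and the metrics are written to morphology.csv in the current directory; the GeoSentinel path is hard-coded to /home/mchc/git/GeoSentinel here.

# Illustrative only: one row of metrics per CVEGEO polygon over the reference image.
GI_getComponentsMorphology_csv.py reference_SCL_60m.tif wkt_list.csv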
scripts/GI_getMinMax.py → scripts/SEDEMA/GI_getMinMax.py (renamed) @ 975095e6
@@ -8,32 +8,58 @@ import csv
def getMinMax(clase):
    polylist = []
+    morphList = []
    for item in os.listdir("."):
        if os.path.isdir(item):
            print(item)
            with open(os.getcwd() + "/" + item + "/scl_data.json") as json_file:
                dataSCL = json.load(json_file)
+            with open(os.getcwd() + "/" + item + "/scl_morph.json") as json_file:
+                dataMorph = json.load(json_file)
            with open(os.getcwd() + "/" + item + "/findProducts.json") as json_file:
                dataWKT = json.load(json_file)
            wkt = dataWKT["wkt"]
-            values = []
-            for k, v in dataSCL.items():
-                values.append(v[clase])
-            minval = min(values)
-            maxval = max(values)
-            polylist.append({"clave": item, "wkt": wkt, "min": minval, "max": maxval})
+            polyDict = {"clave": item, "wkt": wkt}
+            cDict = {k: v["areaTotal"][clase] for k, v in dataSCL.items()}
+            maxkey = max(cDict, key=cDict.get)
+            maxval = dataSCL[maxkey]["areaTotal"][clase]
+            polyDict["areaTotal"] = maxval
+            cDict = {k: v["areaNoConservacion"][clase] for k, v in dataSCL.items() if clase in v["areaNoConservacion"]}
+            if len(cDict) > 0 and clase in dataSCL[maxkey]["areaNoConservacion"]:
+                maxkey = max(cDict, key=cDict.get)
+                maxval = dataSCL[maxkey]["areaNoConservacion"][clase]
+            else:
+                maxval = 0
+            polyDict["areaNoConservacion"] = maxval
+            polylist.append(polyDict)
+            tmpDict = dataMorph[maxkey][clase]
+            tmpDict["wkt"] = wkt
+            tmpDict["clave"] = item
+            morphList.append(dataMorph[maxkey][clase])
    with open('minmax_' + clase + '.csv', 'w') as outfile:
        writer = csv.DictWriter(outfile, fieldnames=polylist[0].keys())
        writer.writeheader()
        for item in polylist:
            writer.writerow(item)
#        json.dump(polylist, outfile)
+    with open('morphology_' + clase + '.csv', 'w') as outfile:
+        writer = csv.DictWriter(outfile, fieldnames=morphList[0].keys())
+        writer.writeheader()
+        for item in morphList:
+            writer.writerow(item)
#===============================================================================
def main(argv):
...
scripts/SEDEMA/GI_rasterShp.py (new file, mode 0 → 100755) @ 975095e6
#!/usr/bin/python
# -*- coding: utf-8 -*-
#"POLYGON((-89.65513229370117 21.048938357786966,-89.62852478027342 21.047816903874505,-89.6261215209961 21.03339745836918,-89.6465492248535 21.022822311328852,-89.65873718261717 21.039325621609095,-89.65513229370117 21.048938357786966))"
import osgeo.ogr as ogr
import osgeo.osr as osr
import numpy as np
import gdal
import sys
from collections import Counter
#-------------------------------------------------------------------------------
def getPolygonArray(image, shp_filename):
    rows = image.RasterYSize
    cols = image.RasterXSize
    geotransform = image.GetGeoTransform()
    projection = image.GetProjection()
    sr = osr.SpatialReference(wkt=projection)
    sr4326 = osr.SpatialReference()
    sr4326.ImportFromEPSG(4326)
    driverMEM = gdal.GetDriverByName('MEM')
    driverMemory = ogr.GetDriverByName('Memory')
    driverTiff = gdal.GetDriverByName('GTiff')
    driverShape = ogr.GetDriverByName("ESRI Shapefile")
    #RASTER
    target_ds = driverMEM.Create('raster.tif', cols, rows, 1, gdal.GDT_Byte)  #MEM
    target_ds.GetRasterBand(1).SetNoDataValue(0)
    target_ds.GetRasterBand(1).WriteArray(np.zeros((rows, cols), dtype=np.uint8))
    target_ds.SetProjection(projection)
    target_ds.SetGeoTransform(geotransform)
    target_ds.GetRasterBand(1).FlushCache()
    #VECTOR
    dataSource = driverShape.Open(shp_filename, 0)
    # Check to see if shapefile is found.
    if dataSource is None:
        print('Could not open', shp_filename)
    else:
        print('Opened %s', shp_filename)
        layer = dataSource.GetLayer()
        featureCount = layer.GetFeatureCount()
        print("Number of features in", shp_filename, featureCount)
#    rast_ogr_ds = driverMemory.CreateDataSource( 'myshape.shp' )
#    rast_mem_lyr = rast_ogr_ds.CreateLayer( 'POLYGON', sr4326, ogr.wkbPolygon)
#    feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
#    feat.SetGeometry( ogr.CreateGeometryFromWkt(wkt_geom) )
#    rast_mem_lyr.CreateFeature( feat )
    err = gdal.RasterizeLayer(target_ds, [1], layer, burn_values=[255])
    if err != 0:
        print(err)
        gdaltest.post_reason('got non-zero result code from RasterizeLayer')
        return 'fail'
    target_ds.GetRasterBand(1).FlushCache()
    np_polygon = target_ds.GetRasterBand(1).ReadAsArray(0, 0, cols, rows)
    count = Counter([])
#    for r in np_polygon:
#        count = count + Counter(r)
#    colorDict = {str(k):v for k,v in dict(count).items()}
#    print(colorDict)
    target_ds = None
    return np_polygon
#-------------------------------------------------------------------------------
def rasterWkt(shpfile, tifffile, outputfile):
    '''
    Draw WKT Polygon into a Image
    '''
    inputImage = gdal.Open(tifffile)
    rows = inputImage.RasterYSize
    cols = inputImage.RasterXSize
    geotransform = inputImage.GetGeoTransform()
    projection = inputImage.GetProjection()
    sr = osr.SpatialReference()
    sr.ImportFromWkt(projection)
    driverTiff = gdal.GetDriverByName('GTiff')
    output = driverTiff.Create(outputfile, cols, rows, inputImage.RasterCount, gdal.GDT_Byte)
    np_polygon = getPolygonArray(inputImage, shpfile)
    for b in range(1, output.RasterCount + 1):
        np_input = inputImage.GetRasterBand(b).ReadAsArray(0, 0, cols, rows)
        output.GetRasterBand(b).SetNoDataValue(0)
        output.GetRasterBand(b).WriteArray(np_polygon)  #np.bitwise_and(np_polygon, np_input))
        output.GetRasterBand(b).FlushCache()
    output.SetGeoTransform(geotransform)
    output.SetProjection(projection)
    #Close files
    polygonRaster = None
    output = None
    inputImage = None

def main(argv):
    rasterWkt(argv[1], argv[2], argv[3])

if __name__ == "__main__":
    main(sys.argv)
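A hedged invocation sketch (file names are placeholders): the arguments are the shapefile, the reference GeoTIFF whose grid and projection are copied, and the output GeoTIFF into which the rasterized polygon is burned.

# Illustrative only.
GI_rasterShp.py myshape/wkt.shp 20201210_TCI_10m.tif wkt_mask.tif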
scripts/geoJsonToJsonWKT.py → scripts/SEDEMA/geoJsonToJsonWKT.py @ 975095e6 (file moved, no content changes)
scripts/greenIndexAnalisis.py (new file, mode 0 → 100755) @ 975095e6
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import os
import csv
import json
from datetime import date, datetime
sys.path.append("/home/mchc/git/GeoSentinel")
from geosentinel import findProducts

csv.field_size_limit(sys.maxsize)
#===============================================================================
def greenIndexAnalisis(csvfile, cfg):
    today = date.today()
    startDate = datetime.strptime(cfg["startDate"], '%Y%m%d')
    regionList = {}
    with open(csvfile) as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            regionList[row["CVE_ENT"]] = row["WKT"]
    os.chdir(cfg["projectDir"])
    if not os.path.isdir(cfg["productLevel"]):
        os.mkdir(cfg["productLevel"])
    if not os.path.isdir("out"):
        os.mkdir("out")
    os.chdir("out")
    for y in range(today.year, startDate.year - 1, -1):
        year = str(y)
        if not os.path.isdir(year):
            os.mkdir(year)
        os.chdir(year)
        for key, value in regionList.items():
            settings = cfg
            settings["wkt"] = value
            settings["endDate"] = date.today().strftime("%Y%m%d")
            findProducts.findSentinelProducts(settings)
#===============================================================================
def main(argv):
    if len(argv) != 3:
        print("Usage: " + argv[0] + "<regionList.csv> <settings.json>")
    else:
        with open(argv[2]) as json_file:
            cfg = json.load(json_file)
        greenIndexAnalisis(argv[1], cfg)

if __name__ == "__main__":
    main(sys.argv)
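A hedged sketch of a minimal settings.json for this script: the key names are the ones read in the code above, the values are placeholders, and geosentinel's findSentinelProducts may expect additional keys not shown here. runAnalisis.sh below then drives the script with data/00ent.csv and this settings file.

# Illustrative only: write a minimal configuration with the keys greenIndexAnalisis.py reads.
cat > data/settings.json <<'EOF'
{
  "startDate": "20190101",
  "projectDir": "/home/user/greenIndex/",
  "productLevel": "L2A"
}
EOF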
scripts/runAnalisis.sh (new file, mode 0 → 100644) @ 975095e6
#!/bin/sh
greenIndexAnalisis.py data/00ent.csv data/settings.json