Last active
June 4, 2021 21:52
-
-
Save aolinto/86d2091a5337c1bbcec888e37ca884b3 to your computer and use it in GitHub Desktop.
Copernicus Multi Observation Global Ocean 3D Temperature Salinity
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# =======================================================================================================================
# Antonio Olinto Avila-da-Silva, Instituto de Pesca, Brasil
# version 2021/04/19
# https://gist.github.com/aolinto/86d2091a5337c1bbcec888e37ca884b3
# script to process Multi Observation Global Ocean 3D Temperature Salinity
# product MULTIOBS_GLO_PHY_TSUV_3D_MYNRT_015_012
# nc file downloaded from
# https://resources.marine.copernicus.eu/?option=com_csw&view=details&product_id=MULTIOBS_GLO_PHY_TSUV_3D_MYNRT_015_012
# dataset-armor-3d-rep-monthly
# the script will transform the nc file to a brick raster, read temperature and
# salinity data for a given area and depth, compute their statistics and write
# them into a single csv file named COPERNICUS_to-so.csv
# nc help http://geog.uoregon.edu/GeogR/topics/netCDF-read-ncdf4.html
# =======================================================================================================================

# load libraries ----
# install.packages(c("ncdf4", "rgdal", "maptools", "raster"))
# ncdf4 needs libnetcdf-dev netcdf-bin in Linux
# rgdal needs gdal-bin proj-bin libgdal-dev libproj-dev in Linux
# NOTE(review): rgdal and maptools were retired from CRAN in 2023;
# consider migrating to sf/terra when this script is next updated.
library(rgdal)
library(maptools)
library(ncdf4)
library(raster)

# set workspace ----
# clear every object from the workspace; deliberate for this standalone
# script (do not keep this in code that is sourced from other scripts)
rm(list = ls())

# set working directory: indicate the path to the folder holding the nc file
setwd("/.../Multiobs")

# verify the existence of the COPERNICUS_to-so.csv output file
# caution! new data will be appended to this file if it already exists
file.exists("COPERNICUS_to-so.csv")
# if TRUE choose one of these options before running the extraction:
#~ file.rename("COPERNICUS_to-so.csv", "COPERNICUS_to-so.old")
#~ file.remove("COPERNICUS_to-so.csv")
# load shapefiles ----

# study area: indicate the location and name of the shapefile
shp.area <- readOGR("/home/antonio/MEGAsync/MobileOffice/Estudos/Camarão-branco/Shapes", "CamLegQdd5")
# reproject to geographic WGS84 so it matches the Copernicus grid
shp.area <- spTransform(shp.area, CRS("+proj=longlat +datum=WGS84"))
proj4string(shp.area)
summary(shp.area)

# land area: Brazil boundaries from GADM
# (level 0 = country outline, level 2 = municipalities)
shp.land0 <- getData('GADM', country = 'BRA', level = 0)
shp.land0 <- spTransform(shp.land0, CRS("+proj=longlat +datum=WGS84"))
proj4string(shp.land0)
shp.land2 <- getData('GADM', country = 'BRA', level = 2)
shp.land2 <- spTransform(shp.land2, CRS("+proj=longlat +datum=WGS84"))
proj4string(shp.land2)

# plot study area and land as a visual sanity check
# (use TRUE/FALSE, never the reassignable shorthands T/F)
plot(shp.area, col = "lightblue", axes = TRUE)
plot(shp.land2, border = "darkgreen", col = "gray", add = TRUE)
# check ncfile information ----
# open a read-only handle on the downloaded Copernicus netCDF file
nc.data <- nc_open("dataset-armor-3d-rep-monthly_1618679649385.nc")
nc.data
#. list of the nc variable names (per this product: "to" temperature, "so" salinity)
var.names <- attributes(nc.data$var)$names
var.names
#. variable settings (attributes such as units and fill value)
ncatt_get(nc.data,var.names[1])
ncatt_get(nc.data,var.names[2])
#. list of the nc dimension names
attributes(nc.data$dim)$names
#. dimensions info
ncatt_get(nc.data,"time")
ncatt_get(nc.data,"time","units")
# time values are divided by 24 before as.Date(), i.e. converted from
# hours to days since the 1950-01-01 origin
(time_min <- ncatt_get(nc.data,"time","valid_min")$value)
as.Date(time_min/24, origin="1950-01-01")
(time_max <- ncatt_get(nc.data,"time","valid_max")$value)
as.Date(time_max/24, origin="1950-01-01")
ncvar_get(nc.data,"time")
as.Date(ncvar_get(nc.data,"time")/24, origin="1950-01-01")
ncatt_get(nc.data,"depth")
# depth levels; kept in the workspace because the extraction loops below
# iterate over them
var.depths <- ncvar_get(nc.data,"depth")
var.depths
ncatt_get(nc.data,"latitude")
ncvar_get(nc.data,"latitude")
ncatt_get(nc.data,"longitude")
ncvar_get(nc.data,"longitude")
#. close the handle; brick() below re-opens the file itself
nc_close(nc.data)
# ==================================
# begin loopings for data extraction
# ==================================
# For every depth layer and every variable ("to", "so"): build a raster brick,
# crop and mask it to the study area, then append per-month summary statistics
# (min / mean / median / max) to COPERNICUS_to-so.csv.
for (dl in seq_along(var.depths)) { # looping for depths
  # depth range label; the deepest layer has no upper neighbour
  # (var.depths[dl + 1] would index out of bounds and yield "<depth>-NA"),
  # so it is labelled "<depth>+" instead
  dls <- if (dl < length(var.depths)) {
    paste0(var.depths[dl], "-", var.depths[dl + 1])
  } else {
    paste0(var.depths[dl], "+")
  }
  for (br in seq_along(var.names)) { # looping for variables
    # brickraster ----
    # create a brick raster for one variable / depth layer
    # (varname: to, so; level: depth layer 1 to 7: 0, 10, ..., 100)
    rst.data <- brick("dataset-armor-3d-rep-monthly_1618679649385.nc",
                      varname = var.names[br], level = dl)
    rst.data <- rotate(rst.data) # 0,360 to -180,180 longitude

    # crop data from the area ----
    # get the spatial extent of the shapefile
    ext.area <- extent(shp.area)
    # crop the raster to the area extent
    crp.data <- crop(rst.data, ext.area, snap = "out")
    # create a dummy raster with NAs
    crp.na <- setValues(crp.data[[1]], NA)
    # create a raster mask with the area boundaries
    rst.mask <- rasterize(shp.area, crp.na)
    # apply the mask to the raster with data
    msk.data <- mask(x = crp.data, mask = rst.mask)

    # layer time stamps; hoisted out of the monthly loop so getZ() is not
    # re-evaluated twice per layer
    z.dates <- getZ(rst.data)

    # looping for data statistical calculation
    for (i in seq_len(nlayers(rst.data))) { # looping year/month
      # year and month parsed from the layer date string (YYYY-MM-...)
      year <- as.numeric(substr(z.dates[i], 1, 4))
      month <- as.numeric(substr(z.dates[i], 6, 7))
      # statistics over the masked cells only
      sta.min <- cellStats(msk.data[[i]], stat = 'min', na.rm = TRUE)
      sta.mean <- cellStats(msk.data[[i]], stat = 'mean', na.rm = TRUE)
      sta.median <- median(na.omit(values(msk.data[[i]])))
      sta.max <- cellStats(msk.data[[i]], stat = 'max', na.rm = TRUE)
      # prepare final data set
      dat.output <- data.frame(var.depths[dl], dls, var.names[br], year, month,
                               sta.min, sta.mean, sta.median, sta.max)
      names(dat.output) <- c("depth", "depth_range", "variable", "year", "month",
                             "min", "mean", "median", "max")
      # save csv file: header only on the very first write, append afterwards
      # (change separator and decimal strings if necessary)
      fe <- file.exists("COPERNICUS_to-so.csv")
      write.table(dat.output, "COPERNICUS_to-so.csv", row.names = FALSE,
                  col.names = !fe, sep = ",", dec = ".", append = fe)
      # clean workspace
      rm(year, month, sta.min, sta.mean, sta.median, sta.max, dat.output, fe)
    } # finish looping for year/month
    rm(i, rst.data, ext.area, crp.data, crp.na, rst.mask, msk.data, z.dates)
  } # finish looping for variables
} # finish looping for depth levels
# ===================================
# finish loopings for data extraction
# ===================================
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment