Abdi-Basid ADAN

"The purpose of this document is to consolidate and improve the various R scripts used to perform the cited analyses."
Table of Contents
# 1 - PRELIMINARY INFORMATION:
# 2 - IMPORT, CONVERSION AND EXTRACTION OF NETCDF4 FILES:
# 3 - CONVERT POINTS TO COORDINATE WGS 1984:
# 4 - READ HDF5 AND JSON FILES:
# 5 - ACCESSING REANALYSIS, CLIMATE INDICES, AND OTHER CLIMATOLOGICAL DATA IN R:
# 6 - STOCHASTIC DOWNSCALING METHODS:
# 7 - PERFORM ANALYSES ON THE PIXEL:
# 1 - PRELIMINARY INFORMATION:
#| Downscaling: bringing coarse-scale global climate model (GCM) output to a fine spatial resolution.
#| Bias-Corrected Spatial Downscaling (BCSD), Constructed Analogues (CA), Climate Imprint (CI),
#| and Bias Correction/Constructed Analogues with Quantile mapping reordering (BCCAQ),
#| developed by the Pacific Climate Impacts Consortium (PCIC), Victoria, British Columbia, Canada.
#| Precipitation & Temperature, CORDEX-CMIP |
# precipitation: 1 kg.m-2.s-1 = 86400 mm/day | temperature: degrees Celsius = Kelvin - 273.15
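# A minimal sketch of these two unit conversions, assuming pr is a precipitation flux
# in kg m-2 s-1 and tas a temperature in Kelvin (hypothetical values):
pr  <- c(2.5e-05, 1.2e-04)   # kg m-2 s-1
tas <- c(300.15, 312.40)     # Kelvin
pr_mmday <- pr * 86400       # mm/day
tas_degC <- tas - 273.15     # degrees Celsius
cbind(pr_mmday, tas_degC)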
#
# 26 daily model predictors = SDSM DOWNSCALING METHOD
#
# 3 GCM generations: CMIP3 - CMIP5 - CMIP6
#
# 4 Shared Socioeconomic Pathways (SSP): SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5
# WorldClim Statistical Downscaling: Delta Method (spline spatial interpolation of anomalies (deltas))
#
# RCP 1.9: 1.5 °C - limits global warming to below 1.5 °C
# RCP 2.6: 2.0 °C - very low greenhouse gas emissions
# RCP 3.4: 2.4 °C
# RCP 4.5: 3.0 °C - stabilized before 2100
# RCP 6.0: 3.5 °C - stabilized after 2100
# RCP 7.5: 4.0 °C
# RCP 8.5: 5.0 °C - rising greenhouse gas emissions
#
# RCP : Representative Concentration Pathway
# SSP : Shared Socioeconomic Pathways
# CMIP : Coupled Model Intercomparison Project
#
#| Empirical/Statistical Downscaling : Delta method | Climgen
#| Dynamical Downscaling : Eta Model | PRECIS
#| Spatial Disaggregation
# 2 - IMPORT, CONVERSION AND EXTRACTION OF NETCDF4 FILES:
library(ncdf4)
ncpath <- "C:/Abdi-Basid ADAN/0.Doc.On.Going/1.Statistical Downscaling Project/Data/"
setwd(ncpath)
ncname <- "chirps-v2.0.1981.days_p05"
ncfname <- paste(ncpath, ncname, ".nc", sep="")
ncin <- nc_open(ncfname)
print(ncin)
attributes(ncin$dim)
attributes(ncin$var)
# PIXEL EXTRACTION:
require(sf); shpe <- read_sf("C:/Users/hp/Documents/ArcGIS Input/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp")
require(rgdal); shpe <- readOGR("C:/Users/hp/Documents/ArcGIS Input/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp")
#
require(raster)
crs(shpe)
pre1.brick = brick("chirps-v2.0.1981.days_p05.nc")
crs(pre1.brick)
shp = spTransform(shpe, crs(pre1.brick))
# pre1.mask = mask(pre1.brick, shp)
pre1.mask = crop(pre1.brick, shp)
class(pre1.mask )
dim(pre1.mask )
#
brk <- c(0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.25); length(brk)
require(Hmisc); col1 <- colorRampPalette(c("blue4","blue","cyan","green","yellow","red","red4"))(50)
x11(); plot(pre1.mask[[1]], xlim=c(41.5, 43.5), ylim=c(10.75, 12.75), col=col1)
plot(shpe[4], add=TRUE, col=rgb(0, 1, 0, 0.005), lwd=2, lty=2)
#
dim(pre1.mask )
pre1.df = as.data.frame(pre1.mask[[1]], xy=TRUE)
pre1.df = pre1.df[complete.cases(pre1.df),]
# head(pre1.df[pre1.df!= "NA"])
View(head(pre1.df,5)) ; View(tail(pre1.df,5))
dim(pre1.df)
names(pre1.df)
#
pre1.df = as.data.frame(pre1.mask, xy=TRUE)
pre1.df = pre1.df[complete.cases(pre1.df),]
head(pre1.df); tail(pre1.df)
dim(pre1.df)
View(head(t(pre1.df),5))
dim(t(pre1.df))
#
pre1.df.tim = t(pre1.df)
# pre1.df.tim = pre1.df.tim[-c(1:2),];View(pre1.df.tim)
View(head(pre1.df.tim,5))
dim(pre1.df.tim)
pre1.df.time = pre1.df.tim[-c(1:2),];
dim(pre1.df.time); View(head(pre1.df.time ,5))
#
require(lubridate); require(dplyr)
df <- data.frame(date = seq(as.Date("1981/1/1"), as.Date("1981/12/31"), "day"))
df$week <- floor_date(df$date, "week")
df$month <- floor_date(df$date, "month")
df$year <- floor_date(df$date, "year")
View(df)
dim(df)
#
data1 = data.frame(df,pre1.df.time )
dim(data1)
View(head(data1,5))
data1 %>%
  group_by(month) %>%
  summarize(monthly_total = sum(.data[[names(data1)[5]]]))   # monthly total of the first pixel column
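# A minimal sketch, assuming the pixel columns start at column 5 of data1, that aggregates
# every pixel column to monthly totals at once:
monthly <- data1 %>%
  select(-date, -week, -year) %>%          # keep the month key and the pixel columns
  group_by(month) %>%
  summarize(across(everything(), sum))     # monthly total for every pixel column
head(monthly)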
#
write.table(pre1.df.tim, "pixel.txt"); getwd()
#
rm(list=ls())
rm(list=ls())
rm(list=ls())
#
library(ncdf4)
library(lubridate)
library(raster)
library(tidyverse)
require("rgdal")
require("ggplot2")
require(sf)
require(ggmap)
library(reshape2)
library(dichromat)
library(spData)
library(sf)
library(mapview)
#
map <- st_read(system.file("shapes/boston_tracts.shp", package = "spData"), quiet = TRUE)
map$vble <- map$MEDV
mapview(map, zcol = "vble")
#
getwd()
setwd("C:/Users/hp/Downloads/RCM DATA");
getwd()
#
# "pr_AFR-22_CCCma-CanESM2_rcp85_r1i1p1_CCCma-CanRCM4_r2_day_20060101-20101231lo.nc"
# "C:\Users\hp\Downloads\GCM DATA\pr_day_CanESM5_historical_r1i1p1f1_gn_19800101-20141231_v20190429lo.nc"
#
nc_fname <- "pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc"
our_nc_data <- nc_open("pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc")   # correct name
# nc_ds <- nc_open(nc_fname)
# our_nc_data1 <- nc_open("C:/Users/hp/Documents/1-JAN U.nc")   # correct name
# Wind direction from the u/v components: WD = (180/pi) * atan2(vrb1, vrb) + 180 (degrees; for wind-rose plots, otherwise omit)
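# A minimal sketch of that conversion, assuming u and v are the eastward and northward
# wind components in m/s (hypothetical values):
u <- c(2.0, -3.5); v <- c(1.0, 0.5)
ws <- sqrt(u^2 + v^2)                    # wind speed (m/s)
wd <- (180 / pi) * atan2(u, v) + 180     # meteorological wind direction (degrees)
cbind(ws, wd)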
attributes(our_nc_data$var)
#
varpr = ncvar_get(our_nc_data, "pr")
dim(varpr)
# 3D: rows = X / cols = Y / layers = TIME
varpr[ , , 12775]   # last time slice of the period
# varpr[ , , 1]     # one day
# varpr[[1]]        # first value
start.date = as.Date('1980-01-01')   # Historical: 1980-2014
times.sec <- ncvar_get(our_nc_data, 'time_bnds')
times.day <- times.sec/(24*3600)
datex <- start.date + times.day
length(datex)
time = datex
vartime = ncvar_get(our_nc_data, "time_bnds")
# ; vartime
length(vartime)
vartime <- time
#
varlat <- ncvar_get(our_nc_data, "lat")
varlon <- ncvar_get(our_nc_data, "lon")
varlat
varlon
# lswt_array[lswt_array==fillvalue$value] <- NA
#
#LOCAL STATION EXTRACTION: NETCDF TO raster
getwd()
setwd("C:/Users/hp/Downloads/RCM DATA")
dir()
aus_temp <- stack("pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc")
# b<- brick("pr_day_CanESM5_historical.nc")
dim(aus_temp)
ncol(aus_temp)
nrow(aus_temp)
ncell(aus_temp)
nlayers(aus_temp)
projection(aus_temp)
res(aus_temp)
head(names(aus_temp))
tail(names(aus_temp))
#
xtime = seq(as.Date("1980/1/1"), as.Date("2014/12/31"), "days")
length(xtime)   # "%Y-%m-%d %Z"
#
dim(aus_temp)
x11();plot(aus_temp[[1]])
aus_temp1 <-aus_temp[[1:1825]] *86400
aus = aus_temp1[[mean(1:1825)]]
x11();plot(aus)
res(aus)
crs(aus)
crs(aus) <- "+proj=longlat +datum=WGS84 +no_defs"
writeRaster(aus, filename = "CORDEX.tif"); getwd()   # delete the previously exported file first
cl <- colorRampPalette(c("red","orange","yellow","green","blue"))
x11();plot(aus_temp$X1981.01.01,col=cl(200))
#plot(b[[1]],col=cl(200)); plot(points,add=T)
x11();plot(aus_temp$X1981.01.01,col=cl(200))
tail(names(aus_temp))
x11();spplot(aus_temp$X1981.01.01)
#
xtime = seq(as.Date("1980/1/1"), as.Date("2014/12/31"), "days")
length(xtime)   # "%Y-%m-%d %Z"
#
aus_temp <- stack("pr_AFR2_day_19910101-19951231.nc")
head(names(aus_temp)); tail(names(aus_temp))
crs(aus_temp) <- "+proj=longlat +datum=WGS84 +no_defs"
projection(aus_temp)
a = 43.15; b = 11.55
mydf <- structure(list(longitude = c(a), latitude = c(b)),
                  .Names = c("longitude", "latitude"),
                  class = "data.frame", row.names = c(NA, -1L))   # -1L = number of stations
xy <- mydf[, c(1, 2)]
spdf <- SpatialPointsDataFrame(coords = xy, data = mydf,
                               proj4string = CRS("+proj=longlat +datum=WGS84 +ellps=WGS84 +towgs84=0,0,0"))
#
proj_temp_cities <- extract(aus_temp, spdf)
proj_temp_cities <-raster::extract(aus_temp, spdf)
class(proj_temp_cities)
dim(proj_temp_cities)
proj_temp_cities1 <- proj_temp_cities[1, 1:length(proj_temp_cities)] * 86400
class(proj_temp_cities1)
proj_temp_cities1 = as.data.frame(proj_temp_cities1)
head(proj_temp_cities1)
write.table(proj_temp_cities1, "RCM.txt", col.names=FALSE, row.names=FALSE); getwd()
#
# PLOTTING MAPS:
proj_temp_cities <- as.data.frame(t(proj_temp_cities))
proj_temp_cities$year <- 1:nrow(proj_temp_cities) + 1980
names(proj_temp_cities) <- c("Djibouti", "year")
proj_temp_cities_melt <- melt(proj_temp_cities, id="year")
#
ts1 <- ggplot(proj_temp_cities_melt) +
geom_line(aes(x=year, y=value, colour=variable)) +
ggtitle("") +
ylab("") +
scale_color_brewer(name= "cities", palette="Set1") +
theme_bw()
x11();ts1
lswt_vec_long <- as.vector(varpr)
cbind(head(lswt_vec_long))
lswt_vec_long <-lswt_vec_long*86400 # mm/day
cbind(head(lswt_vec_long))
#
nc_df <- data.frame(lswt_vec_long)
nc_df[1:50,]
dim(nc_df)
names(nc_df) <- c("varpr")   # nc_df built above has a single column
head(na.omit(nc_df), 5)      # display some non-NaN values for a visual check
csv_fname <- "netcdf_filename.csv"
write.table(nc_df, csv_fname, row.names=FALSE, sep=";")
getwd()
#
lonlattime <- as.matrix(expand.grid(varlon, varlat, vartime))
# write.table(lswt_vec_long, "lswt_vec_long.txt")
#
dim_time <- ncvar_get(our_nc_data, "time_bnds")
t_units <- ncatt_get(our_nc_data, "time_bnds", "units")
t_ustr <- strsplit(t_units$value, " ")
# t_ustr <- strsplit(as.character(t_units$value), " ")
t_dstr <- strsplit(unlist(t_ustr)[3], "-")
date <- ymd(t_dstr) + dseconds(dim_time)
coords <- as.matrix(expand.grid(date))
# var1 <- ncvar_get(our_nc, "var1", collapse_degen=FALSE)
time_obs <- as.POSIXct(dim_time, origin = "1981-01-01", tz="GMT")
range(time_obs)
#
library(chron); library(lattice); library(RColorBrewer)
tustr <- strsplit(t_units$value, " ")
tdstr <- strsplit(unlist(tustr)[3], "-")
tmonth <- as.integer(unlist(tdstr)[2])
tday <- as.integer(unlist(tdstr)[3])
tyear <- as.integer(unlist(tdstr)[1])
chron(time, origin=c(tmonth, tday, tyear))
#
# replace netCDF fill values with NA's
tmp_array[tmp_array==fillvalue$value] <- NA
length(na.omit(as.vector(tmp_array[,,1])))
image(lon, lat, tmp_slice, col=rev(brewer.pal(10,"RdBu")))
#
https://pjbartlein.github.io/REarthSysSci/netCDF.html
#
nc_df <- data.frame(cbind(varpr))
nc_df <- data.frame(cbind(coords, varpr))
nc_df <- data.frame(cbind(varpr,varlat,varlon))
nc_df[1:50,]
dim(nc_df)
#
names(nc_df) <- c("varpr", "lat", "lon")   # matches the last cbind() above
head(na.omit(nc_df), 5)                    # display some non-NaN values for a visual check
#
csv_fname <- "netcdf_filename.csv"
write.table(nc_df, csv_fname, row.names=FALSE, sep=";")
getwd()
nc_close(our_nc_data)
#
# 3 - CONVERT POINTS TO COORDINATE WGS 1984:
library(maptools)
library(rgdal)
library(sp)
#
dim( pre1.df.tim )
View(head(pre1.df.tim,5))
WGScoor <- t( pre1.df.tim[1:2,] )
cbind(names(as.data.frame(WGScoor)))
#
WGS84 = as.data.frame(WGScoor)
coordinates(WGS84 ) = ~x+y # column names of the lat long cols
proj4string(WGS84) <- CRS("+proj=longlat +datum=WGS84")   # set coordinate system to WGS
WGScoor.df <- SpatialPointsDataFrame(WGS84, data.frame(id=1:length(WGS84)))
LLcoor <- spTransform(WGScoor.df, CRS("+proj=longlat"))
LLcoor.df = SpatialPointsDataFrame(LLcoor, data.frame(id=1:length(LLcoor)))
x11(); plot(LLcoor.df, col="black", pch=20)   # rgb(0, 1, 0, 0.005)
# writeOGR(LLcoor.df, dsn=getwd(), layer="MyShapefile", driver="ESRI Shapefile"); getwd()
library(tmap); data(World)
# meridian2 must hold the central meridian to use (e.g. 0)
World.ae <- st_transform(World, paste0("+proj=robin +lon_0=", meridian2,
                                       " +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs"))
x11(); tm_shape(World.ae) + tm_fill() + tm_borders()
#
https://mgimond.github.io/Spatial/coordinate-systems-in-r.html
#
https://gis.stackexchange.com/questions/214062/create-a-shapefile-from-dataframe-in-r-keeping-attribute-table
#
# 4 - READ HDF5 AND JSON FILES:
# install.packages("BiocManager")
# BiocManager::install("rhdf5")
library(rhdf5)
getwd()
wd <- "C:/Users/hp/Downloads"; setwd(wd)
# h5createFile() creates a new, empty HDF5 file; it is not needed to read an existing one
h5ls("3B-HHR.MS.MRG.3IMERG.20000601-S000000-E002959.0000.V06B.h5")
h5dump("3B-HHR.MS.MRG.3IMERG.20000601-S000000-E002959.0000.V06B.h5")
H5close()
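# A minimal read sketch; the dataset path below is an assumption, take the real one
# from the h5ls() listing of your own file:
fname <- "3B-HHR.MS.MRG.3IMERG.20000601-S000000-E002959.0000.V06B.h5"
info <- h5ls(fname)                                # browse groups/datasets to find the exact path
precip <- h5read(fname, "/Grid/precipitationCal")  # assumed dataset name; replace it from `info`
dim(precip)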
#
#Export in Raster /NETCDF
getwd(); setwd("C:/Users/hp/Desktop/")
bf <- writeRaster(naip_ndvi,
filename=file.path("TestGodoria1987.tif"),
options="INTERLEAVE=BAND", overwrite=TRUE)
# https://r-gis.net/?q=rts
# https://www.youtube.com/watch?v=wLGanYOLzV8
#
https://www.neonscience.org/resources/learning-hub/tutorials/hdf5-intro-r
#https://www.bioconductor.org/packages/devel/bioc/vignettes/rhdf5/inst/doc/rhdf5.html
#
https://search.earthdata.nasa.gov/search/granules?p=C1598621093-GES_DISC&pg[0][v]=f&pg[0][qt]=2000-01-01T00%3A00%3A00.000Z%2C2021-12-
# READ JSON FILES:
# install.packages("rjson")
library("rjson")
myData <- fromJSON(file="data.json")
print(myData)
json_data_frame <- as.data.frame(myData)
print(json_data_frame)
#
#https://www.educative.io/answers/how-to-read-json-files-in-r
# EXPORT SHAPEFILE AND GRID FROM R:
library(raster)
library(sf)
library(raster)
library(ggplot2)
library(rgdal)
library(stars)
library(rnaturalearth)
#
setwd("C:/Users/hp/Documents/ArcGIS 10.8/Djibouti for mask/MASK SHAPEFILE Djibouti")
shp <- shapefile("Regions.shp")
shp <- spTransform(shp, CRSobj = "+proj=utm +zone=38 +datum=WGS84 +units=m +no_defs +ellps=WGS84 +towgs84=0,0,0")
plot(shp)
grid <- st_make_grid(shp, cellsize = c(10000, 10000), square = T)
plot(grid)
#
setwd("C:/Users/hp/Documents")
#
world = ne_countries(scale = "small", returnclass = "sf")
world = st_transform(world, "+proj=robin +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
pol = world[world$sovereignt %in% c("Djibouti", "Ethiopia"), ]
# Make grid
grid = st_as_stars(st_bbox(pol), dx = 10000, dy = 10000)
grid = st_as_sf(grid)
grid = grid[pol, ]
# Plot
plot(st_geometry(grid), axes = TRUE, reset = FALSE)
plot(st_geometry(world), border = "grey", add = TRUE)
plot(st_geometry(pol), border = "red", add = TRUE)
# plot(st_geometry(world), border = "grey")
library(maps)
map("world", xlim=c(42,43), ylim=c(10,14))
#
st_write(grid,"C:/Users/hp/Documents/pol.shp")
#
https://stackoverflow.com/questions/64569984/how-to-generate-10x10km-grid-cells-of-all-countries
#
# 5 - ACCESSING REANALYSIS, CLIMATE INDICES, AND OTHER CLIMATOLOGICAL DATA IN R:
library("terra")
library("remotes")
library(RCurl)
library(heavyRain)
library(chirps)
library(RCurl)
#
# CHIRPS (precipitation data)
f <- system.file("ex/lux.shp", package = "terra")
v <- vect(f)
r3 <- get_chirps(v, dates, server = "CHC", as.raster = TRUE)
#
lonlat <- data.frame(lon = c(43.1483, 43.1583),
                     lat = c(11.5386, 11.5486))
dates <- c("1981-01-01","1981-01-16")   # avoid identical start/end dates
# dates <- c("2017-12-15","2017-12-31") # 16 days
# https://data.chc.ucsb.edu/products/CHIRPS-2.0/africa_daily/tifs/p05/1981/
#
https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_daily/netcdf/p05/
#http://127.0.0.1:18212/library/chirps/html/get_chirps.html
dat <- get_chirps(lonlat, dates, server = "CHC", as.matrix = FALSE)           # Climate Hazards Center
dat <- get_chirps(lonlat, dates, server = "ClimateSERV", as.matrix = FALSE)   # ClimateSERV
#
View(dat)
summary(dat)
#
write.table(dat, "data.txt", row.names=FALSE, col.names=FALSE)
getwd()
#
# take the indices for the entire period
precip_indices(dt, timeseries = FALSE)
# take the indices for periods of 7 days
precip_indices(dt, timeseries = TRUE, intervals = 7)
#
vignette("Overview", package = "chirps")
#
# random geographic points within bbox(10, 12, 45, 47)
library("sf")
set.seed(123)
lonlat <- data.frame(lon = runif(1, 10, 12), lat = runif(1, 45, 47))
gjson <- as.geojson(lonlat)
#
# https://github.com/ropensci/chirps
# https://gis.stackexchange.com/questions/382700/downloading-chirps-data-sets-using-r
#
https://code.earthengine.google.com/?scriptPath=Examples:Datasets/UCSB-CHG/UCSB-CHG_CHIRPS_DAILY
#
# CHIRTS (temperature data)
dates <- c("2010-12-15","2010-12-31")
temp1 <- get_chirts(tp_point, dates, var = "Tmax", as.matrix = TRUE)
temp2 <- get_chirts(tp_point, dates, var = "Tmin", as.matrix = TRUE)
rhu <- get_chirts(tp_point, dates, var = "RHum", as.matrix = TRUE)
##
https://disc.gsfc.nasa.gov/datasets?keywords=GPM%20IMERG&page=1
# Get Integrated Multisatellite Retrievals for GPM (IMERG) data
#
lonlat <- data.frame(lon = c(43.1483, 43.1583),
                     lat = c(11.5386, 11.5486))
dates <- c("2017-12-15", "2017-12-31")
dt <- get_imerg(lonlat, dates)
View(dt)
summary(dt)
#
https://search.r-project.org/CRAN/refmans/chirps/html/get_imerg.html
#
# SM2RAIN algorithm
#
remotes::install_github("itsoukal/sm2rainR")
library(sm2rainR)
ts_df = (data[[1]])
head(ts_df)
Opt = sm2rainCalib(fn=sm2rainNSE, data=ts_df, itermax=20, NP=10)
print(Opt$obj)
# Estimate precipitation using the SM2RAIN algorithm
Sim = sm2rain(PAR=Opt$params, data=ts_df, NN=24)
#
par(mfrow=c(1,2))
plot(Sim$Pobs, t='l', col='black', xlab='Time index [days]', ylab='Rainfall [mm]')
lines(Sim$Psim, col='red')
legend("topleft", legend=c("Observed", "Simulated"), col=c('black', 'red'),
       lty=c('solid','solid'), cex=0.7)
plot(Sim$Psim, Sim$Pobs, pch=19, xlab='Simulated rainfall [mm]', ylab='Observed rainfall [mm]')
# https://github.com/itsoukal/sm2rainR
#
library(RCurl)
# Landing page: https://disc.gsfc.nasa.gov/datasets/GPM_3IMERGDF_06/summary?keywords=imerg
URL <- "https://data.gesdisc.earthdata.nasa.gov/data/GPM_L3/GPM_3IMERGDF.06/2000/06/3B-DAY.MS.MRG.3IMERG.20000601-S000000-E235959.V06.nc4"
# destfile must be a local file path, not a URL
download.file(URL, destfile = "3B-DAY.MS.MRG.3IMERG.20000601-S000000-E235959.V06.nc4", method="curl")
# download.file(url, destfile, method, quiet = FALSE, mode = "w", cacheOK = TRUE)
x <- getURL(URL)
#
# IN IRI LDEO Climate Data Library
# 1 ---- NASA GES_DISC: GPM --- IMERG_Late, IMERG_Final
# 2 ---- NASA GES-DAAC --- TRMM_L3
# 3 ---- ERA5 precipitation in m/day; *1000 = mm/day
#
https://confluence.ecmwf.int/pages/viewpage.action?pageId=197702790
#
# Download the data WORLD CLIM
library(raster)
library(grDevices)
library(spatialEco)
datasets<-getData('worldclim', var='prec', res=10)
#
## cropping to a smaller extent
e <- as(extent(40, 45, 10, 12), 'SpatialPolygons')
crs(e) <- "+proj=longlat +datum=WGS84 +no_defs"
datasets <- crop(datasets, e)
names(datasets) <- month.abb
## plot the data to check
plot(datasets[[1]])
#
getwd(); setwd("C:/Users/hp/Desktop/")
bf <- writeRaster(naip_ndvi, filename=file.path("TestGodoria1987.tif"),
                  options="INTERLEAVE=BAND", overwrite=TRUE)
# st_write(plots_harv, paste0(getwd(), "/", "meuse.shp"), layer_options = "NAME_1")
# The WorldClim data
#
library(raster)
ship <- getData('GADM', country="DJI", level=0)
X11(); plot(ship)
w = getData('worldclim', var='tmin', res=0.5, lon=5, lat=45)
rast <- getData('CMIP5', var='tmin', res=10, rcp=85, model='HD', year=50)
#
require(pRecipe)
getwd()
download_data("mswep", getwd(), timestep = "monthly")
#
require(geodata)
library(raster)
require(rgdal)
library(rgeos)
#
getData('ISO3')
getData('alt', country='DJI', mask=TRUE)
SRTM = raster('DJI_msk_alt.grd')
X11(); plot(SRTM, col=rainbow(200))
# Interpolation
srtm_utm45n = projectRaster(from=SRTM, res=30, crs='+init=epsg:32645', method="bilinear",
                            filename="srtm_dem_30m.tif", format='GTiff', overwrite=TRUE)
X11(); plot(srtm_utm45n)
#
worldclim <- worldclim_country("Djibouti", var="srad", path=getwd())   # tempdir()
# Mosaic/merge SRTM tiles
srtmmosaic <- mosaic(srtm, srtm2, srtm3, fun=mean)
# https://www.gis-blog.com/r-raster-data-acquisition/
# http://worldclim.com/formats1
# Download various precipitation data products
library(pRecipe)
# download_mswep(folder_path = "C:/Users/hp/Documents", domain = "raw", time_res = "monthly")
download_data("mswep", "C:/Users/hp/Documents", timestep = "monthly")
getwd()
#
download_data(
  dataset = "all",
  path = ".",
  domain = "raw",
  timestep = "monthly"
)
getwd()
# tempdir()
#
# "20cr" for 20CR v3,
# "chirps" for CHIRPS v2.0,
# "cmap" for CMAP standard version,
# "cmorph" for CMORPH,
# "cpc" for CPC-Global,
# "cru-ts" for CRU_TS v4.06,
# "em-earth" for EM-EARTH,
# "era20c" for ERA-20C,
# "era5" for ERA5,
# "fldas" for FLDAS,
# "ghcn" for GHCN-M v2,
# "gldas-clsm" for GLDAS CLSM,
# "gldas-noah" for GLDAS NOAH,
# "gldas-vic" for GLDAS VIC,
# "gpcc" for GPCC v2020,
# "gpcp" for GPCP v2.3,
# "gpm_imerg" for GPM IMERGM Final v06,
# "jra55" for JRA-55,
# "merra2" for MERRA-2,
# "mswep" for MSWEP v2.8,
# "ncep-doe" for NCEP/DOE,
# "ncep-ncar" for NCEP/NCAR,
# "persiann" for PERSIANN-CDR,
# "precl" for PREC/L,
# "terraclimate" for TerraClimate,
# "trmm-3b43" for TRMM 3B43 v7,
# "udel" for UDEL v501.
#
https://cran.r-project.org/web/packages/pRecipe/pRecipe.pdf
# Atmospheric Circulation
#
library(rsoi)
download_aao(use_cache = FALSE, file = NULL)   # Download Antarctic Oscillation data
download_ao(use_cache = FALSE, file = NULL)    # Download Arctic Oscillation data
download_dmi(use_cache = FALSE, file = NULL)   # Download Dipole Mode Index (IOD) data
#
dmi <- download_dmi()     # IOD
enso <- download_enso()   # Download Southern Oscillation Index and Oceanic Nino Index data
#
download_enso(climate_idx = c("all", "soi", "oni", "npgo"), create_csv = FALSE)
download_mei(use_cache = FALSE, file = NULL)   # Download Multivariate ENSO Index Version 2 (MEI.v2)
download_nao(use_cache = FALSE, file = NULL)   # Download North Atlantic Oscillation data
download_npgo(use_cache = FALSE, file = NULL)  # Download North Pacific Gyre Oscillation data
download_oni(use_cache = FALSE, file = NULL)   # Download Oceanic Nino Index data
download_pdo(use_cache = FALSE, file = NULL)   # Download Pacific Decadal Oscillation data
download_soi(use_cache = FALSE, file = NULL)   # Download Southern Oscillation Index data
#
https://cran.r-project.org/web/packages/rsoi/rsoi.pdf
#https://climatedataguide.ucar.edu/climate-data/nino-sst-indices-nino-12-3-34-4-oni-and-tni
# Performance Reanalysis Data
summary(data)
names(data)
attach(data)
data1=na.omit(data)
sapply(data1,range)
#
# Continuous statistical metrics
dim(data)
cbind(names(data))
cbind(round(cor(data1), digits=2)[,1])
#
data2=data[,c(1,4)];round(cor(na.omit(data2)),2)
data2=data[,c(1,5)];round(cor(na.omit(data2)),2)
#
data2=data[,c(1,7)];round(cor(na.omit(data2)),2)
data2=data[,c(1,8)];round(cor(na.omit(data2)),2)
#
attach(data)
cbind(names(data))   # CHIRPS GsMAP IMERGF TRMM3B42 ERA5
VAR = data1$pr_CHIRPS
VARx = data1$pr_OBS
#
# Pearson correlation coefficient; square it for the coefficient of determination R2
R = sum((VAR-mean(VAR))*(VARx-mean(VARx))) / sqrt(sum((VAR-mean(VAR))^2)*sum((VARx-mean(VARx))^2))
R2 = R^2; round(R2,3)
#
# Root Mean Square Error (RMSE)
RMSE=sqrt(sum((VARx-VAR)^2)/length(VARx));round(RMSE,3)
#
# Percent Bias (PB)
PBIAS = 100 * (abs(sum(VARx-VAR)) / sum(VARx)); round(PBIAS,3)
#
# Mean Absolute Error (MAE)
MAE=sum(abs(data1$OBS-VAR))/length(data1$OBS);round(MAE,3)
#
# Relative Bias (RB)
RBIAS = sum(VAR-data1$OBS)/sum(data1$OBS); round(RBIAS,3)
#
BIAS = sum(data1$OBS-VAR)/length(data1$OBS);round(BIAS,3)
#
# Index of Agreement (IA), Willmott: IA = 1 - sum((O-P)^2) / sum((|P-mean(O)| + |O-mean(O)|)^2)
IA = 1 - sum((data1$OBS-VAR)^2) /
  sum((abs(VAR-mean(data1$OBS)) + abs(data1$OBS-mean(data1$OBS)))^2)
round(IA,3)
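# As a cross-check, the hydroGOF package (loaded later in this document) has ready-made
# versions of these metrics; a minimal sketch with VAR (simulated) and VARx (observed) as above:
library(hydroGOF)
rmse(sim = VAR, obs = VARx)    # Root Mean Square Error
pbias(sim = VAR, obs = VARx)   # Percent Bias (%)
mae(sim = VAR, obs = VARx)     # Mean Absolute Error
d(sim = VAR, obs = VARx)       # Willmott's Index of Agreement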
#
Y=CHIRPS
reg1=lm(OBS~Y,data=data); summary(reg1)
#
limx=c(0,150); limy=c(0,200)   # windows(height = 70, width = 70)
plot(-20, -20, xaxt="n", yaxt="n", xlim=limx, ylim=limy, cex.lab=1.2,
     xlab="Ground Based (mm/month)", ylab="Remotely sensed (mm/month)")
axis(1, cex.axis=1.3); axis(2, cex.axis=1.3)
par(new=TRUE)
plot(OBS~CHIRPS, xlim=limx, ylim=limy, pch=20, xaxt="n", yaxt="n", col="red", xlab="", ylab=""); abline(reg1, col="red", lwd=2)
par(new=TRUE)
plot(OBS~GsMAP, xlim=limx, ylim=limy, pch=20, xaxt="n", yaxt="n", col="blue", xlab="", ylab=""); abline(reg2, col="blue", lwd=2)
par(new=TRUE)
plot(OBS~IMERGF, xlim=limx, ylim=limy, pch=20, xaxt="n", yaxt="n", col="orange", xlab="", ylab=""); abline(reg3, col="orange", lwd=2)
#
legend(30, 400, legend=c("CH", "PE", "TA", "AR", "ER"), horiz=T, bty="n",
       col=c("red","blue","brown","orange","darkgreen"),
       lty=c(1,1,1,1,1), lwd=c(3,3,3,3,3), cex=0.8)
#
x11();boxplot(data,las=2, col=rainbow(8),pch=20,lwd=2)
grid(col="grey", lwd=2.5,lty=1)
#abline(h=c(-1.5,-1,-0.5,0,0.5,1,1.5),col=2)
box(lty = 1,lwd=2.5, col = 1)
par(new=TRUE)
#
# missing values are ignored by default:
attach(data); names(data)
require(plotrix)
X11(); oldpar <- taylor.diagram(data$OBS, data$CHIRPS, add=F, pch=19, pos.cor=TRUE,
      xlab="Standard Deviation", ylab="Standard Deviation", main="",
      show.gamma=TRUE, ngamma=10, gamma.col="green", sd.arcs=1,
      ref.sd=TRUE, sd.method="sample",
      grad.corr.lines=c(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.99),
      col="blue", pcex=1.5, cex.axis=1.1, normalize=F)
# now add the other products
taylor.diagram(data$OBS, data$GsMAP,    add=TRUE, col="red",    pcex=1.5, cex.axis=1.1, normalize=F)
taylor.diagram(data$OBS, data$IMERGF,   add=TRUE, col="green",  pcex=1.5, cex.axis=1.1, normalize=F)
taylor.diagram(data$OBS, data$TRMM3B42, add=TRUE, col="orange", pcex=1.5, cex.axis=1.1, normalize=F)
taylor.diagram(data$OBS, data$ERA5,     add=TRUE, col="purple", pcex=1.5, cex.axis=1.1, normalize=F)
taylor.diagram(data$OBS, data$ERA5LAND, add=TRUE, col="cyan",   pcex=1.5, cex.axis=1.1, normalize=F)
# get approximate legend position # add a legend
#
legend(1.3, 1.6, legend=c("JF.OB","JF.CH","JF.PE","JF.TA","JF.AR","JF.ER"), horiz=FALSE,
       pch=c(15,19,19,19,19,19), col=c("darkgreen","red","blue","brown","orange","darkgreen"),
       cex=0.7)
#
X11(); oldpar <- taylor.diagram(data$OBS, data$CFSR, add=F, pch=19, pos.cor=TRUE,
      xlab="Standard Deviation", ylab="Standard Deviation", main="",
      show.gamma=TRUE, ngamma=10, gamma.col="green", sd.arcs=1,
      ref.sd=TRUE, sd.method="sample",
      grad.corr.lines=c(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.99),
      col="blue", pcex=1.5, cex.axis=1.1, normalize=F)
taylor.diagram(data$OBS, data$CHIRT,  add=TRUE, col="red",    pcex=1.5, cex.axis=1.1, normalize=F)
taylor.diagram(data$OBS, data$ERA5,   add=TRUE, col="green",  pcex=1.5, cex.axis=1.1, normalize=F)
taylor.diagram(data$OBS, data$ERA5Ag, add=TRUE, col="orange", pcex=1.5, cex.axis=1.1, normalize=F)
# get approximate legend position # add a legend
##
require(plotrix)
x11(); taylor.diagram(OBS, CHIRPS, col="blue", pos.cor=F, pcex=1.5, normalize=F)
taylor.diagram(OBS, GsMAP, add=TRUE, col="red", pcex=1.5, normalize=F)
taylor.diagram(OBS, IMERGF, add=TRUE, col="green", pcex=1.5, normalize=F)
#
require(openair)
Observed_Arrow <- read.table("~/Taylor Diagram/Arrow_observed.txt", header = T)
Simulated_Arrow <- read.table("~/Taylor Diagram/Arrow_simulated.txt", header = T)
combined_dataset <- cbind(Observed_Arrow[18263:length(Observed_Arrow[,1]),],
                          Arrow_sim = Simulated_Arrow[1:10592, 4])
#
TaylorDiagram(combined_dataset, obs = "ARROW_obs", mod = "Arrow_sim")
TaylorDiagram(combined_dataset, obs = "ARROW_obs", mod = "Arrow_sim", group = "Month")
TaylorDiagram(combined_dataset, obs = "ARROW_obs", mod = "Arrow_sim", group = "Month", type = "Year")
#
library(verification)   # Performance Diagram (POD vs Success Ratio, 1 - FAR)
#
performance.diagram(main = "")
RB1 <- matrix(c(95, 55, 42, 141), ncol = 2)   # arrange the counts as a 2x2 contingency matrix
pts <- table.stats(RB1)
boot.pts <- table.stats.boot(RB1, R = 100)
# add confidence intervals
segments(x0=1-pts$FAR, y0=boot.pts["up","pod"],
         x1=1-pts$FAR, y1=boot.pts["dw","pod"], col=2, lwd=2)
#
segments(x0=1-boot.pts["up","far"], y0=pts$POD,
         x1=1-boot.pts["dw","far"], y1=pts$POD, col=2, lwd=2)
points(1 - pts$FAR, pts$POD, col = 2, cex = 2)
#
# https://msrl.kongju.ac.kr/fbarea/1026
#https://rdrr.io/cran/verification/man/performance.diagram.html
# 6 Contingency statistical metrics :
#
attach(data)
dim(data)
cbind(names(data))
# 0.1 mm/h/d Threshold value of precipitation
#
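# A minimal sketch of the standard 2x2 contingency-table route to the same scores, assuming
# OBS and IMERGL are the observed and satellite daily series and 0.1 mm is the wet-day threshold:
thr <- 0.1
obs_wet <- data$OBS >= thr
sat_wet <- data$IMERGL >= thr
hits <- sum(sat_wet & obs_wet,  na.rm = TRUE)    # a: hits
fa   <- sum(sat_wet & !obs_wet, na.rm = TRUE)    # b: false alarms
miss <- sum(!sat_wet & obs_wet, na.rm = TRUE)    # c: misses
POD <- hits / (hits + miss)          # Probability of Detection
FAR <- fa / (hits + fa)              # False Alarm Ratio
CSI <- hits / (hits + fa + miss)     # Critical Success Index
FBI <- (hits + fa) / (hits + miss)   # Frequency Bias Index
round(c(POD = POD, FAR = FAR, CSI = CSI, FBI = FBI), 3)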
Sat  = subset(data, IMERGL >= 0.1)
Obs  = subset(data, OBS >= 0.1)
#
Sat0 = subset(data, IMERGL < 0.1)
Obs0 = subset(data, OBS < 0.1)
#
# 1- Probability of Detection (POD)
POD = table(Sat$OBS>=0.1)[2] / (length(Obs$OBS) + table(Sat$OBS>=0.1)[2])
round(POD,3)
# 2- False Alarm Ratio (FAR)
FAR = length(Sat$IMERGL) / (length(Sat$IMERGL) + table(Sat$OBS>=0.1)[2])
round(FAR,3)
# 3- Critical Success Index (CSI)
CSI = table(Sat$OBS>=0.1)[2] / (table(Sat$OBS>=0.1)[2] + length(Obs$OBS) + length(Sat$IMERGL))
round(CSI,3)
# 4- Frequency Bias Index (FBI)
FBI = (table(Sat$OBS>=0.1)[2] + length(Sat$ERA5)) / (length(Obs$OBS) + table(Sat$OBS>=0.1)[2])
round(FBI,3)
#
# 5- Heidke Skill Score (HSS)
HSS = 2*((table(Sat$OBS>=0.1)[2]*table(Sat0$OBS<0.1)[2]) - (length(Obs$OBS)*length(Sat$IMERGF))) /
  (((length(Obs$OBS) + table(Sat$OBS>=0.1)[2]) * (table(Sat$OBS>=0.1)[2] + table(Sat0$OBS<0.1)[2])) +
   ((table(Sat$OBS>=0.1)[2] + length(Sat$IMERGF)) * (length(Obs$OBS) + table(Sat0$OBS<0.1)[2])))
round(HSS,3)
#
# 6 - Threat Score (TS)
TS = table(Sat$OBS>=0.1)[2] / (table(Sat$OBS>=0.1)[2] + length(Sat$IMERGF) + length(Obs$OBS))
round(TS,3)
# 10.1080/01431161.2017.1312031
#
# Volumetric statistics:
#https://www.researchgate.net/publication/341415510_Assessment_of_satellite_precipitation_product_estimates_over_Bali_Island
# https://doi.org/10.1016/j.atmosres.2020.105032
#
# PERFORMANCE STATISTICAL INDEX
rm(list=ls());rm(list=ls());rm(list=ls());rm(list=ls());rm(list=ls())
# CALIBRATE TO IMPORT
data = read.table(file("clipboard"), header=T, sep="\t", dec=".", row.names=1)
#
data = read.table(file("clipboard"), header=T, sep="\t", dec=".")
library(pastecs)
round(stat.desc(data),2)
str(data)
attach(data)
names(data)
#
data1=na.omit(data)
head(data1)
tail(data1)
cbind(names(data1))
#
data11=na.omit(data[,c(2,6)]); head(data11); tail(data11)
VAR=data11$pr_CHIRPS ; VARx=data11$pr_CORDEXhist
#
# Correlation Coefficient (CC)
#
library(psych); lowerCor(data)
#
round(cor(data, use="complete.obs", method="pearson"), 3)
round(cor(VAR, VARx, method="pearson"), 3)
#
# Root Mean Square Error (RMSE)
RMSE=sqrt(sum((VARx-VAR)^2)/length(VARx))
round(RMSE,3)
# Coefficient of determination (square of the Pearson correlation)
R = sum((VAR-mean(VAR))*(VARx-mean(VARx))) / sqrt(sum((VAR-mean(VAR))^2)*sum((VARx-mean(VARx))^2))
R2 = R^2
round(R2,3)
# Percent Bias (PB)
PBIAS = 100 * (abs(sum(VARx-VAR)) / sum(VARx))
round(PBIAS,3)
#
#Mean Absolute Error (MAE)
MAE=sum(abs(VAR-VARx))/length(VAR);round(MAE,3)
#
#Relative Bias (RB)
RBIAS = sum(VAR-VARx)/sum(VAR);round(RBIAS,3)
#
BIAS = sum(VAR-VARx)/length(VAR);round(BIAS,3)
#
# Index of Agreement (IA)
IA = 1 - sum((VAR-VARx)^2) / sum((abs(VARx-mean(VAR)) + abs(VAR-mean(VAR)))^2); round(IA,3)
#
# GCM-CMIP Model Performance and Ranking INDEX:
#
cbind(names(data1))
data11=na.omit(data[,c(2,6)]); head(data11); tail(data11)
VAR=data11$pr_CHIRPS ; VARx=data11$pr_CORDEXhist
#
# TSS score
sdm = sd(VARx)
sdr = sd(VAR)
CC = cor(VAR, VARx, use="everything", method="pearson"); round(CC,3)
#
TSS = (4*((1+CC)^2)) / ((1+0.999)^2 * ((sdm/sdr)+(sdr/sdm))^2)
round(TSS,3)
# IVS value
IVS = ((sdm/sdr) - (sdr/sdm))^2
round(IVS,3)
# Theil-Sen slope estimator (ThSS)
require(robslopes); require(RobustLinearReg)
set.seed(555)
TS <- TheilSen(VAR, VARx, alpha = NULL, verbose = TRUE)
# TS1 <- theil_sen_regression(VAR~VARx)
TS
round(TS$slope, 3)
#
#
https://search.r-project.org/CRAN/refmans/robslopes/html/TheilSen.html
#
#
https://sci-hub.st/https://doi.org/10.1007/s00376-020-9289-1
# Atmosphere 2021, 12, 742.
https://doi.org/10.3390/atmos12060742
#
# PLOT SECTION
library(plot3D)
image2D(Hypsometry, xlab = "longitude", ylab = "latitude",
        contour = list(levels = 0, col = "black", lwd = 2),
        shade = 0.1, main = "Hypsometry data set", clab = "m")
rect(-50, 10, -20, 40, lwd = 3)
#
library("reshape")
library("ggplot2")
# data_melt <- melt(data)
attach(data); names(data)
ggp <- ggplot(data, aes(month, day)) +   # create a heatmap with ggplot2
  geom_tile(aes(fill = OBS))
X11()
ggp
ggp + scale_fill_gradient(low = "green", high = "black")
ggp + scale_fill_gradientn(colours = c("blue","cyan","green","yellow","red")) +   # a multi-colour scale needs gradientn
  theme_bw()
#
library("plotly")
plot_ly(z = data, type = "heatmap")                         # apply the plot_ly function
plot_ly(z = data, colorscale = "Greys", type = "heatmap")   # manual colours
# https://statisticsglobe.com/heatmap-in-r
dif = subset(data, OBS >= 0.3)
# write.table(dif, "data.txt", col.names=FALSE, row.names=FALSE)
# OBS[OBS>=0.3]   (%in% = "is contained in the series")
#
https://www.datanovia.com/en/lessons/heatmap-in-r-static-and-interactive-visualization/
# https://r-graph-gallery.com/heatmap
#
# SCATTER PLOT IN R
library(plot3D)
npoints = 20
x = runif(npoints,1,20)
y = runif(npoints,1,20)
# generate random numbers between (0,30) for Z values
z = 30*runif(npoints)
# Call to scatter3D function
x11()
scatter3D(x, y, z, box=TRUE, pch=16, bty="b2", axes=TRUE, label=TRUE,
          nticks=5, ticktype="detailed", theta=40, phi=40,
          xlab="X-val", ylab="Y-val", zlab="Z-val", main="3D scatter plot")
#
library(datasets)
library(ggplot2)
data(airquality)
#
aq_trim <- airquality[which(airquality$Month == 7 |
                            airquality$Month == 8 |
                            airquality$Month == 9), ]
aq_trim
aq_trim$Month <- factor(aq_trim$Month, labels = c("July", "August", "September"))
#
p6 <- ggplot(aq_trim, aes(x = Day, y = Ozone)) +
  geom_point()
p6 <- ggplot(aq_trim, aes(x = Day, y = Ozone, size = Wind)) +
  geom_point(); p6
p6 <- ggplot(aq_trim, aes(x = Day, y = Ozone, size = Wind)) +
  geom_point(shape = 21); p6
p6 <- ggplot(aq_trim, aes(x = Day, y = Ozone, size = Wind, fill = Temp)) +
  geom_point(shape = 21) +
  ggtitle("Air Quality in New York by Day") +
  labs(x = "Day of the month", y = "Ozone (ppb)") +
  scale_x_continuous(breaks = seq(1, 31, 5)); p6
p6 <- ggplot(aq_trim, aes(x = Day, y = Ozone, size = Wind, fill = Month)) +
  geom_point(shape = 21) +
  ggtitle("Air Quality in New York by Day") +
  labs(x = "Day of the month", y = "Ozone (ppb)") +
  scale_x_continuous(breaks = seq(1, 31, 5)); p6
p6 <- p6 + scale_size_area(max_size = 10); p6
#
library(grid)
p6 <- p6 + theme(legend.box = "horizontal",
                 legend.key.size = unit(1, "cm")); p6
#
names(aq_trim)
fill = c("steelblue", "yellowgreen", "violetred1")
#
p6 <- ggplot(aq_trim, aes(x = Day, y = Ozone, size = Wind, fill = Month)) +
  geom_point(shape = 21) +
  theme_bw() +
  theme() +
  ggtitle("Air Quality in New York by Day") +
  labs(x = "Day of the month", y = "Ozone (ppb)",
       size = "Wind Speed (mph)", fill = "Months") +
  scale_x_continuous(breaks = seq(1, 31, 5)) +
  scale_fill_manual(values = fill) +
  scale_size(range = c(1, 10)) +
  theme(legend.position="bottom", legend.direction="horizontal",
        legend.box = "horizontal", legend.key.size = unit(1, "cm")); p6
#
https://t-redactyl.io/blog/2016/02/creating-plots-in-r-using-ggplot2-part-6-weighted-scatterplots.html
# https://r-graph-gallery.com/13-scatter-plot.html
#
library(ggplot2)
# shape is going to give the shapes as in your example
ggplot(iris, aes(x = Sepal.Length, y = Sepal.Width,
                 color = Petal.Length, shape = Species)) +
  geom_point() +
  # the colour scale below may not be ideal for reading the data;
  # rev() reverses the scale so that red falls on higher values
  scale_colour_gradientn(colours = rev(rainbow(5))) +
  scale_x_continuous(name = 'Salinity') +
  scale_y_continuous(name = 'Temperature °C') +
  theme_bw() +
  ggtitle('Scatter plot Calcite Saturation State and Temperature')
#
#
https://stackoverflow.com/questions/70559917/four-variable-scatter-plot-with-colour-legend-in-r
#
library(ggplot2)
d = data.frame(x=runif(50), y=runif(50), z=runif(50))
ggplot(data = d, mapping = aes(x = x, y = y)) + geom_point(aes(colour = z), shape = 19)
#
scatter_fill <- function(x, y, z,
                         xlim=c(min(x),max(x)), ylim=c(min(y),max(y)), zlim=c(min(z),max(z)),
                         nlevels = 20,
                         plot.title, plot.axes,
                         key.title, key.axes,
                         asp = NA, xaxs = "i", yaxs = "i",
                         las = 1, axes = TRUE, frame.plot = axes, ...)
{
  mar.orig <- (par.orig <- par(c("mar", "las", "mfrow")))$mar
  on.exit(par(par.orig))
  w <- (3 + mar.orig[2L]) * par("csi") * 2.54
  layout(matrix(c(2, 1), ncol = 2L), widths = c(1, lcm(w)))
  par(las = las)
  mar <- mar.orig
  mar[4L] <- mar[2L]
  mar[2L] <- 1
  par(mar = mar)
  # choose colors to interpolate
  levels <- seq(zlim[1], zlim[2], length.out = nlevels)
  col <- colorRampPalette(c("red", "yellow", "dark green"))(nlevels)
  colz <- col[cut(z, nlevels)]
  # colour key
  plot.new()
  plot.window(xlim = c(0, 1), ylim = range(levels), xaxs = "i", yaxs = "i")
  rect(0, levels[-length(levels)], 1, levels[-1L], col=col, border=col)
  if (missing(key.axes)) {if (axes){axis(4)}}
  else key.axes
  box()
  if (!missing(key.title))
    key.title
  mar <- mar.orig
  mar[4L] <- 1
  par(mar = mar)
  # points
  plot(x, y, type = "n", xaxt='n', yaxt='n', xlab="", ylab="", xlim=xlim, ylim=ylim, bty="n")
  points(x, y, col = colz, xaxt='n', yaxt='n', xlab="", ylab="", bty="n", ...)
  # options to make mapping more customizable
  if (missing(plot.axes)) {
    if (axes) {
      title(main = "", xlab = "", ylab = "")
      Axis(x, side = 1)
      Axis(y, side = 2)
    }
  }
  else plot.axes
  if (frame.plot)
    box()
  if (missing(plot.title))
    title(...)
  else plot.title
  invisible()
}
#
vx <- rnorm(40,0,1)
vy <- rnorm(40,0,1)
vz <- rnorm(40,10,10)
#
scatter_fill(vx,vy,vz,nlevels=15,xlim=c(-1,1),ylim=c(-1,5),zlim=c(-10,10),main="TEST",pch=".",cex=8)
#
#
library(latticeExtra)
require(RColorBrewer)
d = data.frame(x=runif(50), y=runif(50), z=runif(50)); attach(d)
#
levelplot(z ~ x + y, panel = panel.levelplot.points, col.regions = heat.colors(50))
levelplot(z ~ x + y, panel = panel.levelplot.points,
          col.regions = colorRampPalette(brewer.pal(11,"RdYlGn"))(50))
levelplot(z ~ x + y, panel = panel.levelplot.points, col.regions = rainbow(50))
#
#
https://stackoverflow.com/questions/20127282/r-color-scatterplot-points-by-z-value-with-legend
# https://appsilon.com/matplotlib-vs-ggplot/
# Image PLOT in R
# Data
x <- -10:10
y <- -10:10
z <- sqrt(outer(x ^ 2, y ^ 2, "+"))
image(x, y, z)
# You can also type the following, but the axes will be between 0 and 1
image(z)
#
image(x, y, z)
contour(x, y, z, add = TRUE)
#
https://www.r-bloggers.com/2022/10/how-to-use-the-image-function-in-r/
# 6 - STOCHASTIC DOWNSCALING METHODS:
# QM  (Standard Quantile Mapping)
# DQM (Detrended Quantile Mapping)
# QDM (Quantile Delta Mapping)
# SDM (Scaled Distribution Mapping)
# UQM (Unbiased Quantile Mapping)
# BCCAQ is a hybrid downscaling method that combines outputs from Climate Analogues (CA)
# and quantile mapping at the fine-scale resolution.
#
require(ClimDown)
library(doParallel)
library(ncdf4)
library(cmsaf)
library(hydroGOF)
#
data = read.table(file("clipboard"), header=T, sep="\t", dec=".", row.names=1)
#
data = read.table(file("clipboard"), header=T, sep="\t", dec=".")
str(data)
#
attach(data)
names(data)
library(pastecs)
round(stat.desc(data),2)
library(psych)
lowerCor(data)
cor(data, use="complete.obs", method="pearson")
#
# Delta statistical downscaling method (Dessu and Melesse, 2013):
#
attach(data)
cbind(names(data))
pr.OBS   = na.omit(pr_CHIRPS)          # pr.OBS = pr_OBS / CHIRPS
pr.hist  = na.omit(pr_CORDEX)          # pr.hist = pr_CanESM5_hist
pr.proj0 = na.omit(pr_CORDEX)
pr.proj  = na.omit(pr_CanESM5_ssp119)  # pr.proj = pr_CanESM5_ssp119
pr.Deltamet0 = pr.proj0 * (mean(pr.OBS)/mean(pr.hist))
pr.Deltamet  = pr.proj  * (mean(pr.OBS)/mean(pr.hist))
cbind(pr.Deltamet0); cbind(pr.Deltamet)
write.table(pr.Deltamet0, "data0.txt", col.names=FALSE, row.names=FALSE); getwd()
write.table(pr.Deltamet, "data.txt", col.names=FALSE, row.names=FALSE); getwd()
cbind(names(data))
tas.hist =    # assign the historical GCM temperature series here
tas.proj =    # assign the projected GCM temperature series here
tas.OBS  =    # assign the observed temperature series here
tas.Deltamet  = tas.proj + (mean(tas.OBS) - mean(tas.hist))
tas.Deltamet0 = tas.hist + (mean(tas.OBS) - mean(tas.hist))
cbind(tas.Deltamet)
write.table(tas.Deltamet0, "data0.txt", col.names=FALSE, row.names=FALSE); getwd()
write.table(tas.Deltamet, "data.txt", col.names=FALSE, row.names=FALSE); getwd()
#
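# Since the temperature series above are left blank, here is a minimal generic sketch of the
# delta method under the usual convention (multiplicative for precipitation, additive for
# temperature); obs, hist and proj are hypothetical vectors to be replaced by your own columns:
delta_downscale <- function(obs, hist, proj, type = c("multiplicative", "additive")) {
  type <- match.arg(type)
  if (type == "multiplicative") proj * (mean(obs) / mean(hist))   # precipitation-like variables
  else                          proj + (mean(obs) - mean(hist))   # temperature-like variables
}
# pr.Deltamet  <- delta_downscale(pr.OBS,  pr.hist,  pr.proj,  "multiplicative")
# tas.Deltamet <- delta_downscale(tas.OBS, tas.hist, tas.proj, "additive")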
https://agrimetsoft.com/statistical%20downscaling
#
https://ccafs-climate.org/bias_correction/
#
https://search.r-project.org/CRAN/refmans/HDInterval/html/inverseCDF.html
#
https://www.geeksforgeeks.org/compute-empirical-cumulative-distribution-function-in-r/
#
https://www.geeksforgeeks.org/plot-cumulative-distribution-function-in-r/
#
https://www.mdpi.com/2073-4441/12/3/801
#|- Quantile Mapping (QM) statistical downscaling method, Panofsky and Brier (1968)
#
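# A minimal empirical quantile-mapping sketch, assuming obs and model_hist are overlapping
# daily series and model_fut is the series to correct (all hypothetical vectors): each future
# value is mapped through the historical model ECDF onto the observed quantile function.
qmap_empirical <- function(obs, model_hist, model_fut) {
  p <- ecdf(model_hist)(model_fut)                              # non-exceedance probability in the model climate
  as.numeric(quantile(obs, probs = p, na.rm = TRUE, type = 7))  # matching observed quantiles
}
# corrected <- qmap_empirical(obs, model_hist, model_fut)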
# STATISTICAL DOWNSCALING METHODS IN R
# - Transfer Function: multiple linear regression + bias correction
rm(list=ls());rm(list=ls());rm(list=ls());rm(list=ls());rm(list=ls())
#
data = read.table(file("clipboard"), header=T, sep="\t", dec=".", row.names=1)
str(data)
names(data)
#
data = read.table(file("clipboard"), header=T, sep="\t", dec=".")
data1=na.omit(data)
library(pastecs)
round(stat.desc(data),2)
#
library(psych)
lowerCor(data)
cor(data, use="complete.obs", method="pearson")
#
x = data$pr_OBS
y = data$pr_CanESM5_ssp585
#
attach(data)
cbind(names(data))
reg = lm(pr ~ p5f+p5u+p5v+p5z+p8u+s500+s850); summary(reg)
reg1 = lm(pr ~ p5f+p5u+p5v+p5z+p8u+s500+s850 + lag(pr,-1)); summary(reg1)   # gives R2 = 1 exactly because Y predicts itself; this is wrong
#
analog <- downscale.train(data, method = "analogs", n.analogs = 1)
#
regression <- downscale.train(data, method = "GLM", family = gaussian)
#
neuralnet <- downscale.train(data, method = "NN", hidden = c(10,5), output = "linear")
#
library(visualizeR)
#
temporalPlot(igueldo.2000, analog.2000, regression.2000, neuralnet.2000)
require(MBC)
# trace.calc and ratio.max.trace are left at their defaults (they are derived from `trace` inside QDM)
QDM = QDM(data1$pr, data1$CMIP6, data1$CMIP6, ratio=FALSE, trace=0.05,
          jitter.factor=0, n.tau=NULL, ratio.max=2,
          ECBC=FALSE, ties='first', subsample=NULL, pp.type=7)
#
write.table(QDM, 'QDM.txt', row.names=TRUE, col.names=TRUE)
#
#
http://web.vu.lt/mif/a.buteikis/wp-content/uploads/2020/04/Example_05.html
#
https://waterprogramming.wordpress.com/author/keyvanmalek/
#
# biascorrection: BIAS CORRECTION OF MODELS IN R
#
https://waterprogramming.wordpress.com/2020/09/15/introducing-the-r-package-biascorrection/
#
https://waterprogramming.wordpress.com/author/keyvanmalek/
#-------- CDF-t to correct the interpolated ERA-Interim data: SPATIAL CORRECTION
library(CDFt)
CT <- CDFt(x, y, y, npas=1000, dev=2)   # ; CT$x
#
# BIAS CORRECTION
#
https://thaos.github.io/cdft_vignette/#applying_cdf-t_to_correct_the_interpolated_era-interim_data
#
devtools::install_github("SantanderMetGroup/climate4R.datasets")
#
remotes::install_github("VeruGHub/easyclimate")
#
# easyclimate
require(climate4R.datasets)
require(climdex.pcic)
library(easyclimate)
library(terra)
library(ggplot2)
library(tidyterra)
library(mapSpain)
require(sf)
#
coords <- data.frame(lon = 11.55, lat = 43.15)
prec <- get_daily_climate(coords, period = "2001-01-01:2001-01-03", climatic_var = "Prcp", version = 4)
sobrarbe <- mapSpain::esp_get_comarca(comarca = "djibouti")
plots_harv <- read_sf("C:/Users/hp/Documents/ArcGIS 10.8/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp")
sobrarbetemp <- get_daily_climate(coords = plots_harv, climatic_var = "Tmax",
                                  period = "2020-05-01:2020-05-03", output = "raster")
## Polygons
coords <- terra::vect("POLYGON ((11, 14, 40, 43))")
#
class(sobrarbe)
# sobrarbe <- project(vect(sobrarbe), "EPSG:4326")
x11(); plot(sobrarbetemp, col = rev(RColorBrewer::brewer.pal(9, "RdYlBu")), smooth = TRUE, nc = 3)
x11()
ggplot() +
  geom_spatraster(data = sobrarbetemp) +
  facet_wrap(~lyr, ncol = 3) +
  scale_fill_distiller(palette = "RdYlBu", na.value = "transparent") +
  geom_spatvector(data = sobrarbe, fill = NA) +
  labs(fill = "Maximum\ntemperature (ºC)") +
  scale_x_continuous(breaks = c(-0.25, 0, 0.25)) +
  scale_y_continuous(breaks = seq(42.2, 42.8, by = 0.2)) +
  theme_minimal()
#
https://www.sciencedirect.com/science/article/abs/pii/S1364815223000130
#
https://github.com/VeruGHub/easyclimate
#
#
# 1] Bias-Corrected Spatial Disaggregation (BCSD) downscaling algorithm:
#
rm(list = ls())
library(ncdf4)
library(abind)
library(dplyr)
library(RNetCDF)
library(raster)
library(sp)
library(rgdal)
library(terra)
library(ggplot2)
library(dplyr)
qm <- function(obs, his, fu) {
  p  <- ecdf(his)(fu)                  # non-exceedance probabilities of the future values in the historical run
  bc <- quantile(obs, p, na.rm=FALSE)  # map them onto the observed quantiles
  as.numeric(bc)
}
obs.pr <- dat$CHIRPS
his.pr <- dat$pr_CanESM5hit
fu.pr  <- r
#
host <- array(data=NA, dim=dim(fu.pr))   # create the blank host array
#
for (i in 1:dim(obs.pr)[1]) {
  for (j in 1:dim(obs.pr)[2]) {
    if (is.na(obs.pr[i,j,1])) {
      host[i,j,] <- NA   # all cells outside the study domain stay NA
    } else {
      host[i,j,] <- qm(obs.pr[i,j,], his.pr[i,j,], fu.pr[i,j,])   # QM for inside cells
    }
  }
}
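# A quick synthetic check of the qm() helper above on gamma-distributed rainfall
# (purely illustrative values):
set.seed(1)
obs_demo  <- rgamma(1000, shape = 0.8, scale = 6)    # "observed" daily rainfall
hist_demo <- rgamma(1000, shape = 0.8, scale = 9)    # biased "historical" model rainfall
fut_demo  <- rgamma(1000, shape = 0.8, scale = 10)   # "future" model rainfall to correct
corrected <- qm(obs_demo, hist_demo, fut_demo)
round(c(obs = mean(obs_demo), raw_future = mean(fut_demo), corrected = mean(corrected)), 2)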
#
#
https://github.com/quanta1985/Bias-Correct-and-Spatial-Dissaggregation/blob/main/temperature/BC.R
#
https://github.com/quanta1985/Bias-Correct-and-Spatial-Dissaggregation/blob/main/precipitation/BC.R
#
# 2] Bilinear interpolation downscaling:
library(data.table)
require(metR)
require(fields)
library(akima)
#
data(geopotential)
head(geopotential)
geopotential <- geopotential[date == date[1]]
# new grid
x.out <- seq(0, 360, by = 10)
y.out <- seq(-90, 0, by = 10)
# Interpolate values to a new grid
interpolated <- geopotential[, Interpolate(gh ~ lon + lat, x.out, y.out)]; head(interpolated)
# Interpolate multiple values
geopotential[, c("u", "v") := GeostrophicWind(gh, lon, lat)]
interpolated <- geopotential[, Interpolate(u | v ~ lon + lat, x.out, y.out)]; head(interpolated)
#
#
# Interpolate values following a path
lats <- c(-34, -54, -30)   # start and end latitudes
lons <- c(302, 290, 180)   # start and end longitudes
path <- geopotential[, Interpolate(gh ~ lon + lat, as.path(lons, lats))]
#
https://search.r-project.org/CRAN/refmans/metR/html/Interpolate.html
#
x <- c(0, 2.5, 5, 7.5, 10)
y <- c(50, 55, 60, 65, 70)
z <- matrix(rnorm(25), 5, 5)
x0 <- seq(0, 10, .5)
y0 <- seq(50, 70, length = length(x0))
bilinear(x, y, z, x0, y0)
#
https://stackoverflow.com/questions/57501464/using-the-akima-bilinear-function-for-interpolation
#
data(lennon)
# create the surface object
obj <- list(x = 1:20, y = 1:20, z = lennon[201:220, 201:220])
# sample at 50 equally spaced points
temp <- seq(1, 20, , 50)
make.surface.grid(list(temp, temp)) -> loc
interp.surface(obj, loc) -> look
# take a look
image.plot(as.surface(loc, look))
#
https://www.image.ucar.edu/GSP/Software/Fields/Help/interp.surface.html
#*
# Perform RainFARM downscaling (SQUARE data!)
# Make some sample synthetic rainfall data
# ??rainfarm
require(rainfarmr)
r <- exp(rnorm(4 * 4 * 10)); r
dim(r) <- c(4, 4, 10)   # 4x4, repeated 10 times
dim(r)                  # the spatial dimensions of the input array must be square!
r[, , 1]                # 4x4, first slice   # i * j * k = dim/length
dim(varpr)
length(lswt_vec_long)   # length(lswt_vec_long) - (395*395*1824)
varpr1 = lswt_vec_long[-c(1:66600)]
dim(lswt_vec_long) <- c(388, 402, 1825)   # divide by k, then sqrt(i*j)
#
Downscale with spectral slope=1.7 to size 32x32
#
rainfarm(r, slope, nf, weights = 1, fglob = FALSE, fsmooth = TRUE, verbose =
FALSE)
rd
<- rainfarm(varpr, 1.7, 8, fsmooth=FALSE)
#
#
Verify that downscaled data maintained original box averages
agg(rd[ ,
, 1], 4)
#
https://www.medscope-project.eu/products/data/
#
https://cran.r-project.org/web/packages/rainfarmr/rainfarmr.pdf
#
#
# DOWNSCALING WITH THE RainFARM METHOD
# Stochastic Precipitation Downscaling with the RainFARM Method
# ??RainFARM
# ??s2dv_cube
#
set.seed(111)
obs <- rnorm(2 * 3 * 4 * 8 * 8)
# exp <- 1 : (2 * 3 * 4 * 8 * 8)
length(obs)
dim(obs) <- c(dataset = 1, member = 2, sdate = 3, ftime = 4, lat = 8, lon = 8)
# obs <- 1:100
# dim(obs) <- c(lat = 2, time = 10, lon = 5)   # X Y N
# Transpose into rows, repeating 8 times (0.5-degree step below).
# length of the variable = decomposition into 6 dimension factors.
#
lon <- seq(10, 13.5, 0.5)
lat <- seq(40, 43.5, 0.5)
coords <- list(lon = lon, lat = lat)
data <- list(data = obs, coords = coords)
class(data) <- "s2dv_cube"
#
# exp1 <- s2dv_cube(data = exp_original)
attractor <- CST_ProxiesAttractor(data = data, quanti = 0.6)
slopes <- CST_RFSlope(data, time_dim = c("member", "ftime"))
ww <- CST_RFWeights(data, nf=20, lon = exp$coords$lon, lat = exp$coords$lat)
#
class(data)
names(data)
dim(data$data)
#
library(CSTools)
exp <- lonlat_prec
names(exp)
dim(exp$data)
#
#|| Standard downscaling without climatological weights
# 1 degree (about 100 km) to 0.05 degrees (about 5 km): 100/5 = 20 (nf)
# the construction of `data` above is similar to `exp`
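# A tiny helper for that refinement-factor arithmetic (resolutions in degrees are hypothetical):
coarse_res_deg <- 1.0    # input grid spacing
fine_res_deg   <- 0.05   # target grid spacing
nf <- coarse_res_deg / fine_res_deg   # refinement factor passed to RainFARM
nf                                    # 20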
#
class(exp)
exp_down <- CST_RainFARM(exp, nf = 20, kmin = 1, nens = 3,
                         time_dim = c("member", "ftime"))
#exp_down$data[1:3]
#
class(exp$data)
downscaled <- RainFARM(exp$data, exp$coords$lon, exp$coords$lat,
                       nf = 20, kmin = 1, nens = 3,
                       time_dim = c("member", "ftime"))
#downscaled$data[1:3]
#
# weights : matrix of climatological weights, obtainable with the CST_RFWeights function (default: weights = 1)
# slope : prescribed spectral slope (shape and relief of the terrain); default slope = 0
# nf : refinement factor for downscaling (the output resolution is increased by this factor)
# kmin : first wavenumber for the spectral slope (default: kmin = 1)
# nens : number of ensemble members to produce (default: nens = 1)
# fglob : logical, conserve global precipitation over the domain (default: FALSE)
# fsmooth : logical, conserve precipitation with a smoothing kernel (default: TRUE)
# nprocs : number of parallel processes to spawn for parallel computation on multiple cores (default: 1)
# time_dim : string or character array with the name(s) of the dimension(s) (e.g. "ftime", "sdate", "member") over which to compute spectral slopes
# verbose : logical for verbose output (default: FALSE)
# drop_realization_dim : logical, remove the "realization" stochastic ensemble dimension, needed for saving data through the CST_SaveData function (default: FALSE)
# ProxiesAttractor : this function computes two dynamical proxies of the attractor: the local dimension (d) and the inverse of the persistence (theta) for an 's2dv_cube' object
# PLOTTING RESULTS:
x11(); par(mfrow=c(1,3))
# pixel values:
a <- exp$data[1, 1, 1, 17, , ] * 86400 * 1000; a[a > 60] <- 60; a
image(exp$coords$lon, rev(exp$coords$lat), t(apply(a, 2, rev)), xlab = "lon", ylab = "lat",
      col = rev(terrain.colors(20)), zlim = c(0,60))
map("world", add = TRUE); title(main = "pr 17/03/2010 original")
#
a <- exp_down$data[1, 1, 1, 1, 17, , ] * 86400 * 1000; a[a > 60] <- 60; a
image(exp_down$coords$lon, rev(exp_down$coords$lat), t(apply(a, 2, rev)), xlab = "lon", ylab = "lat",
      col = rev(terrain.colors(20)), zlim = c(0, 60))
map("world", add = TRUE); title(main = "pr 17/03/2010 downscaled")
data(wrld_simpl); plot(wrld_simpl, add = TRUE)
#dev.off()
#
a <- downscaled$data[1, 1, 1, 17, , ] * 86400 * 1000; a[a > 60] <- 60; a
image(downscaled$lon, rev(downscaled$lat), t(apply(a, 2, rev)), xlab = "lon", ylab = "lat",
      col = rev(terrain.colors(20)), zlim = c(0, 60))
map("world", add = TRUE); title(main = "pr 17/03/2010 downscaled")
data(wrld_simpl); plot(wrld_simpl, add = TRUE)
#dev.off()
#
library(mapdata)
library(ggplot2)
library(ggmap)
library(sp)
library(ggfortify)
#
jp <- ggplot2::map_data('world2', 'DJIBOUTI'); class(jp); head(jp)
jp <- map('world2', 'DJIBOUTI', plot = FALSE, fill = TRUE)
p <- autoplot(jp, geom = 'polygon', fill = 'region') + theme(legend.position="none"); p
df <- data.frame(long = c(43.13, 42.15),
                 lat = c(11.55, 12.00),
                 label = c('Tokyo', 'Osaka'),
                 population = c(1335, 886))
coordinates(df) <- ~ long + lat; class(df)
autoplot(df, p = p, colour = 'red', size = 10)
#
#
# Downscaling using climatological weights and slopes
ww <- CST_RFWeights("./worldclim.nc", nf = 20, lon = exp$coords$lon, lat = exp$coords$lat)
slopes <- CST_RFSlope(exp, time_dim = c("member", "ftime"))
#
https://gmd.copernicus.org/articles/15/6115/2022/
#
require(devtools)
require(zeallot)
require(fields)
require(raster)
require(abind)
require(qmap)
require(s2dverification)
library(downscaleR)
require(CSTools)
require(rainfarmr)
#
library(ncdf4)
library(lubridate)
library(raster)
library(tidyverse)
library(raster)
library(rgdal)
require(RColorBrewer)
library(purrr)
library(maptools)
require(stars)
library(sf)
library(ggplot2)
#
EPSG <- make_EPSG(); EPSG
#
dtm_harv <- read_stars("data/HARV/HARV_dtmCrop.tif")
#
plots_harv <- read_sf("data/HARV/harv_plots.shp")
#
setwd("C:/Users/hp/Downloads/GCM DATA"); getwd()
# pr_day_CanESM5_historical_r1i1p1f1_gn_19800101-20141231_v20190429(mondiale)
# "pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc"
nc_fname <- "pr_day_CanESM5_historical_r1i1p1f1_gn_19800101-20141231_v20190429(mondiale).nc"
our_nc_data <- nc_open("pr_day_CanESM5_historical_r1i1p1f1_gn_19800101-20141231_v20190429(mondiale).nc")   # correct name
#
View(our_nc_data)
#
attributes(our_nc_data$var)
varpr= ncvar_get(our_nc_data,"pr" )
dim(varpr)
dim(varpr)[3]/365
head(varpr)
length(varpr)
#
varlat <- ncvar_get(our_nc_data, "lat"); head(varlat)
varlon <- ncvar_get(our_nc_data, "lon"); head(varlon)
#
vartime = ncvar_get(our_nc_data, "time_bnds"); head(vartime)
start.date = as.Date('1981-01-01')
times.sec <- ncvar_get(our_nc_data, 'time')
times.day <- times.sec/(24*3600)
datex <- start.date + times.day
length(datex)
time = datex
#
grd <- expand.grid(varlon, varlat, vartime)
# grid <- expand.grid(lon=lon, lat=lat)
# cutpts <- c(-50,-40,-30,-20,-10,0,10,20,30,40,50)
# levelplot(tmp_slice ~ lon * lat, data=grid, at=cutpts, cuts=11, pretty=T,
#           col.regions=(rev(brewer.pal(10,"RdBu"))))
#
https://pjbartlein.github.io/REarthSysSci/netCDF.html
#
https://www.r-bloggers.com/2017/03/resizing-spatial-data-in-r/
#
nc_df <- data.frame(cbind(varpr)); csv_fname <- "netcdf_filename.csv"
#
write.table(nc_df, csv_fname, row.names=FALSE, sep=";");getwd()
#
https://ropensci.org/blog/2019/11/05/tidync/
#
dim(varpr)
vp <- varpr[, , 1]
#
xtime = seq(as.Date("1980/1/1"), as.Date("1980/12/31"), "days")
length(xtime)   # "%Y-%m-%d %Z"
#
vp1 <- varpr[, , 1:365]
# r <- raster::raster(file.choose())
# r1 <- raster::rotate(r); r1
# WARNING: the resolution is incorrect with this approach (to check)
r <- raster(t(vp), xmn=min(varlon), xmx=max(varlon), ymn=min(varlat), ymx=max(varlat),
            crs=CRS("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs +towgs84=0,0,0"))
r <- flip(r, direction='y'); res(r)
x11(); plot(r)
data(wrld_simpl); plot(wrld_simpl, add = TRUE)
#
ROI <- extent(41.56, 43.59, 10.81, 12.81)   # extent(xmin, xmax, ymin, ymax)
r.crop <- crop(r, ROI)
plots_harv <- read_sf("C:/Users/hp/Documents/ArcGIS input/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp")
x11(); display.brewer.all()
x11(); plot(r.crop)
plot(plots_harv, col="transparent", add=T)
#
dir <- "C:/Users/hp/Documents/ArcGIS 10.8/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp"
name <- "Regions"
hauls <- readOGR(dir, name); crs(hauls)
r.clip <- mask(r, hauls)
x11(); plot(r.clip)
plot(hauls, col="transparent", add=T)
#
x11(); plot(r.clip, xlim=c(41,44), ylim=c(10,13)); plot(hauls, col="transparent", add=T)
# sf_object <- sf::st_read(file.choose())
# points(centroids$easting, centroids$northing, pch=0, cex = 2)
#
dir <- "C:/Users/hp/Downloads/GCM DATA/pr_day_CanESM5_historical_r1i1p1f1_gn_19800101-20141231_v20190429(mondiale).nc"
b <- brick(dir, varname="pr", lvar=4)
x11(); hist(b, main = "", col = "springgreen", xlab = "Height (m)")
res(b)           # resolution x 111
proj4string(b)   # WGS AND YEAR
print(b)
b
x11(); display.brewer.all()
x11(); plot(b$X2014.12.31, col=brewer.pal(9,"Reds"))
#
# 1 kg/m2/s = 86400 mm/day
b_mmd <- (b * 86400)    # converting to mm/day
dim(b); dim(b_mmd)      # 12775 layers = the time depth of the NetCDF
dim(b_mmd)[3]/365
x11(); par(mfrow=c(1,2)); plot(b_mmd[[12775]]); plot(b_mmd$X2014.12.31)
#
x11();display.brewer.all()
#
x11(); plot(b_mmd, y=1, main="", col=brewer.pal(9,"YlOrRd"))   # FIRST DAY
plot(hauls, add=TRUE, cex=0.5, col="red")
#
map("worldHires", col="grey90", border="grey50", fill=TRUE, add=TRUE)
data(wrld_simpl); plot(wrld_simpl, add = TRUE)
#
#
# Extract values (NOT EFFICIENT!)
ext <- raster::extract(b_mmd$X1981.01.01, hauls, method='bilinear', fun=mean, sp=TRUE, stringsAsFactors=FALSE)
ext1 <- raster::extract(b_mmd$X1981.01.01, hauls, method='simple', fun=mean, df=TRUE)
#
haul_temp <- extract(b_mmd, hauls)
x11(); plot(ext, col=brewer.pal(11,"BrBG"))
xtime = seq(as.Date("1980/1/1"), as.Date("1980/12/31"), "days")
length(xtime)   # "%Y-%m-%d %Z"
#
by1980 <- mean(b[[1:366]]); by1980
by1981 <- mean(b[[367:732]]); by1981   # by1981 <- mean(b_mmd[[367:732]]); by1981
#
getwd()
res(by1981)
b_mmd1981 <- (by1981 * 86400)
#
setwd("C:/Users/hp/Desktop")
bf <- writeRaster(b_mmd1981, filename=file.path("GCM_1981.tif"),
                  options="INTERLEAVE=BAND", overwrite=TRUE); getwd()
#
#
https://www.benjaminbell.co.uk/2018/01/getting-climate-data.html
#https://rfrelat.github.io/Spatial2_MultiExamples.html
#
dim(varpr)
#
dim(by1980)
#
by1980[, 1, ]
#
by1980[1, , ]
#
by1980[, ,1 ]
#
x11();plot(by1980)
#
# MAX MIN DJIBOUTI   https://earthexplorer.usgs.gov/
lon <- seq(41.56, 43.59, 0.5)
lat <- seq(10.81, 12.81, 0.5)
coords <- list(lon = lon, lat = lat)
dim(by1981)
by1981n = by1981[,,1]
length(by1981n)
# decompose the length into 6 integer factors to reshape into the s2dv_cube format:
# ONLINE CALCULATOR
# https://www.solumaths.com/fr/calculatrice-en-ligne/calculer/decompose_en_nombre_premier/19433
dim(by1980n) <- c(dataset = 1, member = 2, sdate = 4, ftime = 64, lat = 4, lon = 4)
dim(by1981n) <- c(dataset = 1, member = 2, sdate = 4, ftime = 64, lat = 4, lon = 4)
# dim() <- c(dataset = 1, member = 2, sdate = 3, ftime = 4, lat = 8, lon = 8)
# dim() <- c(dataset = 1, member = 6, sdate = 3, ftime = 31, lat = 4, lon = 4)
# 80 lon/lat values in the downscaled output
# https://www.tutorialspoint.com/r/r_arrays.htm#
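# A small sanity check for that factorisation step, assuming v is the flattened field to
# reshape (the dimension sizes below are the hypothetical ones used above):
v <- by1981n
dims <- c(dataset = 1, member = 2, sdate = 4, ftime = 64, lat = 4, lon = 4)
stopifnot(prod(dims) == length(v))   # the named sizes must multiply to the vector length
dim(v) <- dims                       # only then assign the s2dv_cube-style dimensions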
#
data <- list(data = by1981n, coords = coords)
class(data) <- "s2dv_cube"
names(data); dim(data$data)
#
exp1 <- s2dv_cube(data = exp_original)
#
attractor <- CST_ProxiesAttractor(data = data, quanti = 0.6)
#
slopes <- CST_RFSlope(data, time_dim = c("member", "ftime"))
#
ww <- CST_RFWeights(data,nf=20,lon = exp$coords$lon, lat = exp$coords$lat)
#
library(CSTools)
cbind(res(by1981))
nfc = (res(by1981)[1]*111) - 0.5; nfc   # 100/5 = 20 (nf); (+) (degrees x 111) - 0.5
# 311/0.03 = 10366.67   # factor for a 30-metre target
class(data)
#
#
# s2dv_cube version (list object):
exp_down1 <- CST_RainFARM(data, nf = 20, kmin = 1, nens = 3,
                          time_dim = c("member", "ftime"))
# OR the array version (matrix + 2 dimensions): the second possibility gives the same result.
# downscaled <- RainFARM(data$data, exp$coords$lon, exp$coords$lat, nf = nfc,
#                        kmin = 1, nens = 3, time_dim = c("member", "ftime"))
a
<- exp_down1$data[1, 1, 1, 1, 17, , ] * 86400
a[a
> 60] <- 60;a
# a <-
exp_down$data[1, 1, 1, 1, 17, , ] * 86400 * 1000;a[a > 60] <- 60;a # 60
pour limite de z
length(exp_down1$coords$lon)
length(rev(sort(exp_down1$coords$lat,decreasing=TRUE)))
dim(t(apply(a,
2, rev)))
# Même
dimension pour x y z
image(exp_down1$coords$lon[1:80],
rev(sort(exp_down1$coords$lat,decreasing=TRUE))[1:80], t(apply(a, 2, rev)),
xlab = "lon", ylab = "lat", col = rev(terrain.colors(20)),
zlim=c(0,60) )
#
map("world", add = TRUE); title(main = "pr 17/03/2010
downscaled")
data(wrld_simpl);plot(wrld_simpl,
add = TRUE)
#dev.off()
#
library(rasterize);library(raster)
x = exp_down1$coords$lon
y = exp_down1$coords$lat
z = exp_down1$data[1, 1, 1, 1, 17, , ] * 86400
#
r <- raster(z, xmn=range(x)[1], xmx=range(x)[2],
            ymn=range(y)[1], ymx=range(y)[2],
            crs=CRS("+proj=longlat +datum=WGS84 +no_defs"))
# "+proj=utm +zone=11 +datum=NAD83"
r
res(r)
plot(r);data(wrld_simpl);plot(wrld_simpl, add = TRUE)
#
setwd("C:/Users/hp/Desktop")
bf <- writeRaster(r, filename=file.path("GCM_1981DOWN.tif"), options="INTERLEAVE=BAND", overwrite=TRUE)
getwd()
#
#
# https://stackoverflow.com/questions/14513480/convert-matrix-to-raster-in-r
#
a <- data$data[1, 1, 1, 17, , ] * 86400 * 1;a[a > 60] <- 60;a
image(data$coords$lon, rev(sort(data$coords$lat,decreasing=TRUE)), t(apply(a, 2, rev)),
      xlab = "lon", ylab = "lat",
      col = rev(terrain.colors(20)), zlim = c(0,60))
data(wrld_simpl);plot(wrld_simpl, add = TRUE)
#
library(rasterize);library(raster)
x = data$coords$lon
y = data$coords$lat
z = data$data[1, 1, 1, 17, , ] * 86400
r <- raster(z, xmn=range(x)[1], xmx=range(x)[2], ymn=range(y)[1],
            ymx=range(y)[2], crs=CRS("+proj=longlat +datum=WGS84 +no_defs"))
# "+proj=utm +zone=11 +datum=NAD83"
r;res(r)
setwd("C:/Users/hp/Desktop")
bf <- writeRaster(r, filename=file.path("GCM_1981ORG.tif"), options="INTERLEAVE=BAND", overwrite=TRUE);getwd()
#
# autodownscaled
#
# Image PLOT
# Data
x <- -10:10
y <- -10:10
z <- sqrt(outer(x ^ 2, y ^ 2, "+"))
image(x, y, z)
#
# You can also type the following, but the axes will be between 0 and 1
image(z)
#
image(x, y, z)
contour(x, y, z, add = TRUE)
#
# https://www.r-bloggers.com/2022/10/how-to-use-the-image-function-in-r/
#
#
# Retrieval and formatting: CST_Load*, CST_Anomaly*, CST_SaveExp, CST_MergeDims,
#   CST_SplitDims, as.s2dv_cube, s2dv_cube
# Classification: CST_MultiEOF, CST_WeatherRegimes*, CST_RegimesAssign*,
#   CST_CategoricalEnsCombination, CST_EnsClustering*
# Downscaling: CST_Analogs*, CST_RainFarm*, CST_RFTemp, CST_AdamontAnalog, CST_AnalogsPredictors
# Correction: CST_BEI_Weighting*, CST_BiasCorrection, CST_Calibration, CST_QuantileMapping, CST_DynBiasCorrection
# Assessment: CST_MultiMetric*, CST_MultivarRMSE*
#
# Coordinated Regional Climate Downscaling Experiment [CORDEX]
# https://sci-hub.st/10.1016/j.heliyon.2021.e07791
#
getwd()
setwd("C:/Users/hp/Downloads/RCM DATA")
dir()
RCM <- stack("C:/Users/hp/Downloads/RCM DATA/pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc")
#
ncol(RCM)
nrow(RCM)
ncell(RCM)
nlayers(RCM)
dim(RCM)
#
projection(RCM)
res(RCM)
inMemory(RCM) # check if the data is stored in memory
fromDisk(RCM) # check if the data was read from disk
#
names(RCM)
head(names(RCM))
x11();plot(RCM$X1981.01.01)
tail(names(RCM))
x11();spplot(RCM$X1985.12.31) # lattice plot, returns a trellis object
#
dir <- "C:/Users/hp/Downloads/RCM DATA/pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc"
#
setwd("C:/Users/hp/Downloads/RCM DATA")
nc_fname <- "pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc"
our_nc_data <- nc_open("pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc") # correct name
#
data_v1 <- ncvar_get(our_nc_data, attributes(our_nc_data$var)$names[5]); data_v1
#
attributes(our_nc_data$var)
varpr = ncvar_get(our_nc_data, "pr")
varlon = ncvar_get(our_nc_data, "lon")
varlat = ncvar_get(our_nc_data, "lat")
#
start.date = as.Date('1981-01-01')
times.sec <- ncvar_get(our_nc_data, 'time')
times.day <- times.sec/(24*3600)
datex <- start.date + times.day
length(datex)
time = datex
#-#-#
dim(varpr)
length(varpr)
vp <- varpr[, , 1]      # first day
dim(vp)
vp1 <- varpr[, , 1:365] # first year
dim(vp1)                # 3-dimensional array
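# To pull a single-pixel daily series out of varpr (useful for the pixel analyses in
# section 7), one option is to take the grid cell nearest to a target point. This is a
# sketch of my own, assuming varlon/varlat are 2-D coordinate matrices, as is typical
# for rotated-pole CORDEX grids; the target coordinates are only illustrative.
target_lon <- 43.15; target_lat <- 11.59
d2  <- (varlon - target_lon)^2 + (varlat - target_lat)^2 # squared distance to every cell
idx <- which(d2 == min(d2), arr.ind = TRUE)[1, ]         # row/col of the nearest cell
pr_point <- varpr[idx[1], idx[2], ] * 86400              # daily series in mm/day
plot(datex, pr_point, type = "l", xlab = "date", ylab = "pr (mm/day)")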
#-#-#
dir <- "C:/Users/hp/Downloads/RCM DATA/pr_AFR-22_CCCma-CanESM2_historical_r1i1p1_CCCma-CanRCM4_r2_day_19810101-19851231.nc"
b <- brick(dir, varname="pr", lvar=4)
res(b)          # resolution in degrees (x 111 to approximate km)
projection(b)
proj4string(b)  # CRS (WGS84) and time reference
#
# proj4string(x) <- st_as_sf(b, coords = c("varlon", "varlat"), CRS="+proj=longlat +datum=WGS84 +ellps=WGS84 +towgs84=0,0,0")
print(b)
b
crs(b) <- "+proj=longlat +datum=WGS84 +ellps=WGS84 +towgs84=0,0,0"
x11();plot(b[[1]]) # $X1981.01.01
r <- flip(b[[1]], direction="y"); x11();plot(r) # flip the image vertically
#
plots_harv <- read_sf("C:/Users/hp/Documents/ArcGIS 10.8/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp")
dir <- "C:/Users/hp/Documents/ArcGIS 10.8/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp"
name <- "Regions"
hauls <- readOGR(dir, name)
crs(hauls)
x11();plot(hauls, col=1:5)
crs(r)
r.clip <- mask(r, hauls)
x11();plot(r.clip[[1]], xlim=c(40,43), ylim=c(10,14))
#
ROI <- extent(41.56, 43.59, 10.81, 12.81) # extent(xmin, xmax, ymin, ymax)
r.crop <- crop(r, ROI)
x11();plot(r.crop[[1]]); res(r.crop[[1]])
#
plot(r.clip); plot(hauls, col="transparent", add=T)
ext <- raster::extract(r, hauls, method='simple', fun=mean, df=TRUE)
ext1 <- raster::extract(r, hauls, method='simple', fun=mean, sp=TRUE)
x11();plot(ext[[1]]); res(ext[[1]])
#
# toolik_series <- raster::extract(r_brick, SpatialPoints(cbind(toolik_lon,toolik_lat)), method ='bilinear', fun=mean, df=TRUE)
#
xtime=seq(as.Date("1981/1/1"), as.Date("1981/12/31"), "days")
length(xtime)
# "%Y-%m-%d %Z"
#
b1981=mean(b[[1:365]])
b1981_mmd <- (b1981 * 86400)
dim(b1981_mmd)
#
setwd("C:/Users/hp/Desktop")
bf <- writeRaster(b1981_mmd, filename=file.path("RCM_1981.tif"), options="INTERLEAVE=BAND", overwrite=TRUE);getwd()
res(bf)
#
b1981n=b1981_mmd[,,1]
length(b1981n) # 155976
# <- the raster has more cells than the target dimensions; otherwise NAs would appear.
b1981n <- b1981n[1:(1*2*4*304*8*8)]
length(b1981n) # CHEAT: truncated so the length matches the product of the chosen dimensions
#
dim(b1981n) <- c(dataset = 1, member = 2, sdate = 4, ftime = 304, lat = 8, lon = 8)
#
lon <- seq(41.56, 43.59, 0.5)
lat <- seq(10.81, 12.81, 0.5)
coords <- list(lon = lon, lat = lat)
data <- list(data = b1981n, coords = coords)
class(data) <- "s2dv_cube"
#
library(CSTools)
nfc = (res(b1981)[1]*111)-0.5 ; nfc # e.g. 100/5 = 20 (nf); here (degrees x 111) - 0.5
# 311/0.03 = 10366.67 # refinement factor for a 30 m target
#
# s2dv_cube version (list object):
exp_down1 <- CST_RainFARM(data, nf = 20, kmin = 1, nens = 3,
                          time_dim = c("member", "ftime")) # size problem: 1.4 Gb
# Reduce the s2dv_cube to a single year beforehand: with more data the object is too large and the results become problematic.
#
# "+proj=longlat +datum=WGS84 +ellps=WGS84 +towgs84=0,0,0"
# r_brick <- brick(varpr, xmn=-24.86, xmx=60.50001, ymn= -45.98, ymx=42.46,
#                  crs=CRS("+proj=ob_tran +o_proj=longlat +o_lon_p=-162.0 +o_lat_p=39.25 +lon_0=180 +lat_0=0"))
#
# r_brickP <- projectRaster(r_brick, crs="+init=epsg:3035")
#
#
# DOUBT: does the resolution change?
lon <- ncvar_get(our_nc_data, varid ="lon")
lat <- ncvar_get(our_nc_data, varid="lat")
# TdmEUR98 <- ncvar_get(our_nc_data, varid="pr")
# TymEUR98 <- rowMeans(varpr, na.rm = T, dims=2) # calculate the year mean
# data <- as.data.frame(TymEUR98)
vp_mmd <- (vp1 * 86400)
length(vp_mmd)
data <- as.data.frame(vp_mmd)
# data <- as.data.frame(mean(vp_mmd))
#
r <- raster(t(data), xmn=min(lon), xmx=max(lon), ymn=min(lat), ymx=max(lat),
            crs=CRS("+proj=longlat +datum=WGS84 +ellps=WGS84 +towgs84=0,0,0"))
res(r)
# "+proj=ob_tran +o_proj=longlat +o_lon_p=-162 +o_lat_p=39.25 +lon_0=180 +to_meter=0.01745329"
# "+proj=longlat +datum=WGS84 +no_defs"
x11();r; plot(r);data(wrld_simpl);plot(wrld_simpl, add = TRUE)
#
setwd("C:/Users/hp/Desktop")
bf <- writeRaster(r, filename=file.path("RCM_19811.tif"), options="INTERLEAVE=BAND", overwrite=TRUE); getwd()
#-#-#-#-#-#-#-#-#
r <- flip(r, direction="y"); plot(r)
dir <- "C:/Users/hp/Documents/ArcGIS 10.8/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp"
name <- "Regions"
hauls <- readOGR(dir, name)
crs(hauls)
plot(hauls)
crs(r)
r.clip <- mask(r, hauls)
plot(r.clip)
plot(hauls, col="transparent", add=T)
#
# ext <- raster::extract(SJER_chm,
#                        SJER_plots,
#                        buffer = 20,   # specify a 20 m radius
#                        fun = mean,    # extract the MEAN value from each plot
#                        sp = TRUE,     # create spatial object
#                        stringsAsFactors = FALSE)
#
ext <- extract(r, hauls, method='simple')
#
# v <- st_transform_proj(hauls, "+proj=longlat +datum=WGS84 +no_defs")
#
# https://gis.stackexchange.com/questions/429065/reprojecting-raster-or-vector-crs-and-extracting-raster-values-based-on-vector-b
#
#
# Statistical Downscaling: raster.downscale & KrigR
library(geodata)
library(terra)
require(sf)
library(KrigR)
library(spatialEco)
#
elev <- geodata::elevation_30s(country="DJI", path=tempdir())
slp <- terrain(elev, v="slope")
tmax <- geodata::worldclim_country(country="DJI", var="tmax", path=tempdir())
tmax <- crop(tmax[[1]], ext(elev))
#
# Downscale temperature
x = c(elev, slp)
names(x) <- c("elev","slope")
y = tmax
names(y) <- c("tmax")
tmax.ds <- raster.downscale(x, y, scatter=TRUE, residuals = TRUE,
                            uncertainty = "prediction", se = TRUE)
#
X11();opar <- par(no.readonly=TRUE);par(mfrow=c(2,2))
plot(tmax, main="Temp max")
plot(x[[1]], main="elevation")
plot(x[[2]], main="slope")
plot(tmax.ds$downscale, main="Downscaled Temp max")
par(opar)
#
# Plot residual error and raw prediction +/- intervals
x11();opar <- par(no.readonly=TRUE);par(mfrow=c(2,2))
plot(tmax.ds$std.error, main="Standard Error")
plot(tmax.ds$residuals, main="residuals")
plot(tmax.ds$uncertainty[[1]], main="lower prediction interval")
plot(tmax.ds$uncertainty[[2]], main="upper prediction interval")
par(opar)
#
# Plot prediction uncertainty: LOWER and UPPER prediction interval
x11();opar <- par(no.readonly=TRUE);par(mfrow=c(2,1))
plot(tmax.ds$downscale - tmax.ds$uncertainty[[1]], main="lower prediction interval")
plot(tmax.ds$downscale - tmax.ds$uncertainty[[2]], main="upper prediction interval")
par(opar)
#
# WITH KrigR:
getwd()
setwd("C:/Users/hp/Downloads/GCM DATA")
dir()
Dir.Covariates <- "C:/Users/hp/Downloads/GCM DATA"
plots_harv <- read_sf("C:/Users/hp/Documents/ArcGIS 10.8/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp")
#
SpatialPolygonsRaw <- stack("pr_day_CanESM5_historical_r1i1p1f1_gn_19800101-20141231_v20190429(mondiale).nc")
Covs_ls <- download_DEM(Train_ras = SpatialPolygonsRaw, # the data we want to downscale
                        Target_res = .02,               # the resolution we want to downscale to
                        Shape = plots_harv,             # extra spatial preferences
                        Dir = Dir.Covariates)           # where to store the covariate files
Covs_ls
#
Plot_Covs(Covs_ls, Shape_shp)
#
Dir.Exports <- "C:/Users/hp/Downloads/GCM DATA"
SpatialPolygonsKrig <- krigR(Data = SpatialPolygonsRaw,            # data we want to krige, as a raster object
                             Covariates_coarse = Covs_ls[[1]],     # training covariate as a raster object
                             Covariates_fine = Covs_ls[[2]],       # target covariate as a raster object
                             Keep_Temporary = FALSE,               # do not retain the individually kriged layers on disk
                             Cores = 1,                            # krige on just one core
                             FileName = "SpatialPolygonsKrig",     # the file name for the full kriging output
                             Dir = Dir.Exports)                    # which directory to save the final output in
#
SpatialPolygonsKrig[-3] # we will talk later about the third element
Plot_Krigs(SpatialPolygonsKrig,
           Shp = Shape_shp,
           Dates = c("01-1995", "02-1995", "03-1995", "04-1995"))
#
Covs_ls <- download_DEM(Train_ras = SpatialPolygonsRaw,
                        Target_res = .02,
                        Shape = Shape_shp,
                        Dir = Dir.Covariates,
                        Keep_Temporary = TRUE)
#
SpatialPolygonsLocalKrig <- krigR(Data = SpatialPolygonsRaw,
                                  Covariates_coarse = Covs_ls[[1]],
                                  Covariates_fine = Covs_ls[[2]],
                                  Keep_Temporary = FALSE,
                                  Cores = 1,
                                  nmax = 10,
                                  FileName = "SpatialPolygonsLocalKrig",
                                  Dir = Dir.Exports)
Plot_Krigs(SpatialPolygonsLocalKrig,
           Shp = Shape_shp,
           Dates = c("01-1995", "02-1995", "03-1995", "04-1995"))
Plot_Raw(SpatialPolygonsLocalKrig[[1]] - SpatialPolygonsKrig[[1]],
         Shp = Shape_shp,
         Dates = c("01-1995", "02-1995", "03-1995", "04-1995"))
#
SpatialPolygonsEns <- stack(file.path(Dir.Data, "SpatialPolygonsEns.nc"))
DynUnc <- SpatialPolygonsEns
KrigUnc <- SpatialPolygonsKrig[[2]]
#
EnsDisagg <- disaggregate(DynUnc, fact=res(DynUnc)[1]/res(KrigUnc)[1])
DynUnc <- resample(EnsDisagg, KrigUnc)
FullUnc <- DynUnc + KrigUnc
#
Plot_Raw(FullUnc,
         Shp = Shape_shp,
         Dates = c("01-1995", "02-1995", "03-1995", "04-1995"),
         COL = rev(viridis(100)))
#
# https://www.erikkusch.com/courses/krigr/kriging/
#
#
# Disaggregation modeling
# install.packages("disaggregation")
library("disaggregation")
set.seed(5)
dis_data <- prepare_data(polygon_shapefile = shapes,
                         covariate_rasters = covariate_stack,
                         aggregation_raster = population_raster,
                         mesh.args = list(max.edge = c(0.7, 8), cut = 0.05, offset = c(1, 2)),
                         id_var = "ID_2", response_var = "inc", na.action = TRUE, ncores = 8)
summary(dis_data)
plot(dis_data)
#
fitted_model <- disag_model(data = dis_data,
                            iterations = 1000,
                            family = "poisson", link = "log")
#
# https://www.jstatsoft.org/article/view/v106i11
#
# Spatial data analysis » Interpolation
library(rspat)
library(gstat)
# (vca, dta, p, ca, fve and OZDLYAV are example objects from the rspatial.org interpolation tutorial linked below)
r <- rast(vca, res=10000)
vr <- rasterize(vca, r, "prec")
plot(vr)
#
#| Nearest neighbour interpolation
library(gstat)
d <- data.frame(geom(dta)[, c("x", "y")], as.data.frame(dta))
head(d)
#
gs <- gstat(formula=prec~1, locations=~x+y, data=d, nmax=5, set=list(idp = 0))
nn <- interpolate(r, gs, debug.level=0)
nnmsk <- mask(nn, vr)
plot(nnmsk, 1)
#
#| Inverse distance weighted
library(gstat)
gs <- gstat(formula=prec~1, locations=~x+y, data=d)
idw <- interpolate(r, gs, debug.level=0)
idwr <- mask(idw, vr)
plot(idwr, 1)
#
idm <- gstat(formula=OZDLYAV~1, locations=~x+y, data=p)
idp <- interpolate(r, idm, debug.level=0)
idp <- mask(idp, ca)
plot(idp, 1)
#
#| Ordinary kriging
k <- gstat(formula=OZDLYAV~1, locations=~x+y, data=p, model=fve)
# predicted values
kp <- interpolate(r, k, debug.level=0)
ok <- mask(kp, ca)
names(ok) <- c('prediction', 'variance')
plot(ok)
#
#| Thin plate spline model
library(fields)
m <- fields::Tps(p[, c("x", "y")], p$OZDLYAV)
tps <- interpolate(r, m)
tps <- mask(tps, idw[[1]])
plot(tps)
#
# https://rspatial.org/analysis/4-interpolation.html
# https://gis.stackexchange.com/questions/3670/spatio-temporal-interpolation-in-r-or-arcgis
#
#
# Interpolation: create a Triangulated Irregular Network (TIN)
# install.packages("devtools")
# devtools::install_github("ropensci/lawn")
library(lawn)
require(leaflet)
#
pts <- lawn_random(bbox = c(-70, 40, -60, 60))
lawn_tin(pts)
## Not run:
lawn_tin(pts) %>% view
lawn_tin(lawn_random(bbox = c(-70, 40, -60, 10))) %>% view
#
# lawn_count(polygons = lawn_data$polygons_count, points = lawn_data$points_count)
# lawn_average(polygons = lawn_data$polygons_average, points = lawn_data$points_average, 'population')
# lawn_distance(from, to)
# lawn_random(n = 5)
# lawn_sample(dat, 2)
# lawn_extent(lawn_data$points_average)
# lawn_within(lawn_data$points_within, lawn_data$polygons_within)
# https://www.rdocumentation.org/packages/lawn/versions/0.1.7
#
#
# TIN mesh object for use within TELEMAC
# BASIC FUNCTIONALITY
library(sf)
require(telemac)
require(RTriangle)
library(tidyverse)
#
# load boundary as sf linestring
bnd <- st_read(system.file("dem/boundary_lagos.gpkg", package = "telemac"))
# create t2d_tin object
tin_obj <- tin(list(boundary = bnd), s = 90, a = 100^2, q = 30)
# inspection
tin_obj
str(tin_obj)
plot(tin_obj, pch = ".")
### DEALING WITH INTERSECTING BREAKLINES
# example boundary with intersecting breaklines
test_bnd <- st_linestring(
  matrix(c(seq(0,100,5), rep(100,21), seq(100,0,-5), rep(0,21),
           rep(0,21), seq(0,100,5), rep(100,21), seq(100,0,-5)),
         ncol = 2)
) %>% st_sfc()
test_brk <- list(
  st_linestring(matrix(c(seq(0,100,5), rep(50,21)), ncol = 2)),
  st_linestring(matrix(c(rep(50,21), seq(0,100,5)), ncol = 2)),
  st_linestring(matrix(c(seq(30,60,5), rep(60,11), rep(20,7), seq(20,70,5)), ncol = 2))) %>% st_sfc()
# get intersection points and define a buffer of 2 around these points
pt_inters <- c(test_bnd, test_brk) %>%
  st_intersection() %>%
  st_collection_extract(type = "POINT") %>%
  st_buffer(2)
#
plot(test_bnd)
plot(test_brk, add = TRUE)
plot(pt_inters, add = TRUE)
# split breaklines
test_brk_unique <- st_difference(st_union(test_brk), st_union(pt_inters))
plot(test_bnd)
plot(test_brk_unique, add = TRUE)
# create mesh
tin_obj <- tin(list(boundary = test_bnd, breaklines = test_brk_unique), s = 2, s_brk = 2, a = 4, q = 30)
plot(tin_obj, pch = ".")
#
# https://rdrr.io/cran/lawn/man/lawn_tin.html
# https://search.r-project.org/CRAN/refmans/telemac/html/tin.html
# akima interpolation
# https://cran.r-project.org/web/packages/akima/akima.pdf
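# The akima manual is linked above; as a quick hedged illustration (my own sketch, with
# made-up station coordinates and values), scattered observations can be interpolated
# onto a regular grid with akima::interp():
library(akima)
set.seed(1)
st_lon <- runif(30, 41.6, 43.4)
st_lat <- runif(30, 10.9, 12.7)
st_pr  <- runif(30, 0, 25) # hypothetical rainfall values (mm)
grd <- interp(x = st_lon, y = st_lat, z = st_pr,
              xo = seq(41.6, 43.4, length = 100),
              yo = seq(10.9, 12.7, length = 100),
              linear = TRUE) # bilinear; linear = FALSE uses spline interpolation
image(grd, xlab = "lon", ylab = "lat"); contour(grd, add = TRUE)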
#
# Climate Data Tools (CDT) from IRI, Earth Institute, Columbia University (USA)
# Regrid Spatial Data
# install_github("rijaf-iri/CDT")
library(CDT)
startCDT()
#
# https://iri.columbia.edu/~rijaf/CDTUserGuide/html/regrid_spatial_data.html
# https://iri.columbia.edu/~rijaf/CDTUserGuide/html/cdt_file_menu.html#Starting_CDT_
# https://github.com/rijaf-iri/CDT
#
# ESMValTool?
# Climate Downscaling Tool (ClimateDT) is a geo-web service aimed at downscaling a large
# number of climatic variables and indices from multiple climate scenarios.
# https://www.ibbr.cnr.it/climate-dt/?id=ehvts8oh47o3i7p1mqphhvlof1
#
# CLIMDEX.PCIC
# https://www.pacificclimate.org/resources/software-library
#
# CSTools EnsClustering: https://www.youtube.com/watch?v=9fs_1FNnLJM
# https://www.youtube.com/watch?v=j1Ab3dLkmM0
#
#
# Other Programs
require(transformeR)
require(climate4R.datasets)
library(downscaleR)
require(seas)
library("devtools")
devtools::install_github('leerichardson/sdsmR')
# devtools::install_github("pacificclimate/ClimDown", ref="release")
# devtools::install_github("pacificclimate/ClimDown", ref="1.0.1")
require(climdex.pcic)
#
fname <- system.file("extdata", "GF_2050s_precip.OUT", package="seas")
gf50 <- read.sdsm(fname)
gf50.ss <- seas.sum(gf50, var=paste("V", 1:20, sep=""), name="Grand Forks")
# analysis
image(gf50.ss, var="V1")
image(gf50.ss, var="V2")
image(gf50.ss, var="V3")
# writing
data(mscdata)
hj <- mksub(mscdata, id=2100630)
fname <- paste(tempdir(), "HJ_Obs_prcp.DAT", sep="/")
write.sdsm(hj, "precip", 1961, 2000, fname)
# read.sdsm(file, start = 1961, end = 2000, calendar) # function signature
#
require(transformeR)
require(climate4R.datasets)
data(NCEP_Iberia_hus850, NCEP_Iberia_ta850)
x <- makeMultiGrid(NCEP_Iberia_hus850, NCEP_Iberia_ta850)
x <- subsetGrid(x, years = 1985:1995)
# Loading predictands
data("VALUE_Iberia_pr")
y <- VALUE_Iberia_pr
y <- getTemporalIntersection(obs = y, prd = x, "obs")
x <- getTemporalIntersection(obs = y, prd = x, "prd")
# ... kfold in 3 equally divided parts ...
pred <- downscaleCV(x, y, folds = 3, sampling.strategy = "kfold.chronological",
                    scaleGrid.args = list(type = "standardize"),
                    method = "GLM",
                    family = Gamma(link = "log"), condition = "GT", threshold = 0,
                    prepareData.args = list(
                      "spatial.predictors" = list(which.combine = getVarNames(x), v.exp = 0.9)))
# ... kfold by years ...
pred <- downscaleCV(x, y, sampling.strategy = "kfold.chronological",
                    method = "GLM",
                    condition = "GT", threshold = 0,
                    scaleGrid.args = list(type = "standardize"),
                    folds = list(c(1985,1986,1987,1988),
                                 c(1989,1990,1991,1992),
                                 c(1993,1994,1995)))
# ... leave one year out ...
pred <- downscaleCV(x, y, sampling.strategy = "leave-one-year-out",
                    method = "GLM",
                    condition = "GT", threshold = 0,
                    scaleGrid.args = list(type = "standardize"))
# Reconstructing the downscaled series in 3 folds with local predictors.
pred <- downscaleCV(x, y, folds = 3, sampling.strategy = "kfold.chronological",
                    scaleGrid.args = list(type = "standardize"),
                    method = "GLM",
                    condition = "GT", threshold = 0,
                    prepareData.args = list("local.predictors" = list(vars = "hus@850", n = 4)))
# https://rdrr.io/github/SantanderMetGroup/downscaleR/man/downscaleCV.html
#
library(downscaleR)
data("VALUE_Iberia_tas") # illustrative datasets included in transformeR
y <- VALUE_Iberia_tas
data("NCEP_Iberia_hus850", "NCEP_Iberia_psl", "NCEP_Iberia_ta850")
x <- makeMultiGrid(NCEP_Iberia_hus850, NCEP_Iberia_psl, NCEP_Iberia_ta850)
#
# Calculating predictors
data <- prepareData(x = x, y = y, spatial.predictors = list(v.exp = 0.95))
# Fitting statistical downscaling methods (simple case, no cross-validation)
analog <- downscale.train(data, method = "analogs", n.analogs = 1)
regression <- downscale.train(data, method = "GLM", family = gaussian)
neuralnet <- downscale.train(data, method = "NN", hidden = c(10,5), output = "linear")
#
# Extracting the results for a particular station (Igueldo) for a single year (2000)
igueldo.2000 <- subsetGrid(y, station.id = "000234", years = 2000)
analog.2000 <- subsetGrid(analog$pred, station.id = "000234", years = 2000)
regression.2000 <- subsetGrid(regression$pred, station.id = "000234", years = 2000)
neuralnet.2000 <- subsetGrid(neuralnet$pred, station.id = "000234", years = 2000)
#
library(visualizeR) # data visualization utilities
temporalPlot(igueldo.2000, analog.2000, regression.2000, neuralnet.2000)
#
# https://www.jstatsoft.org/article/view/v086c03
# https://gmd.copernicus.org/articles/13/1711/2020/
#
#
library('qmap')
cbind(names(data))
#
obsprecip = na.omit(pr_OBS)
modprecip = na.omit(pr_CanESM5_ssp585)
#
qm5.fit <- fitQmap(obsprecip, modprecip, qstep=0.01, method="SSPLIN")
qm5 <- doQmap(modprecip, qm5.fit)
write.table(qm5, "data.txt", col.names=FALSE, row.names=FALSE);getwd()
#
data(obsprecip)
data(modprecip)
##=== call to fitQmapPTF and doQmapPTF
qm1.fit <- fitQmap(obsprecip, modprecip, method="PTF",
                   transfun="expasympt", cost="RSS", wett.day=TRUE)
qm1 <- doQmap(modprecip, qm1.fit)
##=== call to fitQmapDIST and doQmapDIST
qm2.fit <- fitQmap(sqrt(obsprecip), sqrt(modprecip),
                   method="DIST", qstep=0.001, transfun="berngamma")
qm2 <- doQmap(sqrt(modprecip), qm2.fit)^2
##=== call to fitQmapRQUANT and doQmapRQUANT
qm3.fit <- fitQmap(obsprecip, modprecip, method="RQUANT", qstep=0.01)
qm3 <- doQmap(modprecip, qm3.fit, type="linear")
##=== call to fitQmapQUANT and doQmapQUANT
qm4.fit <- fitQmap(obsprecip, modprecip, method="QUANT", qstep=0.01)
qm4 <- doQmap(modprecip, qm4.fit, type="tricub")
##=== call to fitQmapSSPLIN and doQmapSSPLIN
qm5.fit <- fitQmap(obsprecip, modprecip, qstep=0.01, method="SSPLIN")
qm5 <- doQmap(modprecip, qm5.fit)
sqrtquant <- function(x, qstep=0.001){
  qq <- quantile(x, prob=seq(0,1,by=qstep))
  sqrt(qq)
}
op <- par(mfrow=c(1,3))
for(i in 1:3){
  plot(sqrtquant(modprecip[,i]),
       sqrtquant(obsprecip[,i]), pch=19, col="gray",
       main=names(obsprecip)[i])
  lines(sqrtquant(modprecip[,i]), sqrtquant(qm1[,i]), col=1)
  lines(sqrtquant(modprecip[,i]), sqrtquant(qm2[,i]), col=2)
  lines(sqrtquant(modprecip[,i]), sqrtquant(qm3[,i]), col=3)
  lines(sqrtquant(modprecip[,i]), sqrtquant(qm4[,i]), col=4)
  lines(sqrtquant(modprecip[,i]), sqrtquant(qm5[,i]), col=5)
}
#
legend("topleft",
       legend=c("PTF","DIST","RQUANT","QUANT","SSPLIN"),
       lty=1, col=1:5); par(op)
#
# create sample data
sample_Data = rnorm(500)
CDF <- ecdf(sample_Data) # calculate the empirical CDF
plot(CDF)                # draw the CDF plot
#
require(HDInterval)
inverseCDF(c(0.025, 0.975), pgamma, shape=2.5, rate=2) # 95% interval
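# To make explicit what the QUANT/RQUANT methods do internally, here is a hand-rolled
# empirical quantile-mapping sketch of my own (not the qmap implementation): model values
# are mapped through the model quantiles onto the observed quantiles. Wet-day handling
# and extrapolation beyond the calibration range are ignored.
eqm_simple <- function(obs, mod, mod_to_correct = mod, qstep = 0.01) {
  p  <- seq(0, 1, by = qstep)
  qo <- quantile(obs, probs = p, na.rm = TRUE) # observed quantiles
  qm <- quantile(mod, probs = p, na.rm = TRUE) # model quantiles (calibration period)
  # map each model value to the observed value with the same non-exceedance probability
  approx(x = qm, y = qo, xout = mod_to_correct, rule = 2, ties = mean)$y
}
# Toy usage with synthetic data
set.seed(42)
obs_toy <- rgamma(1000, shape = 0.8, rate = 0.5)
mod_toy <- rgamma(1000, shape = 0.6, rate = 0.3) # deliberately biased "model"
corrected <- eqm_simple(obs_toy, mod_toy)
quantile(obs_toy, c(.5, .9, .99)); quantile(corrected, c(.5, .9, .99))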
#
#
# https://app.datacamp.com/workspace/preview?_tag=rdocs&rdocsPath=packages%2Fqmap%2Fversions%2F1.0-4%2Ftopics%2FfitQmap&utm_source=r-docs&utm_medium=docs&utm_term=fitQmap&utm_content=run_example_in_workspace
#
#|- Empirical Quantile Mapping (EQM) statistical downscaling method, Wetterhall and colleagues (2012)
# Non-parametric quantile mapping using empirical quantiles.
#
library('qmap')
cbind(names(data))
#
obsprecip = na.omit(pr_OBS)
modprecip = na.omit(pr_CanESM5_ssp119)
#
# qm.fit1 <- fitQmapQUANT(obsprecip, modprecip, qstep=0.1, nboot=1, wet.day=TRUE)
# qm.s <- doQmapQUANT(modprecip, qm.fit1, type="tricub")
# write.table(qm.s, "data.txt", col.names=FALSE, row.names=FALSE);getwd()
#
qm.fit <- fitQmapRQUANT(obsprecip, modprecip, qstep=0.1, nboot=10, wet.day=TRUE)
qm.b <- doQmapRQUANT(modprecip, qm.fit, type="tricub")
write.table(qm.b, "data.txt", col.names=FALSE, row.names=FALSE);getwd()
#
# https://cran.r-project.org/web/packages/qmap/qmap.pdf
# The DELTA method performs better than QM and EQM in the SDGCM tools:
# https://www.researchgate.net/publication/317370485_Scaled_distribution_mapping_A_bias_correction_method_that_preserves_raw_climate_model_projected_changes
# https://agrimetsoft.com/statistical%20downscaling
# https://rdrr.io/cran/CSTools/man/RainFARM.html
# https://cran.r-project.org/web/packages/rainfarmr/readme/README.html
#
# A Novel Semi-parametric Quantile Mapping Method [SPQM]: Precipitation Bias Correction:
# QM  (Standard Quantile Mapping)
# DQM (Detrended Quantile Mapping)
# QDM (Quantile Delta Mapping)
# SDM (Scaled Distribution Mapping)
# UQM (Unbiased Quantile Mapping)
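# Of the methods listed above, Quantile Delta Mapping (QDM) explicitly preserves the
# model-projected relative change in each quantile. Below is a rough conceptual sketch
# of my own (multiplicative form, for precipitation), not the climQMBC implementation;
# dry-day handling and division-by-zero protection are ignored.
qdm_simple <- function(obs_h, mod_h, mod_p) {
  p <- ecdf(mod_p)(mod_p)                    # non-exceedance probability of each projected value
  delta <- quantile(mod_p, p, na.rm = TRUE) /
           quantile(mod_h, p, na.rm = TRUE)  # projected relative change per quantile
  unname(quantile(obs_h, p, na.rm = TRUE) * delta) # observed quantile scaled by that change
}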
#
#| Calibration of daily forecast time series
# devtools::install_github("jonasbhend/biascorrection", build_vignettes=TRUE)
library(biascorrection)
require(fitdistrplus)
require(MASS)
require(survival)
require(easyVerification)
require(SpecsVerification)
ls(pos='package:biascorrection')
list_methods()
# Calibration (bias correction)
require(climQMBC) # installed from a local archive via the RStudio Packages pane
#
var <- 1
if (var==1){
  agg <- sum
  var_txt <- 'pp'
} else {
  var_txt <- 'tmax'
  agg <- mean
}
#
obs <- matrix(data$pr_OBS, ncol = 1)
mod <- matrix(data$pr_CanESM5_hist, ncol = 1)
rep_series <- report(obs, mod, var)
#
#| Quantile Mapping (QM) statistical downscaling method, Panofsky and Brier (1968)
library('qmap')
cbind(names(data))
obsprecip = na.omit(pr_OBS)
modprecip = na.omit(pr_CanESM5_ssp585)
qm5.fit <- fitQmap(obsprecip, modprecip, qstep=0.01, method="SSPLIN")
qm5 <- doQmap(modprecip, qm5.fit)
write.table(qm5, "data.txt", col.names=FALSE, row.names=FALSE);getwd()
#
#
# OLD VERSION ClimDown ~ netcdf
# install.packages('devtools')
# devtools::install_github("pacificclimate/ClimDown", ref="release", force=TRUE)
# devtools::install_github("pacificclimate/ClimDown", ref="1.0.1", force=TRUE)
#| CI:     1 core,  10 GB RAM, run time ~ 7 hours
#| CA:     8 cores, 10 GB RAM, run time ~ 2 hours
#| QDM:    1 core,  36 GB RAM, run time ~ 1.5 days
#| rerank: 4 cores,  8 GB RAM, run time ~ 4 days
#
require(ClimDown)
library(doParallel)
library(ncdf4)
library(cmsaf)
library(hydroGOF)
setwd("C:/Users/hp/Downloads/inputtest")
dir()
ncpath <- "C:/Users/hp/Downloads/inputtest/obs.file/"; setwd(ncpath)
ncname <- "MM_pr_observed"
ncfname <- paste(ncpath, ncname, ".nc", sep="")
ncin <- nc_open(ncfname);class(ncin)
print(ncin)
attributes(ncin$dim)
attributes(ncin$var)
rstack <- stack("MM_pr_observed.nc")
writeRaster(rstack, "rstack.nc", overwrite=TRUE, format="CDF", varname="Temperature",
            varunit="degC", longname="Temperature -- raster stack to netCDF",
            xname="X", yname="Y", zname="nbands", zunit="numeric")
#
gcm.file = "C:/Users/hp/Downloads/inputtest/gcm.file"
obs.file = "C:/Users/hp/Downloads/inputtest/obs.file"
# out.file / output.file: paths of the NetCDF files to be written (define them before calling the wrappers)
bccaq.netcdf.wrapper(gcm.file, obs.file, out.file, varname = "pr")
ci.netcdf.wrapper(gcm.file, obs.file, output.file, varname = "tasmax")
qdm.netcdf.wrapper(obs.file, gcm.file, out.file, varname = "tasmax")
# High-level NetCDF wrapper for Quantile Reranking
rerank.netcdf.wrapper(qdm.file, obs.file, analogues, out.file, varname = "tasmax")
# ClimDown: PCIC's daily Climate Downscaling library
# https://ccafs-climate.org/downloads/docs/Climgen-Downscaling-Tyndall.pdf
# https://towardsdatascience.com/how-to-crack-open-netcdf-files-in-r-and-extract-data-as-time-series-24107b70dcd
# https://rstudio-pubs-static.s3.amazonaws.com/119121_996c35aea40145d2a37f8def7c9733f5.html
# https://help.marine.copernicus.eu/en/articles/6328012-how-to-convert-netcdf-to-csv-using-r
# https://gis.stackexchange.com/questions/287873/extracting-#netcdf-climate-data-for-multiple-locations-at-different-#times-using-r
# https://pjbartlein.github.io/REarthSysSci/netCDF.html
# https://github.com/RS-eco/processNC
# https://cran.r-hub.io/web/packages/ClimDown/ClimDown.pdf
# 7 – PERFORM ANALYSES ON THE PIXEL:
data = read.table(file("clipboard"), header=TRUE, sep="\t", dec=".", row.names=1)
str(data); dim(data)
data1 = as.matrix(data)
# Drought Index SPI/SPEI
require(SPEI)
i = 3
spi <- spi(ts(na.omit(data1), frequency=12, start=1981), i)
write.table(spi$fitted, "spi31.txt", col.names=FALSE, row.names=FALSE); getwd()
#
# ETCCDI indices on the pixels:
library(lubridate)
library(dplyr)
library(zoo)
date = seq(as.Date("1980/1/1"), as.Date("2021/12/31"), "day"); class(date)
data1 = data.frame(data, date)
data1 <- data1 %>%
  mutate(Year = year(date), Month = month(date))
#| CDD. Maximum length of dry spell: # format "%Y" or "%Y-%m"
# Running count of days with precipitation < 1 mm (the grouping key here is the year).
# Note: cumsum() gives a cumulative count over the whole group, not the length of
# consecutive runs; see the rle() sketch after this block for true consecutive spells.
data1 <- data1 %>%
  group_by(month = format(date, "%Y")) %>%
  mutate(consecutive_rainy_days = cumsum(OBSVERTION < 1)) %>%
  ungroup()
# Maximum of that count per group
min_consecutive_days <- data1 %>%
  group_by(month) %>%
  summarise(min_consecutive_days = max(consecutive_rainy_days)) %>%
  ungroup()
print(min_consecutive_days)
data1 %>%
  mutate(Month = month(date)) %>%
  group_by(Month, OBSVERTION) %>%
  summarise(consecutive_days = cumsum(OBSVERTION >= 1)) %>%
  group_by(Month) %>%
  summarise(max_consecutive_days = max(consecutive_days))
#
data1 %>%
  mutate(Year = year(date)) %>%
  group_by(Year, OBSVERTION) %>%
  summarise(consecutive_days = cumsum(OBSVERTION < 1)) %>%
  group_by(Year) %>%
  summarise(max_consecutive_days = max(consecutive_days))
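# As flagged above, cumsum() counts dry days cumulatively rather than measuring spells.
# A run-length based version (a sketch of my own, assuming the daily precipitation
# column is named OBSVERTION as above) gives the ETCCDI-style CDD per year:
max_dry_spell <- function(pr, threshold = 1) {
  runs <- rle(pr < threshold)       # runs of dry (TRUE) / wet (FALSE) days
  dry  <- runs$lengths[runs$values] # lengths of the dry runs only
  if (length(dry) == 0) 0L else max(dry)
}
cdd_year <- data1 %>%
  group_by(Year = format(date, "%Y")) %>%
  summarise(CDD = max_dry_spell(OBSVERTION))
print(cdd_year)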
#
#| CWD. Maximum length of wet spell (RR >= 1 mm): # format "%Y" or "%Y-%m"
# Function to count consecutive days with precipitation >= 1 mm
count_consecutive_rainy_days <- function(df) {
  consecutive_rainy_days <- 0
  max_consecutive_rainy_days <- 0
  for (i in 1:nrow(df)) {
    if (df$OBSVERTION[i] >= 1) {
      consecutive_rainy_days <- consecutive_rainy_days + 1
      max_consecutive_rainy_days <- max(max_consecutive_rainy_days, consecutive_rainy_days)
    } else {
      consecutive_rainy_days <- 0
    }
  }
  return(max_consecutive_rainy_days)
}
# Calculate the maximum consecutive rainy days for each month of each year
# (group_modify() passes each group's rows to the function; summarize(f(.)) would pass the whole data frame)
result <- data1 %>%
  mutate(Year = format(date, "%Y"), Month = format(date, "%m")) %>%
  group_by(Year, Month) %>%
  group_modify(~ data.frame(MaxConsecutiveRainyDays = count_consecutive_rainy_days(.x)))
# Print the result
print(result)
#| R95pTOT. Annual total PRCP when RR > 95p: # format "%Y" or "%Y-%m"
filtered_data <- data1 %>%
  filter(OBSVERTION >= 1.0) # wet days only
# Extract year-month from the Date column
filtered_data$Month <- format(as.Date(filtered_data$date), "%Y-%m")
# 95th percentile of wet-day precipitation per month (the threshold used by R95pTOT)
percentile_data <- filtered_data %>%
  group_by(Month) %>%
  summarize(Percentile_95 = quantile(OBSVERTION, 0.95, na.rm = TRUE))
print(percentile_data)
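# The code above stops at the 95th-percentile threshold; to obtain R95pTOT itself
# (total precipitation falling on days above that threshold), one hedged continuation
# of my own, computing the threshold over all wet days rather than a fixed base period:
wet <- data1 %>% filter(OBSVERTION >= 1.0)
p95 <- quantile(wet$OBSVERTION, 0.95, na.rm = TRUE)
r95ptot <- wet %>%
  group_by(Year = format(date, "%Y")) %>%
  summarise(R95pTOT = sum(OBSVERTION[OBSVERTION > p95]))
print(r95ptot)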
#| PRCPTOT. Annual total precipitation in wet days: # format "%Y" "%Y-%m"
monthly <- data1 %>%
  group_by(Year, Month) %>%
  summarize(Sum_Precipitation = sum(OBSVERTION)); print(monthly)
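# Note that PRCPTOT in the ETCCDI definition only accumulates wet days (RR >= 1 mm);
# an annual version along those lines (my own sketch):
prcptot_year <- data1 %>%
  filter(OBSVERTION >= 1) %>%
  group_by(Year) %>%
  summarise(PRCPTOT = sum(OBSVERTION))
print(prcptot_year)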
#
# HCA PIXEL
require(sf); shpe <- read_sf("C:/Users/hp/Documents/ArcGIS Input/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp")
require(rgdal); shpe <- readOGR("C:/Users/hp/Documents/ArcGIS Input/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp") # readOGR() comes from rgdal
#
require(raster); crs(shpe)
pre1.brick = brick("chirps-v2.0.1981.days_p05.nc")
crs(pre1.brick)
shp = spTransform(shpe, crs(pre1.brick))
# pre1.mask = mask(pre1.brick, shp)
pre1.mask = crop(pre1.brick, shp)
class(pre1.mask )
dim(pre1.mask )
#
brk <- c(0,0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75 , 2
,2.25);length(brk)
require(Hmisc); col1 <-
colorRampPalette(c("blue4","blue","cyan","green",
"yellow", "red", "red4"))(50)
x11();plot( pre1.mask[[1]], xlim=c(41.5, 43.5),ylim=c(
10.75 , 12.75), col= col1 )
plot(shpe[4],add=TRUE,col=rgb(0, 1, 0, 0.005),
lwd=2,lty=2)
#
dim(pre1.mask )
pre1.df = as.data.frame(pre1.mask[[1]], xy=TRUE)
pre1.df = pre1.df[complete.cases(pre1.df),]
# head(pre1.df[pre1.df!= "NA"])
View(head(pre1.df,5)) ; View(tail(pre1.df,5))
dim(pre1.df)
names(pre1.df)
#
pre1.df = as.data.frame(pre1.mask, xy=TRUE)
pre1.df = pre1.df[complete.cases(pre1.df),]
head(pre1.df); tail(pre1.df)
dim(pre1.df)
View(head(t(pre1.df),5))
dim(t(pre1.df))
#
pre1.df.tim = t(pre1.df)
# pre1.df.tim = pre1.df.tim[-c(1:2),];View(pre1.df.tim)
View(head(pre1.df.tim,5))
dim(pre1.df.tim)
pre1.df.time = pre1.df.tim[-c(1:2),];
dim(pre1.df.time); View(head(pre1.df.time ,5))
#
require(lubridate); require(dplyr)
df <- data.frame(date = seq(as.Date("1981/1/1"), as.Date("1981/12/31"), "day"))
df$week <- floor_date(df$date, "week")
df$month <- floor_date(df$date, "month")
df$year <- floor_date(df$date, "year")
View(df)
dim(df)
#
data1 = data.frame(df, pre1.df.time)
dim(data1)
View(head(data1,5))
# Monthly total for one pixel (column 5 = the first pixel column after date/week/month/year);
# summing data1[,5] directly inside summarize() would ignore the grouping.
data1 %>%
  group_by(month) %>%
  summarize(sum = sum(.data[[names(data1)[5]]]))
#
write.table(pre1.df.tim, "pixel.txt");getwd()
library(NbClust)
x11();(NbClust(pre1.df.time, diss=NULL, distance = "euclidean", min.nc=2, max.nc=10,
               method = "ward.D2", index = "all", alphaBeale = 0.1))
#
library(cluster)
library(factoextra)
pam.res3 <- pam(pre1.df.time, 3, metric = "euclidean", stand = FALSE); pam.res3$silinfo$avg.width
x11();fviz_cluster(pam.res3, data = pre1.df.time, palette = c("red", "blue", "green"),
                   ellipse.type = "euclid", star.plot = TRUE, repel = TRUE,
                   ggtheme = theme_test())
x11();fviz_silhouette(pam.res3, palette = c("red", "blue", "green"), ggtheme = theme_test())
source(url("https://raw.githubusercontent.com/larmarange/JLutils/master/R/clustering.R"))
best.cutree(hclust(dist(scale(pre1.df.time), method = "euclidean"), method = "ward.D2"))
#
hc <- hclust(dist(scale(pre1.df.time)), method = "ward.D2")
x11(); fviz_dend(hc, lwd = 1, k = 3, rect_lty = 5, rect_border = "black", cex = 0.35,
                 k_colors = c("red","blue","green","brown","pink",
                              "cyan","purple","yellow","orange","black"),
                 horiz = FALSE, labels_track_height = 2,
                 ggtheme = theme_light(),
                 ylab = "Euclidean Distance") +
  theme(axis.text.x = element_text(face = "bold", color = "black", size = 11, angle = 0),
        axis.text.y = element_text(face = "bold", color = "black", size = 11, angle = 0))
#
gr = cutree(hc, k = 3); cbind(gr)
G = subset(gr, gr %in% c(5)) # set the cluster number(s) to extract
write.table(G, "G.txt", col.names=FALSE, row.names=TRUE); getwd()
#
# https://stackoverflow.com/questions/50204653/how-to-extract-the-data-in-a-netcdf-file-based-on-a-shapefile-in-r
# netCDF in R:
https://pjbartlein.github.io/REarthSysSci/netCDF.html
"Have fun!
If you find any errors or codes that do not work,
please feel free to write to me at abdi-basid@outlook.com."
Abdi-Basid ADAN