Saturday, 24 August 2024

Multi- and Hyperspectral Image Processing for Vegetation Index Calculation (GRVI, MVI and NDVI) from Remote Sensing (SPOT, SENTINEL-2 and MODIS) Data Using the R Programming Language

 Abdi-Basid ADAN

"The purpose of this document is to consolidate and improve the various R scripts used to perform the cited analyses."

 

 

Table of Contents

# 1- R PROGRAM FOR CALCULATING VEGETATION INDEX

# 2- IMPORT MULTIPLE RASTER FILES SIMULTANEOUSLY

# 3 - TREND AND P-VALUE: KENDALL VS OLS IN RASTER FILES


# 1- R PROGRAM FOR CALCULATING VEGETATION INDEX:

 

# NDVI = (NIR - Red) / (NIR + Red).  SPOT: NIR = B3, Red = B2 | SENTINEL-2: NIR = B8, Red = B4 | MODIS: NIR = B2, Red = B1
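Since only the band numbers change between sensors, the same formula can be wrapped in a small helper. A minimal sketch (ndvi_from_bands is an illustrative name, not part of the original script):

library(raster)
# compute NDVI from a multi-band raster, given the NIR and red band indices
ndvi_from_bands <- function(img, nir, red) {
  (img[[nir]] - img[[red]]) / (img[[nir]] + img[[red]])
}
# SPOT:       ndvi_from_bands(img, nir = 3, red = 2)
# Sentinel-2: ndvi_from_bands(img, nir = 8, red = 4)
# MODIS:      ndvi_from_bands(img, nir = 2, red = 1)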

 

library(maptools)

library(raster)

library(rgdal)

library(dplyr)

library(ggplot2)

library(rgeos)

library(RColorBrewer)

library(terra)

library(stars)

library(sf)

library(tabularaster)

#

# Import the RASTER data in R:

rast=brick("C:\\Users\\hp\\Desktop\\20130102.TIF") # "path\\name.extension"

nlayers(rast) # number of layers (bands)

#

naip_multispectral_st <- stack("C:/Users/hp/Desktop/tif1.TIF")

#

# convert data into rasterbrick for faster processing

naip_multispectral_br  <- brick(naip_multispectral_st)

# --------- Test: calculate GRVI (Green-Red Vegetation Index) from bands 1 and 2
naip_ndvi  <- (naip_multispectral_br[[1]] - naip_multispectral_br[[2]]) / (naip_multispectral_br[[1]] + naip_multispectral_br[[2]])

#

setwd("C:/Users/hp/Desktop")

bf <- writeRaster(naip_ndvi, filename=file.path("19870722.tif"),

                  options="INTERLEAVE=BAND", overwrite=TRUE)

# RADIANCE CORRECTION

naip_multispectral_br[[1]]=naip_multispectral_br[[1]]/0.85097

naip_multispectral_br[[2]]=naip_multispectral_br[[2]]/0.81218

naip_multispectral_br[[3]]=naip_multispectral_br[[3]]/0.98580

#

# EXOATMOSPHERIC (TOA REFLECTANCE) CORRECTION - ESUN values: B1 = 1570.2, B2 = 1052.1, B3 = 235.84   # 153.8
# note: cos() expects radians, so the solar zenith angle (assumed to be 26.2 degrees) is converted below
naip_multispectral_br[[1]]=(naip_multispectral_br[[1]]*pi)/(cos(26.2*pi/180)*1570.2)
naip_multispectral_br[[2]]=(naip_multispectral_br[[2]]*pi)/(cos(26.2*pi/180)*1052.1)
naip_multispectral_br[[3]]=(naip_multispectral_br[[3]]*pi)/(cos(26.2*pi/180)*235.84)
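For reference, the conversion applied above follows the usual radiance-to-TOA-reflectance formula ρ = (π · L · d²) / (ESUN · cos θs). A minimal sketch, assuming the solar zenith angle is given in degrees and taking the Earth–Sun distance d = 1 (the script above omits the d² term); toa_reflectance is an illustrative helper, not part of the original script:

toa_reflectance <- function(radiance, esun, sun_zenith_deg, d = 1) {
  (pi * radiance * d^2) / (esun * cos(sun_zenith_deg * pi / 180))
}
# e.g. toa_reflectance(naip_multispectral_br[[1]], esun = 1570.2, sun_zenith_deg = 26.2)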

 

# calculate SPOT NDVI using the NIR (band 3) and red (band 2) bands

naip_ndvi  <- (naip_multispectral_br[[3]]  - naip_multispectral_br[[2]]) /  (naip_multispectral_br[[3]] +  naip_multispectral_br[[2]])

#naip_grvi  <- (naip_multispectral_br[[1]]  - naip_multispectral_br[[2]]) /   (naip_multispectral_br[[1]] +  naip_multispectral_br[[2]])

#

band1=raster(rast,layer=1)

band2=raster(rast,layer=2)

GRVI=(band1-band2)/(band1+band2)

plot(GRVI)

x11();plot(GRVI, col=colorRampPalette(1:10)(7),main="GRVI")

dev.off()

#

# plot the data

x11(); plot(naip_ndvi,

     main = "NDVI-03-10-1987 \n Godoria",

     axes = T, box = T, xlim=c(2210,2500), ylim=c(2290,2690))

# xlim=c(1000,1150),ylim=c(1950,2350)

plotRGB(naip_multispectral_br,r=1,g=2,b=3,axes=TRUE,stretch="lin", main="") # plotRGB() needs a multi-band object, not the single-layer NDVI

# https://rdrr.io/cran/raster/man/calc.html

#

library(stars)

st_bbox(naip_ndvi)

#https://www.pmassicotte.com/posts/2021-03-06-extracting-raster-values-using-polygons/

#

# Extraction of values around a point
# extract() can also be used directly (see the links below)
# radiusextract() returns all cell values within a given radius of (lon, lat);
# select.window() is assumed to come from the rfunctions.blogspot.com post linked below
radiusextract <- function(lon, lat, ras, radius = 0.5){
  coor   <- coordinates(ras)
  lonpar <- coor[,1][which(abs(coor[,1]-lon) %in% min(abs(coor[,1]-lon)))]
  latpar <- coor[,2][which(abs(coor[,2]-lat) %in% min(abs(coor[,2]-lat)))]
  data   <- data.frame(x = coor[,1], y = coor[,2], val = raster::values(ras))
  return(select.window(xf = lonpar[1], yf = latpar[1], radius = radius, xydata = data))}
res <- radiusextract(lon = 2210, lat = 2500, naip_ndvi, radius = 0.167)

# https://rfunctions.blogspot.com/2017/08/extracting-data-from-rasters-using.html

# https://www.youtube.com/watch?v=LlA9u

# https://stackoverflow.com/questions/27562076/if-raster-value-na-search-and-extract-the-nearest-non-na-pixel

#

# Export to raster / NetCDF

getwd(); setwd("C:/Users/hp/Desktop/")

bf <- writeRaster(naip_ndvi, filename=file.path("TestGodoria1987.tif"),

                  options="INTERLEAVE=BAND", overwrite=TRUE)

#

# Export your raster or Export from Image

writeRaster(x = naip_ndvi,
            filename = "C:/Users/hp/Desktop/EXPORT",
            format = "GTiff",    # save as a tif
            datatype = 'INT2S',  # save as a signed integer rather than a float
            overwrite = TRUE)    # OPTIONAL - be careful, this will OVERWRITE previous files

# https://www.earthdatascience.org/courses/earth-analytics/multispectral-remote-sensing-data/vegetation-indices-NDVI-in-R/

# https://www.rdocumentation.org/packages/raster/versions/3.6-3/topics/writeRaster

#

# GEOREFERENCING IN R
raster::crs(naip_grvi) <- "+proj=utm +zone=38 +ellps=WGS84 +datum=WGS84"
# st_transform() and st_set_crs() apply to sf/stars objects (s.sf is an sf object); for Raster* objects use raster::crs() as above
s.sf.gcs <- st_transform(s.sf, "+proj=longlat +datum=WGS84")
st_set_crs(naip_grvi, "+proj=utm +zone=38 +ellps=WGS84 +datum=WGS84")

# write to netcdf

if (require(ncdf4)) {
  # r is any Raster* object and tmp an output directory (e.g. tmp <- tempdir())
  rnc <- writeRaster(r, filename=file.path(tmp, "netCDF.nc"), format="CDF", overwrite=TRUE)
}

#https://search.r-project.org/CRAN/refmans/raster/html/writeRaster.html

#

# Raster statistics

#r <- raster(ncol=5, nrow=5, vals=1:25)

#r[] <- runif(ncell(r)) * 10

# wd <- ("/") # C:/Users/hp/Desktop/

# DEM <- raster(paste0(wd, "NAME.tif"))

# e=GDALinfo(paste0(wd, "godoria.tif"))

# https://rspatial.org/analysis/index.html

#

wd <- ("C:/Users/hp/Desktop/")

setwd(wd); getwd()

DEM <- raster(paste0(wd,"1987-07-22.tif"))

#

print(DEM)

DEM@extent

DEM@crs

crs(DEM) #XY-Coordinates Reference System

res(DEM) #Spatial Resolution

#

# Import Shape File

# SurveyLocs <- raster::shapefile("../Spatial_Layers/hbef_valleywideplots.shp")

#

#Statistics Raster

library(maptools);library(raster);library(rgdal);library(dplyr);library(ggplot2);library(rgeos);library(RColorBrewer);library(terra);library(stars);library(sf);library(tabularaster)

#

# Aggregate or Disaggregate:

wd <- ("C:/Users/hp/Desktop/")

DEM <- raster(paste0(wd,"CHIRPS_Djib.tif"))

res(DEM); extent(DEM)

DEM.aggregate <- aggregate(DEM, fact=2)

res(DEM.aggregate)

DEM1=raster(nrow=37,ncol=35)

DEM1=setValues(DEM1,values = 1:1295)

r_resam <- resample(DEM,DEM1,method='bilinear')

r_agg <- aggregate(DEM1,fact=1,fun=mean)

#

as_tibble(DEM)

summary(DEM)

cbind(table(values(DEM)))

#

cellStats(DEM, range)

cellStats(DEM, mean)

quantile(DEM,0.50)

cellStats(DEM,sd)

cellStats(DEM,'sum')

cellStats(DEM, 'countNA')

#

setMinMax(DEM)

maxValue(DEM)

minValue(DEM)

View(DEM)

dim(as.matrix(DEM))

#

sum(!is.na(r))

colSums(!is.na(r))

#

# Convert the raster into a matrix and tabulate values
r=as.matrix(DEM)
x=20; y=20   # cell size in metres
round(((x*y*round(sum(cbind(table((r[r>0])))),3))*0.0001),3) # area of cells with value > 0, in hectares (m2 -> ha: * 0.0001)
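The same calculation can be made less error-prone by reading the cell size from the raster itself. A sketch (area_ha is an illustrative helper; it assumes a projected CRS with cell sizes in metres):

area_ha <- function(ras, threshold = 0) {
  cellsize <- prod(res(ras))                       # cell area in m^2
  n <- sum(values(ras) > threshold, na.rm = TRUE)  # number of cells above the threshold
  round(n * cellsize * 1e-4, 3)                    # m^2 -> hectares
}
# e.g. area_ha(DEM, threshold = 0)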

#

wd <- ("C:/Users/hp/Desktop/r/"); DEM <- raster(paste0(wd,"0220204_Correcteed.tif"));r=as.matrix(DEM)

#

round(sum(cbind(table((r[r>0])))),3)

round(sum(cbind(table((r[r>0.05])))),3)

round(cellStats(DEM, range),3) # round(min(r),3) # round(max(r),3)

round(median(r,na.rm=TRUE),3)

round(mean(r,na.rm=TRUE),3)

round(sd(r,na.rm=TRUE),3)

#

# Degradation / Stable / Recovery cell counts (note: values in (0.4, 0.5) and (0.59, 0.6) fall outside the ranges below; a reclassify() sketch follows them)

round(sum(cbind(table((r[r>0  & r <= 0.4])))),3)

round(sum(cbind(table((r[r>=0.5  & r <= 0.59])))),3)

round(sum(cbind(table((r[r>=0.6  & r <= 1])))),3)
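The three classes can also be mapped in one pass with raster::reclassify(). A minimal sketch; the class boundaries below close the gaps left by the thresholds above, which is an assumption:

rcl <- matrix(c(0.0, 0.4, 1,   # 1 = degradation
                0.4, 0.6, 2,   # 2 = stable
                0.6, 1.0, 3),  # 3 = recovery
              ncol = 3, byrow = TRUE)
classes <- reclassify(DEM, rcl)
cbind(table(values(classes)))  # cell count per class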

#

naip_multispectral_st <- stack("C:/Users/hp/Desktop/20121120.TIF");naip_multispectral_br  <- brick(naip_multispectral_st)

naip_grvi  <- (naip_multispectral_br[[3]]  - naip_multispectral_br[[2]]) /   (naip_multispectral_br[[4]]  -  naip_multispectral_br[[1]])

#raster::crs(naip_grvi ) <- "+proj=utm +zone=38 +ellps=WGS84 +datum=WGS84"

setwd("C:/Users/hp/Desktop/");writeRaster(naip_grvi, filename=file.path("MVI.tif"),  options="INTERLEAVE=BAND", overwrite=TRUE)

# rm(list=ls())

# Cat1=index1[index1 >=2 ]

# Cat2=index1[index1 >= 1.50 & index1 <= 1.99]

# which(r < 1, arr.ind = TRUE)

# Legend/Crop raster in r: https://www.youtube.com/watch?v=-Xjlsztbt0Y

# EXTRACT /https://cran.r-project.org/web/packages/tabularaster/vignettes/tabularaster-usage.html

# Classify: https://bookdown.org/lexcomber/brunsdoncomber2e/Ch5.html

# https://rspatial.org/spatial/9-maps.html

#

# Relative Radiometric Normalization (RRN) of multisensor Landsat Level 2 images using R:

# https://www.youtube.com/watch?v=Dhzy04f_AyE

#sum, min, max, sd, mean, or countNA

#Plotting Raster

plot(DEM)

image(DEM)

image(DEM, zlim=c(12,43))

plotRGB(rast,r=1,g=2,b=3,axes=TRUE,stretch="lin", main="")

# https://rspatial.org/spatial/8-rastermanip.html

#

# RASTER CREATION
## create a raster from scratch and fill it with random values

r <- raster(nrows=22, ncols=20, xmn=-58, xmx=-48, ymn=-33, ymx=-22)

r[] <- round(runif(22 * 20, min=0, max=600), digits=0)

#

# Count raster cells (excluding NA's) and set intervals

ncells <- sum(!is.na(r[]))

intervals <- seq(0, 600, 100)

#

# Count frequencies and calculate percentage

t <- table(cut(as.vector(r), intervals, include.lowest=TRUE)) / ncells * 100

df <- data.frame(round(t, digits=2))

#

# r <- raster(ncols=36, nrows=18)

r <- raster(paste0(wd,   "1987-10-03.tif"))

values(r) <- 1:ncell(r)

# correction: multiply all cell values by a constant factor

fun <- function(x) { x * 10 }

rc1 <- calc(r, fun)

#https://rdrr.io/cran/raster/man/extract.html

#

# Remove from r all values that are NA in w (r, w and s here are small example rasters from the rspatial tutorial linked below).

u <- mask(r, w)

as.matrix(u)

#Identify the cell values in u that are the same as in s.

v <- u==s

as.matrix(v)

#Replace NA values in w with values of r.

cvr <- cover(w, r)

as.matrix(w)

# https://rspatial.org/spatial/8-rastermanip.html

# https://www.neonscience.org/resources/learning-hub/tutorials/extract-values-rasters-r

#

# IMPORT NETCDF & EXTRACT DIMENSIONS AND VARIABLES:
library(ncdf4)
setwd("C:/Users/hp/Downloads")
f=nc_open("icru4_pre_0-360E_-90-90N_n.nc")
print(f) # show dimensions and variables
#
lat=ncvar_get(f,"lat")     # replace "lat"/"lon"/"time" with the dimension names shown by print(f)
time=ncvar_get(f,"time")
ts=as.Date(time, origin="--", tz="UTC")  # set the origin given in the file's time units (see print(f))

# Sources: https://www.youtube.com/watch?v=5KbxWsQbAjs&t=950s

#          https://www.youtube.com/watch?v=4PCpgxtW9wI&t=1035s

#

# Linear regression between two raster images in R

library(raster)

 

timestats <- brick("trend_timestats.tif")

force <- brick("versuch1.tif")

# note: lm() cannot take RasterBrick objects directly; convert the values to a data.frame first or use terra (see the example below)
# regression <- lm(timestats ~ force)

#

library(terra)

s <- rast(system.file("ex/logo.tif", package="terra"))

m <- lm(red~green, data=as.data.frame(s))

p <- predict(s, m)

error <- s$red - p

plot(error)

# https://gis.stackexchange.com/questions/403811/linear-regression-analysis-in-r

#

library(raster)
# NDVI and rainfall are RasterStacks of 120 layers each (e.g. monthly time series), stacked together:
s <- stack(NDVI, rainfall)

#

fun=function(x) { if (is.na(x[1])){ NA } else { lm(x[1:120] ~ x[121:240])$coefficients[2] }}

slope <- calc(s, fun)

#

fun=function(x) { if (is.na(x[1])){ NA } else { lm(x[1:120] ~ x[121:240])$coefficients[1] }}

intercept <- calc(s, fun)

#

fun=function(x) { if (is.na(x[1])){ NA } else { m <- lm(x[1:120] ~ x[121:240]);summary(m)$r.squared }}

r.squared <- calc(s, fun)
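The pvalue raster exported in section 3 can be derived with the same pixel-wise pattern. A sketch, assuming the same stack s of 120 NDVI layers followed by 120 rainfall layers:

# pixel-wise p-value of the regression slope (column 4 of the coefficient table)
fun=function(x) { if (is.na(x[1])){ NA } else { m <- lm(x[1:120] ~ x[121:240]); summary(m)$coefficients[2, 4] }}
pvalue <- calc(s, fun)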

z = mask(NDVI, rainfall)

s <- stack(z, rainfall)

# https://matinbrandt.wordpress.com/2014/05/26/pixel-wise-regression-between-two-raster-time-series/

#

# Rasterize a shapefile

admin_raster = rasterize(

  x=admin_sp,                   # shape to rasterize

  y= dnb,                       # example raster to copy properties from

  field = 'NAME_1_numeric'      # numeric field to rasterize

)

# http://rstudio-pubs-static.s3.amazonaws.com/513986_7b9b632db11c40878d9f7b8c119fa6f5.html

 

 

# 2- IMPORT MULTIPLE RASTER FILES SIMULTANEOUSLY:

 

library(terra)
library(raster)
library(microbenchmark)
library(dplyr)
library(purrr)
library(rgdal)
library(sp)
library(sf)   # for st_as_sf() used below

#

getwd()

mypath= "C:/Users/hp/Downloads"

rastlist <- list.files(path = "C:/Users/hp/Downloads", pattern='.tif$', all.files= T, full.names= T)

# rastlist <- list.files(path = "C:/Users/hp/Downloads", pattern='.TIF$', all.files=TRUE, full.names=FALSE)

r_name <- list.files(mypath,full.names = F)

# mb <- microbenchmark(stk1 <- terra::rast(rastlist),stk2 <- raster::stack(rastlist), unit = "s",times= 3) ; mb

stk1 <- terra::rast(rastlist)

lapply(rastlist, raster)

allrasters <- stack(rastlist)

#

nrow(allrasters )

ncol(allrasters)

crs(allrasters)

ncell(allrasters)

nlayers(allrasters)   # number of layers (rasters)
extent(allrasters)    # spatial extent (available coordinates)

View(coordinates(allrasters))

coordinates(allrasters)[!is.na(values(allrasters)),]

SpatialPixels(SpatialPoints(coordinates(allrasters)[!is.na(values(allrasters)),]))

bbox(allrasters)

GridTopology(bbox(allrasters)[,1] + res(allrasters)/2, res(allrasters), dim(allrasters)[2:1])

getGridTopology(as(allrasters, "SpatialGrid"))

#

p <- as(allrasters, 'SpatialPixels')

x11();plot(allrasters[[1]]);points(p)

g <- as(allrasters, 'SpatialGridDataFrame')

#

cellStats(allrasters, stat="mean")

cellStats(allrasters, stat="skew")

quantile(allrasters, probs = c(0.25, 0.50, 0.75))

# coefficient of variation.

(cellStats(allrasters,stat="sd")/cellStats(allrasters,stat="mean")) * 100

#

# extract() expects point coordinates as a two-column (x = longitude, y = latitude) matrix or data.frame,
# consistent with the long/lat values used further below
extract(allrasters, cbind(43.15, 11.55))
extract(allrasters, SpatialPoints(cbind(43.15, 11.55)), sp = TRUE)
xy = data.frame(lon = 43.15, lat = 11.55)
cbind(extract(allrasters, xy, df = TRUE), xy)
result <- extract(allrasters, cbind(43.15, 11.55), cellnumbers = TRUE)

#

cds1 <- rbind(c(-50,0), c(0,60), c(40,5), c(15,-45), c(-10,-25))

cds2 <- rbind(c(80,20), c(140,60), c(160,0), c(140,-55))

lines <- spLines(cds1, cds2)

v <- extract(allrasters, lines)

polys <- spPolygons(cds1, cds2)

unlist(lapply(v, function(x) if (!is.null(x)) mean(x, na.rm=TRUE) else NA ))

#

e <- extent(12,13,41,44)

extract(allrasters, e)

values <- raster::extract(allrasters, e )

#

df <- data.frame(city = c("Santa Cruz", "Oxford"), lat = c(11.55, 11.55), long = c(43.15, 43.15))

df_sf <- st_as_sf(x = df, coords = c("long", "lat"),  crs = 4326)

values <- raster::extract(allrasters, df_sf )

#

allrasters[[1]]

x11(); plot(allrasters[[1]])

#

rList <-    list()

statList <- list()

for(i in 1:length(rastlist)){

  temp <- raster(rastlist[i])

  rList[[i]] <- values(temp)

  # name

  Name <- r_name[i]

  mx=raster::maxValue(temp)

  mn=raster::minValue(temp)

  avg=raster::cellStats(temp,'mean',na.rm=T)

  stdev=raster::cellStats(temp,'sd',na.rm=T)

  statList[[i]] <- data.frame(Name,mx,mn,avg,stdev)  # create a data.frame to save statistics

}

df <- do.call(rbind.data.frame,statList) ;df            # final data.frame with all statistics

# plotRGB(RGB_stack_HARV, r = 1, g = 2, b = 3)

#

shp<-shapefile("C:/Users/hp/Documents/ArcGIS 10.8/Djibouti for mask/MASK SHAPEFILE Djibouti/Regions.shp")

ext <- extract(allrasters, shp, method='simple')

# Function to tabulate pixel values by region & return a data frame

tabFunc <- function(indx, extracted, region, regname) {
  dat      <- as.data.frame(table(extracted[[indx]]))
  dat$name <- region[[regname]][[indx]]
  return(dat)
}

# run through each region & compute a table of the number
# of raster cells by pixel value ("CODE" is the region code field)

tabs <- lapply(seq(ext), tabFunc, ext, shp, "CODE")

# assemble into one data frame

df <- do.call(rbind, tabs) 

# to see the data frame in R

print(df)

# https://gis.stackexchange.com/questions/43707/how-to-produce-spatial-grid-from-raster

# https://journal.r-project.org/archive/2011-1/RJournal_2011-1_Murrell.pdf

# https://www.appsloveworld.com/r/100/44/how-to-efficiently-import-multiple-raster-tif-files-into-r

# https://stackoverflow.com/questions/52746936/how-to-efficiently-import-multiple-raster-tif-files-into-r

# https://uw-madison-datascience.github.io/r-raster-vector-geospatial/05-raster-multi-band-in-r/

# https://www.rdocumentation.org/packages/raster/versions/3.6-20/topics/extract

# https://evansmurphy.wixsite.com/evansspatial/raster-analysis-in-r

#

#

library(rts)

# location of files

path <- system.file("external", package="rts")

# list of raster files:

lst <- list.files(path=path,pattern='.asc$',full.names=TRUE); lst

# creating a RasterStack object

r <- stack(lst)

d <- c("2000-02-01","2000-03-01","2000-04-01","2000-05-01") # corresponding dates to 4 rasters

d <- as.Date(d)

# creating a RasterStackTS object:

rt <- rts(r,d)

# or alternatively you can simply use the list of files as the first argument:

rt <- rts(lst,d); rt

plot(rt)

#

write.rts(rt,"rtfile") # write rt into the working directory
rt <- read.rts("rtfile"); rt # read it back from the working directory

#

file <- system.file("external/ndvi", package="rts")
ndvi <- rts(file) # read the ndvi time series from the specified file
ep <- endpoints(ndvi,'years') # extract the end index of each year period
ndvi.y <- period.apply(ndvi,ep,mean) # apply the mean function to each year

#---------

ep <- endpoints(ndvi,'quarters') # extract the end index on each quarter of a year

# a function:

f <- function(x) {

  if (min(x) > 0.5)

    mean(x) else 0

}

ndvi.q <- period.apply(ndvi,ep,f) # apply the function f on each quarter

# https://r-gis.net/?q=rts

#

library(rts)

library(terra)

library('dichromat')

#

path <- system.file("external", package="rts")

lst <- list.files(path=path,pattern="asc$",full.names=TRUE)

r <- rast(lst)

#

d <- c("2000-02-01","2000-03-01","2000-04-01","2000-05-01") # corresponding dates to 4 rasters

d <- as.Date(d)

rt <- rts(r,d)

r <- raster::stack(lst)

rt <- rts(lst,d)

x11();plot(rt)

# NDVI

file <- system.file("external/ndvi", package="rts")

ndvi <- rts(file)

ep <- endpoints(ndvi,'years')

ndvi.y <- period.apply(ndvi,ep,mean)

#

# https://cran.r-project.org/web/packages/rts/vignettes/rts.pdf

 

 

 

 

 

# 3 - TREND AND P-VALUE: KENDALL VS OLS IN RASTER FILES :

 

# require("PackageName")   # template: load any additional package by name
require("lubridate")
require("zoo")
require("forecast"); require("sp"); require("raster"); require("SDMTools")  # require() loads one package at a time
require("Kendall")
require("plyr")
require("bcp")
require("gtools")
require("phenopix")
require("rts")
require("spatialEco")
require("remotes")
require("devtools")
# bfastSpatial is installed from GitHub, then loaded by name:
# remotes::install_github("dutri001/bfastSpatial")
require("bfastSpatial")
require("greenbrown")

require("greenbrown")

library(raster)

require(rts)

library(lubridate)

library(greenbrown)

library(terra)

#

getwd()

setwd("C:/Users/hp/Desktop")

#

all <- list.files("C:/Users/hp/Desktop", full.names = TRUE, pattern = "\\.tif$")

st <- stack(all)

plot(st);print(st)

#

layer_name <- sub('.', '', names(st))   # drop the leading "X" that R prepends to names starting with a digit
layer_name <- ymd(layer_name)
# Create indices for stackApply, which averages all the days of each month within each year.
indices <- format(layer_name, format = "%Y.%m")

Vmean <- stackApply(st, indices, mean)

# https://stackoverflow.com/questions/64514232/how-to-calculate-monthly-ndvi-raster-with-r

#

# TrendRaster(r, start = c(1982, 1), freq = 12, method = c("AAT",

#    "STM", "SeasonalAdjusted"), mosum.pval = 0.05, h = 0.15,

#    breaks = 0, funSeasonalCycle = MeanSeasonalCycle, funAnnual = mean,

#    ...)

#

trends <- TrendRaster(st, start = c(1984, 1), freq = 1, method = "AAT", breaks = 1)

plot(trends);print(trends)

plot(trends[[4]])

#

#  KENDALL VERSION:

#

k <-raster.kendall(rast(st), p.value=TRUE, z.value=TRUE,  intercept=TRUE, confidence=TRUE,   tau=TRUE)

plot(k); print(k)

# https://app.datacamp.com/workspace/preview?_tag=rdocs&rdocsPath=packages%2FspatialEco%2Fversions%2F1.3-5%2Ftopics%2Fraster.kendall&utm_source=r-docs&utm_medium=docs&utm_term=raster.kendall&utm_content=run_example_in_workspace

# https://stackoverflow.com/questions/54671062/mannkendall-trend-analysis-for-rainfall-raster-in-r-and-its-interpretation

#

#

x=rast(st)

res(x) <- 100     # Change the spatial resolution of an existing object

crs(x) <- "+proj=utm +zone=48 +datum=WGS84"  # Set the coordinate reference system (CRS) (define the projection)

# https://rspatial.org/terra/pkg/3-objects.html

#

getwd()

setwd("C:/Users/hp/Desktop/")

# slope and pvalue are rasters computed beforehand (e.g. the pixel-wise regression slope and p-value from section 1)
bf <- writeRaster(slope, filename=file.path("slope.tif"),options="INTERLEAVE=BAND", overwrite=TRUE)
bf <- writeRaster(pvalue, filename=file.path("pvalue.tif"),options="INTERLEAVE=BAND", overwrite=TRUE)

bf <- writeRaster(trends[[4]], filename=file.path("trends.tif"),options="INTERLEAVE=BAND", overwrite=TRUE)

#

# KENDALL TREND & P-VALUE

bf <- writeRaster(k[[1]], filename=file.path("kslope.tif"), overwrite=TRUE)

bf <- writeRaster(k[[3]], filename=file.path("kp.value.tif"), overwrite=TRUE)

bf <- writeRaster(k[[4]], filename=file.path("kLCI.tif"), overwrite=TRUE)

bf <- writeRaster(k[[5]], filename=file.path("kUCI.tif"), overwrite=TRUE)

 

 

 

 

 

 

 

 

"Have fun!

If you find any errors or codes that do not work, please feel free to write to me at abdi-basid@outlook.com."

 

Abdi-Basid ADAN
