###### Recommended Practice: Drought classification using weighted seasonal VCI data ######
###### (See the "in detail" page for more information) ######

## User configuration --------------------------------------------------------
## Use forward slashes ('/') in every path; backslashes ('\') will break them.

# Folder with enough free space to store the MODIS data and the resulting
# products.
dataPath <- "X:/Your/Path/here/"

# Path to the appEEARS download-list (must include ".txt"). If the data was
# already downloaded from appEEARS beforehand, enter the path to that data
# here instead (all data needs to be in one folder).
downloadList <- "X:/Your/Path/here/download.txt"

# Location of the greenbrown package archive downloaded according to the
# step-by-step guide (must include ".tar.gz").
dir_GB <- "X:/Your/Path/here/greenbrown_2.4.3.tar.gz"

# Hemisphere of your study area: 1 = north ; 0 = south.
hemisphere <- 1

# Quality masking: replace the '0' with a '1' to apply MODIS pixel reliability
# data. If you are not sure whether you should enable this option, it is
# recommended to leave it as is. Check the "in detail" page for more
# information.
# Quality-masking switch (see comment block above): 0 = off, 1 = on.
cloudmask <- 0

# No need to enter more - mark the whole code (ctrl + a) and click on 'run'
# (or press ctrl + enter).
#############################################################
#############################################################

#### Installing all needed packages
# One loop instead of a copy-pasted if-block per package; each package is
# installed only when it is not already present.
neededPackages <- c("curl", "raster", "strucchange", "Kendall", "ncdf4",
                    "bfast", "phenopix", "fields", "doParallel", "snow",
                    "quantreg", "parallel", "zoo", "sandwich")
for (pkg in neededPackages) {
  if (!(pkg %in% rownames(installed.packages()))) {
    install.packages(pkg)
  }
}

# Installing greenbrown (local source archive) for seasonality
if (!("greenbrown" %in% rownames(installed.packages()))) {
  install.packages(dir_GB, repos = NULL, type = "source")
}

library(raster)
library(curl)
library(greenbrown)

# Downloading files (except 'VI quality control' data, as they are not
# needed). If you downloaded your data from appEEARS beforehand, comment out
# this download block (from 'filesl <- ...' to the closing brace).
filesl <- scan(downloadList, what = 'list', sep = '\n')
for (d in seq_along(filesl)) {
  if (!grepl("_VI_Quality_", filesl[d])) {
    curl_download(url = filesl[d],
                  destfile = paste0(dataPath, "/", basename(filesl[d])),
                  quiet = TRUE, handle = new_handle())
  }
  # (Progress report)
  print(paste0('Downloading source data (Step 1 of 6): ',
               round(d / length(filesl) * 100, digits = 2), '%'))
}

# Creating a temp-directory and setting memory limit
dir.create(paste0(dataPath, '/temp/'))
rasterOptions(tmpdir = paste0(dataPath, '/temp/'))
rasterOptions(tolerance = 1)
# memory.limit() only ever worked on Windows and is defunct since R 4.2;
# wrapped in try() so the script keeps running on other platforms and on
# newer R versions.
try(memory.limit(50000), silent = TRUE)

# Listing up all downloaded TIF-files in the data folder.
# The regex dot is escaped so only real ".tif" endings match.
rasterData <- list.files(path = dataPath, pattern = '\\.tif$',
                         recursive = FALSE, ignore.case = TRUE,
                         full.names = TRUE)
rasterFiles <- basename(rasterData)

# Chunk-size for processing (in pixel).
# You can reduce these numbers if you run into RAM problems early on:
# try reducing the numbers to 1000. If you still run into RAM problems,
# reduce to 500 or 250.
#Only reduce the numbers if you have memory issues ("Error: cannot allocate vector of size [...]")
chwidth <- 1500
chheight <- 1500

##################STARTING EVI/VCI BLOCK ################## ################## ##################

#Listing all EVI rasters and their corresponding pixel reliability data
EVIrasterData <- rasterData[grepl('EVI',rasterData)]
EVIqc <- rasterData[grepl('pixel_reliability',rasterData)]

#(Automatically adjusting chunk size for small scenes if needed)
#NOTE(review): only chwidth is shrunk here; chheight keeps its configured value
#even when the scene has fewer rows than chheight - confirm this is intended.
initial <- raster(EVIrasterData[1])
if (as.numeric(ncol(initial)) <= chwidth || as.numeric(nrow(initial)) <= chheight){
  chwidth <- ceiling(ncol(initial)/2)
}

#Parsing all DOY and Years from the filenames
#(character positions 34-40 assume the standard appEEARS MODIS filename
#layout - verify against your actual downloads)
DOYs <- unique(substr(basename(EVIrasterData),38,40))
YEARs <- unique(substr(basename(EVIrasterData),34,37))

##VCI: chunkwise calculation
#Creating output folders
dir.create(paste0(dataPath,'/VCI'))
dir.create(paste0(dataPath,'/EVI'))

#Determining chunk-shapefile
#Loading example image from the downloaded data
exRST <- raster(EVIrasterData[1])
#Determining chunk dimensions (h x v pixels) so chunks tile the scene evenly
h <- ceiling(ncol(exRST)/ceiling(ncol(exRST) / chwidth))
v <- ceiling(nrow(exRST)/ceiling(nrow(exRST) / chheight))
#Creating shapefile for each chunk: each aggregated cell becomes one numbered
#polygon used later to crop out the corresponding chunk
split <- aggregate(exRST,fact=c(h,v))
split[] <- 1:ncell(split)
splitshp <- rasterToPolygons(split)
names(splitshp) <- 'shapes'
notiles <- ncell(split)

#Filtervalues for quality masking: "0" and "1" in the pixel reliability bands
#are accepted as sufficient quality
goodValues <- c(0,1)

#Masking clouds/snow; Splitting data into chunks
for (d in 1:length(DOYs)){
  #Filtering all relevant data for this DOY
  vrasterData <- EVIrasterData[grepl(paste0(DOYs[d],'_aid'),EVIrasterData)]
  #..and their corresponding pixel reliability data
  vQC <- EVIqc[grepl(paste0(DOYs[d],'_aid'),EVIqc)]
  #Reading years of available data
  vYear <- substr(basename(vrasterData),34,37)
  for (y in 1:length(vYear)){
    viPRE <- raster(vrasterData[y])
    #Applying quality mask to each image (if masking was activated):
    #pixels whose reliability value is not in goodValues become NA
    if (cloudmask == 1){
      qc <- raster(vQC[y])
      viPRE <- overlay(viPRE, qc, fun = function(x, y) {
        x[!(y %in% goodValues)] <- NA
        return(x)
      })
    }
    #####Splitting (masked) data into chunks; chunk files are named
    #####DOY_YEAR_EVICHUNKnnn so later steps can re-list them by pattern
    for(i in 1:ncell(split)){
      ex <- extent(splitshp[splitshp$shapes==i,])
      exx <- crop(viPRE,ex)
      writeRaster(exx,filename=paste0(dataPath,'/temp/',DOYs[d],'_',vYear[y],'_EVICHUNK',formatC(i, width=3, flag='0')),format='GTiff', overwrite=TRUE)
    }
  }
  #(Progress report)
  print(paste0('Data preparation (VCI) & masking (Step 2 of 6): ',round(d / length(DOYs) * 100, digits=2),'%'))
}

#Applying VCI calculation for each chunk
#List all chunks
EVIchunks <- list.files(path=paste0(dataPath,'/temp/'), pattern='_EVICHUNK', recursive=F, ignore.case=T, full.names=T)
for (d in 1:length(DOYs)){
  for (t in 1:notiles){
    #Filtering relevant chunks for this specific date and tile
    sEVIchunks <- EVIchunks[grepl(paste0(DOYs[d],'_'),EVIchunks)]
    sEVIchunks <- sEVIchunks[grepl(paste0('_EVICHUNK',formatC(t, width=3, flag='0')),sEVIchunks)]
    #Listing years of data available for each DOY (positions 5-8 of the
    #chunk filename DOY_YEAR_EVICHUNKnnn)
    vvYear <- substr(basename(sEVIchunks),5,8)
    if (length(sEVIchunks) > 0){
      sT <- stack(sEVIchunks)
      #Removing filler-values from EVI data (MODIS fill value is -3000,
      #so anything below -2999 is treated as NoData)
      sT[sT<(-2999)] <- NA
      #VCI formula application: VCI = (EVI - min) / (max - min) * 100,
      #where min/max are the per-pixel extremes across all years
      vimax <- stackApply(sT , rep(1, nlayers (sT)), max, na.rm=T)
      vimin <- stackApply(sT , rep(1, nlayers (sT)), min, na.rm=T)
      z <- vimax - vimin
      VCI <- ((sT -vimin)/z)*100
      #Writing VCI-chunks for each available year
      for (y in 1:length(vvYear)){
        writeRaster(VCI[[y]],filename=paste0(dataPath,'/temp/',DOYs[d],'_',vvYear[y],'_VCICHUNK',formatC(t, width=3, flag='0')),format='GTiff', overwrite=TRUE)
      }
    }
  }
  #(Progress report)
  print(paste0('VCI processing (Step 3 of 6): ',round(d / length(DOYs) * 100, digits=2),'%'))
}

##VCI chunk merging and output
#Listing all created chunks
VCIchunks <- list.files(path=paste0(dataPath,'/temp/'), pattern='_VCICHUNK', recursive=F, ignore.case=T, full.names=T)
#Looping through each available DOY_YEAR combination
for (y in 1:length(YEARs)){
  for (d in 1:length(DOYs)){
    #Listing relevant chunks (EVI, VCI) for this specific date
    sVCIchunks <- VCIchunks[grepl(paste0(DOYs[d],'_',YEARs[y],'_'),VCIchunks)]
    sEVIchunks <- EVIchunks[grepl(paste0(DOYs[d],'_',YEARs[y],'_'),EVIchunks)]
    #Creating raster-list of the EVI chunks
    if (length(sEVIchunks) > 0){
      if (length(sEVIchunks) > 1){
        sMos <- list()
        for (o in 1:length(sEVIchunks)){
          sMos <- append(sMos, raster(sEVIchunks[o]))
        }
        #Mosaicking the EVI chunks (overlapping cells averaged via fun=mean)
        sMos$fun = mean
        Mos <- do.call(mosaic, sMos)
        #Applying the MODIS EVI scale factor 0.0001
        Mos <- Mos*0.0001
        #Output of mosaicked, scaled EVI image
        writeRaster(Mos,filename=paste0(dataPath,'/EVI/',DOYs[d],'_',YEARs[y],'_EVI'),format='GTiff', overwrite=TRUE)
      }else{
        #Fail-safe in case only one chunk is available
        exp <- raster(sEVIchunks[1])
        exp <- exp*0.0001
        writeRaster(exp,filename=paste0(dataPath,'/EVI/',DOYs[d],'_',YEARs[y],'_EVI'),format='GTiff', overwrite=TRUE)
      }
    }
    #Creating raster-list of the VCI chunks
    if (length(sVCIchunks) > 0){
      if (length(sVCIchunks) > 1){
        sMos <- list()
        for (o in 1:length(sVCIchunks)){
          sMos <- append(sMos, raster(sVCIchunks[o]))
        }
        #Mosaicking the VCI chunks
        sMos$fun = mean
        Mos <- do.call(mosaic, sMos)
        #Output: final VCI mosaic
        writeRaster(Mos,filename=paste0(dataPath,'/VCI/',DOYs[d],'_',YEARs[y],'_VCI'),format='GTiff', overwrite=TRUE)
      }else{
        #Fail-safe in case only one chunk is available
        writeRaster(raster(sVCIchunks[1]),filename=paste0(dataPath,'/VCI/',DOYs[d],'_',YEARs[y],'_VCI'),format='GTiff', overwrite=TRUE)
      }
    }
    #(Progress report)
    print(paste0('EVI output & VCI output (Step 4 of 6): ',round((d+(length(DOYs)*(y-1)))/(length(DOYs)*length(YEARs))*100, digits=2),'%'))
  }
}

#Removing temp files to free space
tmp <- list.files(path=paste0(dataPath,'/temp/'), recursive=F, ignore.case=T, full.names=T)
file.remove(tmp)

##################STARTING SEASONALITY BLOCK ################## ################## ##################

#Creating output folder
dir.create(paste0(dataPath,'/season/'))

###Seasonality for each year
#Listing EVI data which is the base for seasonality calculation
allEVI <- list.files(paste0(dataPath,'/EVI/'), pattern=".tif$", recursive=F, full.names = T)
#Creating a stack of all EVI data
sEVI <- stack(allEVI)
#Passing a starting year to greenbrown for proper output filenames
stDate <- c(as.numeric(min(YEARs)),01,01)

#Start multicore processing
#The seasonality-calculation will take advantage of all CPU cores. The PC
#might lag during the process and may not be usable for other tasks in a
#productive way.
#If you want to keep using your PC during the process, reduce the number of
#used cores by one: replace
#  max(as.numeric(Sys.getenv('NUMBER_OF_PROCESSORS')),1)
#with
#  max(as.numeric(Sys.getenv('NUMBER_OF_PROCESSORS'))-1,1)
#in the following line. (NUMBER_OF_PROCESSORS is a Windows environment
#variable; on other systems it is unset and this expression yields NA.)
no_cores <- max(as.numeric(Sys.getenv('NUMBER_OF_PROCESSORS')),1)
print(paste0(Sys.time()," Seasonality calculation (this will take a while; Step 5 of 6)"))
beginCluster(no_cores)
#Seasonality calculation (freq = 23 MODIS composites per year). Check the
#greenbrown-package documentation at http://greenbrown.r-forge.r-project.org/
#and the "in detail" page for more information on the calculation
phenmap <- clusterR(sEVI,fun=PhenologyRaster,args=list(start = stDate, freq = 23, tsgf=NULL, approach="Deriv", check.seasonality=NULL))
#Ending multi-core processing
endCluster()

#Output Phenology: one GeoTIFF per phenology layer, named by greenbrown
n <- NamesPhenologyRaster(phenmap, start=as.numeric(YEARs[1]))
for (i in 1:nlayers(phenmap)){
  writeRaster(phenmap[[i]],filename=paste0(dataPath,"/season/",n[i],".tif"),format="GTiff",overwrite=T)
}

##################STARTING WEIGHTING BLOCK ################## ################## ##################

#List all finished VCI images
allVCI <- list.files(paste0(dataPath,'/VCI/'), pattern=".tif$", recursive=F, full.names = T)
#Create an output-folder for the final output
dir.create(paste0(dataPath,'/weighted'))

#Looping through each year(/season)
for (sy in as.numeric(min(YEARs)):as.numeric(max(YEARs))){
  #Fail-safe: check if seasonality data was calculated for the year; for the
  #southern hemisphere the last year is skipped (its season would need data
  #from the following, unavailable year)
  if (file.exists(paste0(dataPath,"/season/","SOS.",sy,".tif")) && !(sy == max(YEARs) && hemisphere != 1)){
    #Check for hemisphere, load VCI data for the year
    #NOTE(review): grepl(sy, allVCI) matches the year digits anywhere in the
    #file path, not just the YEAR field of the filename - verify paths cannot
    #contain year-like digit runs.
    if (hemisphere == 1){
      lyVCI <- allVCI[grepl(sy,allVCI)]
      dscr <- sy
    }else{
      #Load correct data if southern hemisphere was selected (seasonality
      #shift across the calendar-year boundary)
      lyVCI <- c(allVCI[grepl(sy,allVCI)],allVCI[grepl(sy+1,allVCI)])
      #Indices assume exactly 23 composites per year - TODO confirm for
      #incomplete first/last years
      if (sy == min(YEARs)){
        lyVCI <- lyVCI[1:23]
      }else{
        lyVCI <- lyVCI[13:35]
      }
      #Adjusting output filenames if southern hemisphere was selected
      dscr <- paste0(sy,"_",sy+1)
    }
    #Stacking all VCI for the year
    yVCI <- stack(lyVCI)
    #Loading seasonality rasters for the year (start, peak, end of season;
    #values are composite indices within the year)
    SoS <- raster(paste0(dataPath,"/season/","SOS.",sy,".tif"))
    Peak <- raster(paste0(dataPath,"/season/","POP.",sy,".tif"))
    EoS <- raster(paste0(dataPath,"/season/","EOS.",sy,".tif"))

    #Creating VCI means for the three "blocks" of seasonal timing
    #####Start of season variables
    BlockA <- stack()
    #Iterating through all 23 images of the season
    for (i in 1:23){
      #Read start-of-season dates
      s <- SoS
      #Read dates up to 16 days (one composite) before peak of season
      e <- (Peak - 1)
      #Check if current iteration lies within the seasonal dates of each pixel
      s[s > i] <- NA
      e[e < i] <- NA
      #Creating a binary mask (m/m is 1 where both bounds hold, NA elsewhere)
      #and applying it to the VCI image
      m <- s*e
      v <- yVCI[[i]]*(m/m)
      #Add layer to stack
      BlockA <- stack(BlockA, v)
    }
    #Create mean of this time-block
    BlockA <- calc(BlockA, fun=mean, na.rm=T)

    #####Peak of season variables
    #See comments above; here only the single peak composite contributes
    BlockB <- stack()
    for (i in 1:23){
      s <- Peak
      e <- Peak
      s[s > i] <- NA
      e[e < i] <- NA
      m <- s*e
      v <- yVCI[[i]]*(m/m)
      BlockB <- stack(BlockB, v)
    }
    BlockB <- calc(BlockB, fun=mean, na.rm=T)

    #####End of season variables
    #See comments above; composites from one step after peak to end of season
    BlockC <- stack()
    for (i in 1:23){
      s <- (Peak + 1)
      e <- EoS
      s[s > i] <- NA
      e[e < i] <- NA
      m <- s*e
      v <- yVCI[[i]]*(m/m)
      BlockC <- stack(BlockC, v)
    }
    BlockC <- calc(BlockC, fun=mean, na.rm=T)

    #Weighting: 5-7-1 start-peak-end of season mean (BlockA appears 5 times,
    #BlockB 7 times, BlockC once; the mean of the 13 layers realises the
    #weights). Result: weighted VCI by season
    WeightedStack <- stack(BlockA,BlockA,BlockA,BlockA,BlockA,BlockB,BlockB,BlockB,BlockB,BlockB,BlockB,BlockB,BlockC)
    WeightedVCI <- calc(WeightedStack, fun=mean,na.rm=T)

    #Output: five classes (VCI 0-10 -> 4, 10-20 -> 3, 20-30 -> 2,
    #30-40 -> 1, 40-100 -> 0; higher class = stronger drought)
    cWeightedVCI <- reclassify(WeightedVCI, c(-1, 10, 4, 10, 20, 3, 20, 30, 2, 30,40, 1,40,101,0))
    writeRaster(cWeightedVCI,filename=paste0(dataPath,"/weighted/",dscr,"_droughtFiveCl.tif"),format="GTiff",overwrite=T)
    #Output: three classes (0-10 -> 2, 10-40 -> 1, 40-100 -> 0)
    dWeightedVCI <- reclassify(WeightedVCI, c(-1, 10, 2, 10, 40, 1, 40, 101, 0))
    writeRaster(dWeightedVCI,filename=paste0(dataPath,"/weighted/",dscr,"_droughtThreeCl.tif"),format="GTiff",overwrite=T)

    #(Progress report)
    #NOTE(review): divides by (max year - min year); with a single year of
    #data this is a division by zero - confirm multi-year input is required.
    print(paste0('Weighting indices (Step 6 of 6): ',round((sy-as.numeric(min(YEARs))) / (as.numeric(max(YEARs))-as.numeric(min(YEARs))) * 100, digits=2),'%'))
  }
}

#Deleting all temp files
tmp <- list.files(path=paste0(dataPath,'/temp/'), recursive=F, ignore.case=T, full.names=T)
file.remove(tmp)

#Finished
print(paste0(Sys.time(),' - Finished. You will find the data in the subfolders \'VCI\' and \'weighted\' (classified drought indicators) at ', dataPath,"."))