# This script requires the zip command line tool to be installed in order to upload shapefiles.
# The zip tool is installed if RTools is installed.

# Assuming we have a spreadsheet with the following columns
# {layerName} WMS layer name
# {nativeName} Name of the shapefile with no file extension
# {workspace} Workspace for this feature
# {geoserverUrl} Base URL of the geoserver. http://maps.e-atlas.org.au/maps
# {title} Human readable title of the layer
# {abstract} Abstract text for the layer
# *{keywords} List of keywords associated with the layer separated by commas ",". 
# *{metadataXmlHref} Link to the ISO19115/ISO19139 Metadata for this dataset. For example: http://e-atlas.org.au/geonetwork/srv/en/iso19139.xml?id=36
# {storeName} Name of the store that this layer is created from.
# {storePath} Path to the Shapefile. Example: data/AU_GA_Topo2M5-2003/drainage/aus25dgd_l.shp, 
# 	data/WT_UQ-Grif-DERM_Aerial-imagrey-1978/mosaic_process1a-1channel.tif
# *{wmsPath} WMS path for the layer
# {attributionTitle} Attribution block title (unused by any existing client)
# {attributionHref}	Attribution block href (unused by any existing client)
# 
# * Optional field that is not required to be in the layers spreadsheet.

# The following are internal variables associated with each data store
# {nativeStoreType} "ASCIIgrid", "GeoTiff", "Shapefile", "CSV", Type prior to an automatic conversion
# {storeType} "Shapefile", "GeoTiff"
# 
# setwd("C:\\Users\\elawrey\\Documents\\2013\\code\\e-atlas-data-tools\\gsLoader_1.0\\gsLoader\\R")



library("RCurl")	# From CRAN
library("XML")		# From CRAN
library("tools") 	# built in library

# This function creates an empty layers.csv as a starting point. Showing all the columns
geoserverRestLoader.exampleLayers.csv <- function() {
	# Build the example table. Each column holds three entries: a
	# description row followed by two concrete working examples
	# (a GeoTiff basemap and a Shapefile coastline).
	exampleRows <- data.frame(
		storeName = c("{Region}{?_{Research-program}?}_{Institution}_{Dataset-desc}{?_{Year}?}", "World_NE2_Tiny","World_NE_50m_coastline"),
		layerName = c("{storeName}{?_{Layer-desc}?}","{storeName}", "{storeName}"),
		workspace = c("ea", "ea", "ea"),
		storeType = c("(GeoTiff, Shapefile)", "GeoTiff", "Shapefile"),
		title = c("{Region}:{Dataset-layer-desc-human-readable}{? {Year}?} ({Institution}{?, {Research-program}?})", "World: Natural Earth 2 (NED)", "World: Coastline 1:50M (NED)"),
		abstract = c("Summary text describing the layer (1 sentence). Added to top of metadata record", "This is a low resolution version of the Natural Earth 2 dataset.",
			"Low resolution world coastline 1:50M"),
		metadataXmlHref = c("Link to ISO19115/19139 XML record for dataset", "http://e-atlas.org.au/geonetwork/srv/en/iso19139.xml?id=112", ""),
		keywords = c("List of keywords separated by commas", "Basemap, World", "Natural Earth, World, Coastline"),
		storePath = c("Path to GIS file, relative to list file", "../inst/Natural_Earth_2_Tiny.tif", "../inst/50m_coastline.shp"),
		styles = c("List of style names separated by commas. Must already exist. Blank for Geoserver defaults", "", "line,Generic-polygon-pale-yellow, Generic-polygon-blue-hash, Polygon_Outline-BW, Polygon_Outline-dashed-BW"),
		wmsPath = c("{ISO-topic}{?/{Dataset title}?} if more than one layer", "Imagery Base Maps Earth Cover", "Boundaries"),
		uploadData = c("if TRUE upload the data", TRUE, TRUE),
		uploadMetadata = c("if TRUE set all the layer information from this row", TRUE, TRUE))

	# Write the starter file into the current working directory.
	outputPath <- file.path(getwd(), "exampleLayers.csv")
	print(paste("Saving example at:", outputPath))
	write.csv(exampleRows, outputPath, row.names = FALSE)
}

# Use the Geoserver REST API to setup the layers specified in the layerTableCsv
# layerTableCsv - 			Name of the layers csv file that specifies one layer per row. This can also be a data.frame
#							instead of a filename.
# uploadData - 				If FALSE no data is uploaded, only the layer metadata is modified. If TRUE then the data
#							is uploaded. If skipUploadIfStoreExists is FALSE then the data is uploaded regardless
#							of whether the data has been already uploaded to the GeoServer.
# skipUploadIfStoreExists - If TRUE then skip the data upload if the store already exists on the Geoserver. Ignored
#							if uploadData is FALSE.
# skipZipIfAlreadyExists - 	If TRUE then don't recreate the Zipped version of the shapefile if the file already exists.
#							This is handy for reducing the upload time.
geoserverRestLoader <- function(geoserverUser="admin", geoserverPassword="geoserver", 
	geoserverURL="http://localhost:8080/geoserver", layerTableCsv="layers.csv", uploadData=TRUE, skipUploadIfStoreExists=TRUE, skipZipIfAlreadyExists=TRUE) {
	# layerTableCsv may be a data.frame (used directly) or the name of a CSV
	# file to read. inherits() is used rather than class() == "data.frame" so
	# that objects extending data.frame are also accepted.
	if (inherits(layerTableCsv, "data.frame")) {
		templateTable <- layerTableCsv
	} else {
		templateTable <- read.csv(layerTableCsv, stringsAsFactors=FALSE)
	}
	# Expand the {column} template substitutions between columns
	# (columnSubstitute is defined elsewhere in this file).
	mergeTable <- columnSubstitute(templateTable)

	# Credentials in the "user:password" form that RCurl expects.
	userpwd <- paste(geoserverUser, geoserverPassword, sep=":")

	# Report a fatal problem with one layer. Note: stop() aborts the whole
	# run, so any `next` following a logError() call is unreachable; those
	# `next` calls are kept to document intent should this become non-fatal.
	logError <- function(layerName, error) {
		# To be expanded in the future
		stop(paste("Error:",layerName,":", error))
	}

	# Report a non-fatal problem with one layer and continue.
	logWarning <- function(layerName, error) {
		# To be expanded in the future
		print(paste("Warning:",layerName,":", error))
	}

	# storeName is the only strictly required column; the rest only warn.
	if (!("storeName" %in% colnames(mergeTable))) {
		logError(layerTableCsv, "Missing storeName column.")
	}
	
	if (!("uploadData" %in% colnames(mergeTable))) {
		logWarning(layerTableCsv,"Contains no uploadData column. Therefore no data will be uploaded")
	}

	if (!("uploadMetadata" %in% colnames(mergeTable))) {
		logWarning(layerTableCsv,"Contains no uploadMetadata column. Therefore no layer metadata will be uploaded")
	}

	if (!("metadataXmlHref" %in% colnames(mergeTable))) {
		logWarning(layerTableCsv,"Contains no metadataXmlHref column. URL to a ISO19115/19139 XML record")
	}

	if (!("styles" %in% colnames(mergeTable))) {
		logWarning(layerTableCsv,"Contains no styles column. A comma separated list of styles. The first one is the default. Styles must exist already.")
	}

	if (!("wmsPath" %in% colnames(mergeTable))) {
		logWarning(layerTableCsv,"Contains no wmsPath column. Path of the wms layer.")
	}

	# Perform some pre tests before starting any uploads so problems are
	# caught before any (slow) data transfer begins.
	# seq_len() rather than 1:nrow() so a zero-row table is handled safely.
	for (i in seq_len(nrow(mergeTable))) {
		m <- mergeTable[i,]
		if (any(grep(" ", m$layerName))) {
			logError(m$layerName,paste("Layername contains space in the name. Please no spaces in the layer names."))
		}
		
		if (any(grep("\\.", m$layerName))) {
			logError(m$layerName,paste("Layername contains one or more '.' in the name. Please no dots in the layer names ",
			"as it breaks the GeoServer REST API."))
		}
	}

	# Process each layer: optionally upload its data, then optionally set
	# its store-level and layer-level metadata via the REST API.
	for (i in seq_len(nrow(mergeTable))) {
		print(paste("------- ", Sys.time()," ",i," of ",nrow(mergeTable)," -------",sep=""))
		m <- mergeTable[i,]

		# Layer name used for updating the metadata. If the data is being uploaded as well then the
		# upload will change the layer name to match the initial name created by the Geoserver after the
		# upload, which corresponds to the GIS file name, with no extension.
		# The act of setting the layer metadata will also rename the layer to match that in the 
		# layers.csv.
		layerName <- m$layerName
		
		# ==========================================================
		# Work out the REST end points and upload content type that
		# correspond to this store type.
		# ==========================================================
		if (m$storeType == "Shapefile") {
			restStoreFragments <- c("datastores", "featuretypes")
			# The "file" in front of ".shp" is arbitrary, use what you like.
			restStorePathEnd <- "file.shp"
			contentType <- 'application/zip'
			# Tag name of the configuration of the layer under the workspace store i.e. Root node in the following:
			# http://localhost:8080/geoserver/rest/workspaces/{workspace}/coveragestores/{storeName}/coverages/{layerName}.xml
			# http://localhost:8080/geoserver/rest/workspaces/{workspace}/datastores/{storeName}/featuretypes/{layerName}.xml
			storeType <- 'featureType'	
		} else if (m$storeType == "GeoTiff") {
			restStoreFragments <- c("coveragestores", "coverages")
			restStorePathEnd <- "file.geotiff"
			contentType <- 'image/tiff'
			storeType <- 'coverage'
		} else {
			stop(paste("Unknown storeType:", m$storeType))
		}

		# Per-row uploadData flag from the spreadsheet; the function-level
		# uploadData parameter additionally gates the actual zip/transfer
		# steps below so metadata-only runs are possible.
		if (m$uploadData) {
			print("---------------- Uploading ---------------")
			if (!file.exists(m$storePath)) {
				logWarning(m$layerName,paste("Skipping layer as file does not exists for store:",m$storePath))
				next	# Skip to next layer
			} 
			normStorePath <- normalizePath(m$storePath)
			
			print(restStoreFragments)
			storeUrl <- paste(geoserverURL,"/rest/workspaces/",m$workspace,"/",restStoreFragments[1],"/",m$storeName,"/?recurse=true", sep="")
			if (skipUploadIfStoreExists && url.exists(storeUrl, userpwd=userpwd, httpauth= AUTH_BASIC)) {
				# Store already exists so skip
				print(paste(m$layerName,": Skipping upload as store already exists in GeoServer: ",storeUrl))
			} else {
				if (m$storeType == "Shapefile") {
					# Find all the files associated with the Shapefile.
					
					# Test that the path points to a shapefile (i.e. at least has a .shp extension)
					# file_ext from tools library
					if (file_ext(normStorePath) != "shp") {
						logError(m$layerName, paste("Specified shapefile does not have expected extension of .shp: ",normStorePath))
						next # Skip to next layer
					}
					
					# Test that the files associated with the shapefile exist
					# file_path_sans_ext from tools library
					shapefileBase <- file_path_sans_ext(normStorePath)
					
					shapefilePartExts <- c(".shp",".shx", ".dbf",".prj")
					
					shapefiles <- paste(shapefileBase, shapefilePartExts, sep="")
					
					# Test that all the files for the shapefile exist
					exist <- file.exists(shapefiles)
					if (any(!exist)) {
						# Bug fix: report the files that are MISSING. Previously
						# shapefiles[which(exist)] listed the files that DID exist,
						# which made the error message misleading.
						logError(m$layerName, 
							paste("One or more files associated with the shapefile don't exist: ", 
							paste(shapefiles[!exist], collapse=",")))
						next # Skip to next layer
					}
					if (uploadData) {
						# Have to zip the files associated with the shapefile so they can be
						# uploaded to the GeoServer REST API in a single request. The Info-ZIP
						# `zip` command line tool is used; -j junks directory names so the
						# files sit at the top level of the archive, as GeoServer expects.
						
						# Zip of the shapefile to upload to the GeoServer. Make the name a bit
						# unique to reduce the chance of a clash with some other reason for a zip file.
						shapefileZip <- paste(shapefileBase, ".gs.shp.zip", sep="")
						if (!(skipZipIfAlreadyExists && file.exists(shapefileZip))) {
							# Delete any existing zip file.
							if (file.exists(shapefileZip)) {
								file.remove(shapefileZip)
							}
							command <- paste('zip -j "', shapefileZip,'" "', paste(shapefiles, collapse='" "'),'"', sep="")
							print("Zipping shapefile with command")
							print(command)
							
							# Test if the zip command is installed
							if (nchar(Sys.which("zip")) == 0) {
								logError(m$layerName,"No zip command found. Please install the RTools http://cran.r-project.org/bin/windows/Rtools/")
								next
							}
							
							system(command)
							
							# Test if the zip file was created
							if(!file.exists(shapefileZip)) {
								logError(m$layerName,"No zip file was created.")
								next
							}
						} else {
							print(paste("Skipping zip as it already exists:", shapefileZip))
						}
						uploadFile <- shapefileZip
					} else {
						uploadFile <- ""
					}
					# Name of the layer as it is originally created by GeoServer. This corresponds to the
					# name of the GIS file. We later change the name to the one we want.
					layerName <- basename(shapefileBase)
					
					layerName <- gsub(" ","%20", layerName)
				} else if (m$storeType == "GeoTiff") {
					uploadFile <- normStorePath
					# When a coverage is first uploaded its layerName matches the name of the store. This is not
					# the same behaviour as for vector files.
					layerName <- m$storeName
				}

				if (uploadData) {
					# ======================
					# Delete existing store
					# ======================
					storeUrl <- paste(geoserverURL,"/rest/workspaces/",m$workspace,"/",restStoreFragments[1],"/",m$storeName,"/?recurse=true", sep="")
					if (url.exists(storeUrl, userpwd=userpwd, httpauth= AUTH_BASIC)) {
						print(paste("Deleting store:",storeUrl))
						h <- basicTextGatherer()
						res <- getURL(storeUrl,
							  customrequest = "DELETE",
							  userpwd=userpwd, httpauth= AUTH_BASIC,
							  .opts = list(headerfunction = h$update)
							  )
						header = parseHTTPHeader(h$value())
						if (header["status"] == "200") {
							print("Successfully deleted existing copy of the data on the GeoServer")
						} else {
							# Bug fix: storeUrl was previously passed as a third argument
							# to logError(), which only accepts two arguments and so would
							# raise an "unused argument" error instead of the real message.
							logError(m$layerName, paste("Deleting existing copy on GeoServer didn't work: code: ", 
								header["status"], " message: ", header["statusMessage"], " url: ", storeUrl))
							print(header)
							print(res)
							next
						}
					} else {
						print(paste("Did not find existing store at:", storeUrl))
					}
					
					# ================
					# Upload the data
					# ================
					# Equivalent to:
					# curl -u admin:geoserver -XPUT -H "Content-type:image/tiff" --data-binary @realized_PSEHERB.tif http://localhost:8080/geoserver/rest/workspaces/ea/coveragestores/realized_PSEHERB/file.geotiff 
					
					# Read the whole file as raw bytes so it can be PUT as the request body.
					# From https://github.com/wactbprot/R4CouchDB/blob/master/R4CouchDB/R/cdbAddAttachment.R
					# and http://digitheadslabnotebook.blogspot.com.au/2010/09/how-to-send-http-put-request-from-r.html
					noOfBytes <- file.info(uploadFile)$size
					con       <- file(uploadFile, "rb")
					data      <- readBin(con,n=noOfBytes,raw())
					close(con)
					layerUploadUrl <- paste(geoserverURL,"/rest/workspaces/",m$workspace,"/",restStoreFragments[1],"/",m$storeName,"/",restStorePathEnd, sep="")
								
					print(paste("Updating data to:",layerUploadUrl))
					
					h <- basicTextGatherer()
					
					res <- getURL(layerUploadUrl,
						  customrequest = "PUT",
						  postfields = data,
						  httpheader=c("Content-Type"=contentType),
						  userpwd=userpwd, httpauth= AUTH_BASIC,
						  .opts = list(headerfunction = h$update)
						  )
					header = parseHTTPHeader(h$value())
					
					# A successful REST file upload returns HTTP 201 (Created).
					if (header["status"] == "201") {
						print(paste("Successful upload of the data for:", m$layerName))
					} else {
						logError(m$layerName, paste("Uploading data didn't work: code: ", 
							header["status"], " message: ", header["statusMessage"],layerUploadUrl))
						print(header)
						print(res)
						next
					}
				}
			}
	 
			# Note: a successful shapefile upload returns "" as the response body,
			# while a successful coverage upload returns an XML <coverageStore>
			# document. The body is not currently inspected beyond the HTTP
			# status code checked above.
		}
		
		if (m$uploadMetadata) {
					
			# ------------------
			# MetadataLinks tag.
			# ------------------
			metadataLinksXmlTag <- ''
			# Only create the XML if there is a metadataXmlHref column and its value is not empty.
			# If the column exists but only has blank cells then read.csv will load it as
			# a logical data type with NA values, hence the is.character() check.
			if ("metadataXmlHref" %in% colnames(m) && is.character(m$metadataXmlHref)) {
				
				if (m$metadataXmlHref != "") {
					# Test to see if the Metadata reference points to a valid and accessible URL.
					if(!url.exists(m$metadataXmlHref)) {
						logWarning(m$layerName,paste("Metadata link is broken:",m$metadataXmlHref))
					}
					metadataLinksXmlTag <- paste(
	'	<metadataLinks>
			<metadataLink>
				<type>text/xml</type>
				<metadataType>TC211</metadataType>
				<content>',m$metadataXmlHref,'</content>
			</metadataLink>
		</metadataLinks>', sep="")
		
				}
			}
			
			# ---------
			# Keywords
			# ---------
			
			# returns string w/o leading or trailing whitespace
			trim <- function (x) gsub("^\\s+|\\s+$", "", x)
			
			keywordsTag <- ''
			if ("keywords" %in% colnames(m)) {
				if (m$keywords != "") {
					keywords <- trim(strsplit(m$keywords,",")[[1]])
					keywordsTag <- paste("\t<keywords>\n\t\t<string>",
						paste(keywords, collapse="</string>\n\t\t<string>"),"</string>\n\t</keywords>",sep="")
				}
			}
			
			
			# Set the name of the layer to the one we want (m$layerName) rather than the one initially
			# created by GeoServer. 
			storeLayerXml <- paste('<',storeType,'>\n',
				'\t<name>',m$layerName,'</name>\n',
				'\t<title>',m$title,'</title>\n',
				'\t<abstract>',m$abstract,'</abstract>\n',
				keywordsTag,'\n',metadataLinksXmlTag,'\n',
				'\t<enabled>true</enabled>\n',
				'\t<advertised>true</advertised>\n','
				</',storeType,'>',sep="")
			
			# http://localhost:8080/geoserver/rest/workspaces/{workspace}/datastores/{storeName}/featuretypes/{layerName}.xml
			# http://localhost:8080/geoserver/rest/workspaces/{workspace}/coveragestores/{storeName}/coverages/{layerName}.xml
			# ====================================================
			# Set the title, abstract, metadata, etc for the data

			# Because of what seems to be a bug in the Geoserver if a layer has a space in the name
			# then the space is escaped to %20 and the percent is escaped to %25 resulting in an XML
			# end point with %2520 in the name.
			# http://localhost:8080/geoserver/rest/workspaces/ts/datastores/TS_TSRA_SLUP-2010_Badu-Annotation/featuretypes/Badu%2520Annotation.xml
			layerNameXmlEndPoint <- gsub("%","%25", layerName)
			
			storeLayerHref <- paste(
				geoserverURL,"/rest/workspaces/",m$workspace,"/",restStoreFragments[1],"/",m$storeName,"/",restStoreFragments[2],"/",
					layerNameXmlEndPoint,".xml", sep="")
			storeLayerHref <- gsub(" ","%20", storeLayerHref)
			
			
			h <- basicTextGatherer()
			res <- getURL(storeLayerHref,
				  customrequest = "PUT",
				  postfields = storeLayerXml,
				  httpheader=c("Content-Type"='application/xml'),
				  userpwd=userpwd, httpauth= AUTH_BASIC,
				  .opts = list(headerfunction = h$update)
				  )
			header = parseHTTPHeader(h$value())
			
			if (header["status"] == "200") {
				print(paste("Successful update layer store metadata:",storeLayerHref))
			} else {
				logError(m$layerName, paste("Updating layer store metadata didn't work: code: ", 
					header["status"], " message: ", header["statusMessage"], storeLayerHref))
				print(header)
				print(res)
				cat(storeLayerXml, "\n")
				next
			}

			
			# --------------------
			# WMS Path and Styles
			# --------------------
			
			# -------
			# Styles
			# -------
			# Don't create a styles tag if the column is not available or the cell is empty. This will result
			# in this variable not being modified in the Geoserver. It will therefore keep the existing
			# default value. We do this because it is invalid for the setting to be empty.
			defaultStylesXmlTag <- ''
			stylesXmlTag <- ''
			if ("styles" %in% colnames(m)) {
				# Don't create a styles tag if the cell is empty. This will result
				# in this variable not being modified in the Geoserver.
				if (m$styles != "") {
					styles <- trim(strsplit(m$styles,",")[[1]])
					# First style in the list becomes the default style.
					defaultStylesXmlTag <- paste('\t<defaultStyle>\n',
						'\t\t<name>',styles[1],'</name>\n',
						'\t</defaultStyle>\n', sep="")
					if (length(styles) > 1) {
						stylesXmlTag <- paste(
							'\t<styles class="linked-hash-set">\n',
							'\t\t<style>\n',
							'\t\t\t<name>',	paste(styles[2:length(styles)], collapse='</name>\n\t\t</style>\n\t\t<style>\n\t\t\t<name>'),'</name>\n',
							'\t\t</style>\n',
							'\t</styles>\n', sep="")
					} 
				}
			}
			
			# ---------
			# WMS path
			# ---------
			# If no wmsPath column exist then don't modify this setting
			wmsPathXmlTag <- ''
			if ("wmsPath" %in% colnames(m)) {
				wmsPathXmlTag <- paste('\t<path>',m$wmsPath,'</path>\n', sep="")
			}
			
			
			layerXml <- paste(
			'<layer>\n',
			'\t<name>',m$layerName,'</name>\n',
			wmsPathXmlTag,
			defaultStylesXmlTag,
			stylesXmlTag,
			'\t<enabled>true</enabled>\n',
			'\t<advertised>true</advertised>\n',
			'</layer>', sep="")
			
			# If the layer name contains a space then the XML end point is like
			# http://localhost:8080/geoserver/rest/layers/Badu%2520Annotation.xml
			layerHref <- paste(
				geoserverURL,"/rest/layers/",m$layerName,".xml", sep="")

			h <- basicTextGatherer()
			res <- getURL(layerHref,
				  customrequest = "PUT",
				  postfields = layerXml,
				  httpheader=c("Content-Type"='application/xml'),
				  userpwd=userpwd, httpauth= AUTH_BASIC,
				  .opts = list(headerfunction = h$update)
				  )
			header = parseHTTPHeader(h$value())
			
			if (header["status"] == "200") {
				print(paste("Successful update layer metadata:",layerHref))
			} else {
				logError(m$layerName, paste("Updating layer metadata didn't work: code: ", 
					header["status"], " message: ", header["statusMessage"], layerHref))
				print(header)
				print(res)
				cat(layerXml, "\n")
				next
			}
			
		}
		
	}
}
	# if (m$storeType == "GeoTiff") {
		# coverageUrl <- paste(geoserverURL,"/rest/workspaces/",m$workspace,"/coveragestores/",m$storeName,"/coverages/",m$layerName,".xml", sep="")
		# if (url.exists(coverageUrl, userpwd=userpwd, httpauth= AUTH_BASIC)) {
			# print(paste("Exists:    ", m$layerName,sep=""))
		# } else {
			# print(paste("Not exist: ", m$layerName, sep=""))
			# print(coverageUrl)
		# }
		
		
		# # url.exists(
			# # "http://localhost:8080/geoserver/rest/workspaces/ea/coveragestores/GBR_JCU_Bathymetry-3DGBR_Hillshade/coverages/GBR_JCU_Bathymetry-3DGBR_Hillshade.xml", 
			# # userpwd="admin:geoserver", httpauth= AUTH_BASIC)
	# } else if (m$storeType == "Shapefile") {
		# #http://localhost:8080/geoserver/rest/workspaces/ea/datastores/GBR_JCU_3DGBR-geomorph_Coralsea-bank/featuretypes/GBR_JCU_3DGBR-geomorph_Coralsea-bank.xml
		# featureUrl <- paste(geoserverURL,"/rest/workspaces/",m$workspace,"/datastores/",m$storeName,"/featuretypes/",m$layerName,".xml", sep="")
		# if (url.exists(featureUrl, userpwd=userpwd, httpauth= AUTH_BASIC)) {
			# print(paste("Exists:    ", m$layerName,sep=""))
		# } else {
			# print(paste("Not exist: ", m$layerName, sep=""))
			# print(featureUrl)
		# }
	# }

