From 1bfb235015cb3f6525ca1631b0bec0335ee8578b Mon Sep 17 00:00:00 2001 From: Andrew Gene Brown Date: Sat, 25 Jan 2020 08:42:03 -0800 Subject: [PATCH] all examples that require internet now use curl::has_internet() as part of their "dependencies" for running --- R/fetchSDA_spatial.R | 36 ++-- man/OSDquery.Rd | 7 +- man/SDA_query.Rd | 191 +++++++++-------- man/SSURGO_spatial_query.Rd | 78 +++---- man/fetchHenry.Rd | 124 +++++------ man/fetchKSSL.Rd | 227 ++++++++++---------- man/fetchNASISWebReport.Rd | 9 +- man/fetchOSD.Rd | 164 +++++++------- man/fetchRaCA.Rd | 125 +++++------ man/fetchSCAN.Rd | 104 ++++----- man/fetchSDA_component.Rd | 379 ++++++++++++++++----------------- man/fetchSDA_spatial.Rd | 45 ++-- man/mapunit_geom_by_ll_bbox.Rd | 123 +++++------ man/seriesExtent.Rd | 69 +++--- man/siblings.Rd | 20 +- man/uncode.Rd | 104 ++++----- man/us_ss_timeline.Rd | 12 +- 17 files changed, 933 insertions(+), 884 deletions(-) diff --git a/R/fetchSDA_spatial.R b/R/fetchSDA_spatial.R index 7ee94a6f..7c90ef8d 100644 --- a/R/fetchSDA_spatial.R +++ b/R/fetchSDA_spatial.R @@ -5,26 +5,30 @@ #' @param x A vector of MUKEYs or national mapunit symbols. #' @param by.col Column name containing mapunit identifier ("mukey" or "nmusym"); default: "mukey" #' @param method geometry result type: 'feature' returns polygons, 'bbox' returns the bounding box of each polygon, and 'point' returns a single point within each polygon. -#' @param add.fields Column names from `mapunit` table to add to result. Must specify table name prefix as either `G` or `mapunit`. -#' @param chunk.size How many queries should spatial request be divided into? Necessary for large extents. Default: 10 -#' @return A SpatialPolygonsDataFrame corresponding to SDA spatial data for all MUKEYs / nmusyms requested. Default result contains MupolygonWktWgs84-derived geometry with attribute table containing `gid`, `mukey` and `nationalmusym`, additional fields in result are specified with `add.fields`. +#' @param add.fields Column names from `mapunit` table to add to result. Must specify table name prefix `mapunit` before column name (e.g. `mapunit.muname`). +#' @param chunk.size How many queries should spatial request be divided into? Necessary for large results. Default: 10 +#' @return A Spatial*DataFrame corresponding to SDA spatial data for all MUKEYs / nmusyms requested. Default result contains mapunit delineation geometry with attribute table containing `gid`, `mukey` and `nationalmusym`, plus additional fields in result specified with `add.fields`. #' @author Andrew G. Brown. 
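The connectivity guard applied throughout this patch follows a single pattern; here is a minimal sketch, using a representative fetch call from the examples below (`quietly = TRUE` is an optional refinement, not what the patch itself uses):

    # connectivity guard used by these examples; note that R's `&` evaluates
    # both operands, while `&&` short-circuits, so `&&` is the safer idiom
    # when the curl package may not be installed at all
    if (requireNamespace("curl", quietly = TRUE) && curl::has_internet()) {
      x <- fetchSDA_spatial(x = "2924882")  # any internet-dependent call
    }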
#' @examples #' \donttest{ -#' # get spatial data for a single mukey -#' single.mukey <- fetchSDA_spatial(x = "2924882") -#' -#' # demonstrate fetching full extent (multi-mukey) of national musym -#' full.extent.nmusym <- fetchSDA_spatial(x = "2x8l5", by = "nmusym") -#' -#' # compare extent of nmusym to single mukey within it -#' if(require(sp)) { -#' plot(full.extent.nmusym, col = "RED",border=0) -#' plot(single.mukey, add = TRUE, col = "BLUE", border=0) +#' if(requireNamespace("curl") & +#' curl::has_internet()) { +#' +#' # get spatial data for a single mukey +#' single.mukey <- fetchSDA_spatial(x = "2924882") +#' +#' # demonstrate fetching full extent (multi-mukey) of national musym +#' full.extent.nmusym <- fetchSDA_spatial(x = "2x8l5", by = "nmusym") +#' +#' # compare extent of nmusym to single mukey within it +#' if(require(sp)) { +#' plot(full.extent.nmusym, col = "RED",border=0) +#' plot(single.mukey, add = TRUE, col = "BLUE", border=0) +#' } +#' +#' # demo adding a field (`muname`) to attribute table of result +#' head(fetchSDA_spatial(x = "2x8l5", by="nmusym", add.fields="muname")) #' } -#' -#' # demo adding a field (`muname`) to attribute table of result -#' head(fetchSDA_spatial(x = "2x8l5", by="nmusym", add.fields="muname")) #' } #' @rdname fetchSDA_spatial #' @export fetchSDA_spatial diff --git a/man/OSDquery.Rd b/man/OSDquery.Rd index 3fe09b2e..f25a9a69 100644 --- a/man/OSDquery.Rd +++ b/man/OSDquery.Rd @@ -58,11 +58,16 @@ \examples{ \donttest{ -if(require(aqp)) { +if(requireNamespace("curl") & + curl::has_internet() & + require(aqp)) { + # find all series that list Pardee as a geographically associated soil. s <- OSDquery(geog_assoc_soils = 'pardee') + # get data for these series x <- fetchOSD(s$series, extended = TRUE, colorState = 'dry') + # simple figure par(mar=c(0,0,1,1)) plot(x$SPC) diff --git a/man/SDA_query.Rd b/man/SDA_query.Rd index 93d203ec..5dc0cf3c 100644 --- a/man/SDA_query.Rd +++ b/man/SDA_query.Rd @@ -1,94 +1,97 @@ -\name{SDA_query} -\alias{SDA_query} -\alias{makeChunks} -\alias{format_SQL_in_statement} - -\title{Soil Data Access Query} -\description{Submit a query to the Soil Data Acccess (SDA) website in SQL, get the results as a dataframe.} - -\usage{ -SDA_query(q) -makeChunks(ids, size=100) -format_SQL_in_statement(x) -} - -\arguments{ - \item{q}{a valid T-SQL query surrounded by double quotes} - \item{ids}{vector of IDs for chunking, contents aren't used just length} - \item{size}{target chunk size} - \item{x}{character vector to be packed into an SQL `IN` statement} -} - -\details{The SDA website can be found at \url{http://sdmdataaccess.nrcs.usda.gov} and query examples can be found at \url{http://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}. A library of query examples can be found at \url{https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=SDA-SQL_Library_Home}. - -SSURGO (detailed soil survey) and STATSGO (generalized soil survey) data are stored together within SDA. This means that queries that don't specify an area symbol may result in a mixture of SSURGO and STATSGO records. See the examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for details. -} - -\value{A dataframe containing the results. NULL is retutned when queries result in 0 matches rows.} -\author{D.E. 
Beaudette}
-\note{This function requires the `httr`, `jsonlite`, and `XML` packages}
-
-\seealso{\code{\link{mapunit_geom_by_ll_bbox}}}
-
-\examples{
-\donttest{
-
-  ## get SSURGO export date for all soil survey areas in California
-  # there is no need to filter STATSGO
-  # because we are filtering on SSURGO areasymbols
-  q <- "SELECT areasymbol, saverest FROM sacatalog WHERE areasymbol LIKE 'CA\%';"
-  x <- SDA_query(q)
-  head(x)
-
-
-  ## get SSURGO component data associated with the
-  ## Amador series / major component only
-  # this query must explicitly filter out STATSGO data
-  q <- "SELECT cokey, compname, comppct_r FROM legend\n
-  INNER JOIN mapunit mu ON mu.lkey = legend.lkey\n
-  INNER JOIN component co ON mu.mukey = co.mukey\n
-  WHERE legend.areasymbol != 'US' AND compname = 'Amador';"
-
-  res <- SDA_query(q)
-  str(res)
-
-
-  ## get component-level data for a specific soil survey area (Yolo county, CA)
-  # there is no need to filter STATSGO because the query contains
-  # an implicit selection of SSURGO data by areasymbol
-  q <- "SELECT \n
-  component.mukey, cokey, comppct_r, compname, taxclname, \n
-  taxorder, taxsuborder, taxgrtgroup, taxsubgrp \n
-  FROM legend \n
-  INNER JOIN mapunit ON mapunit.lkey = legend.lkey \n
-  LEFT OUTER JOIN component ON component.mukey = mapunit.mukey \n
-  WHERE legend.areasymbol = 'CA113' ;"
-
-  res <- SDA_query(q)
-  str(res)
-
-
-  ## get tabular data based on result from spatial query
-  # there is no need to filter STATSGO because
-  # SDA_Get_Mukey_from_intersection_with_WktWgs84() implies SSURGO
-  #
-  # requires raster and rgeos packages because raster is suggested
-  # and rgeos is additional
-  if(require(raster) & require(rgeos)) {
-    # text -> bbox -> WKT
-    # xmin, xmax, ymin, ymax
-    b <- c(-120.9, -120.8, 37.7, 37.8)
-    p <- writeWKT(as(extent(b), 'SpatialPolygons'))
-    q <- paste0("SELECT mukey, cokey, compname, comppct_r FROM component \n
-    WHERE mukey IN (SELECT DISTINCT mukey FROM\n
-    SDA_Get_Mukey_from_intersection_with_WktWgs84('"
-    , p, "')) ORDER BY mukey, cokey, comppct_r DESC")
-
-    x <- SDA_query(q)
-    str(x)
-  }
-}
-}
-
-\keyword{manip}
-
+\name{SDA_query}
+\alias{SDA_query}
+\alias{makeChunks}
+\alias{format_SQL_in_statement}
+
+\title{Soil Data Access Query}
+\description{Submit a query to the Soil Data Access (SDA) website in SQL and get the results as a dataframe.}
+
+\usage{
+SDA_query(q)
+makeChunks(ids, size=100)
+format_SQL_in_statement(x)
+}
+
+\arguments{
+  \item{q}{a valid T-SQL query surrounded by double quotes}
+  \item{ids}{vector of IDs for chunking; only the length is used, not the contents}
+  \item{size}{target chunk size}
+  \item{x}{character vector to be packed into an SQL `IN` statement}
+}
+
+\details{The SDA website can be found at \url{http://sdmdataaccess.nrcs.usda.gov} and query examples can be found at \url{http://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}. A library of query examples can be found at \url{https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=SDA-SQL_Library_Home}.
+
+SSURGO (detailed soil survey) and STATSGO (generalized soil survey) data are stored together within SDA. This means that queries that don't specify an area symbol may result in a mixture of SSURGO and STATSGO records. See the examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for details.
+}
+
+\value{A dataframe containing the results. NULL is returned when a query matches 0 rows.}
+\author{D.E.
Beaudette} +\note{This function requires the `httr`, `jsonlite`, and `XML` packages} + +\seealso{\code{\link{mapunit_geom_by_ll_bbox}}} + +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + ## get SSURGO export date for all soil survey areas in California + # there is no need to filter STATSGO + # because we are filtering on SSURGO areasymbols + q <- "SELECT areasymbol, saverest FROM sacatalog WHERE areasymbol LIKE 'CA\%';" + x <- SDA_query(q) + head(x) + + + ## get SSURGO component data associated with the + ## Amador series / major component only + # this query must explicitly filter out STATSGO data + q <- "SELECT cokey, compname, comppct_r FROM legend\n + INNER JOIN mapunit mu ON mu.lkey = legend.lkey\n + INNER JOIN component co ON mu.mukey = co.mukey\n + WHERE legend.areasymbol != 'US' AND compname = 'Amador';" + + res <- SDA_query(q) + str(res) + + + ## get component-level data for a specific soil survey area (Yolo county, CA) + # there is no need to filter STATSGO because the query contains + # an implicit selection of SSURGO data by areasymbol + q <- "SELECT \n + component.mukey, cokey, comppct_r, compname, taxclname, \n + taxorder, taxsuborder, taxgrtgroup, taxsubgrp \n + FROM legend \n + INNER JOIN mapunit ON mapunit.lkey = legend.lkey \n + LEFT OUTER JOIN component ON component.mukey = mapunit.mukey \n + WHERE legend.areasymbol = 'CA113' ;" + + res <- SDA_query(q) + str(res) + + + ## get tabular data based on result from spatial query + # there is no need to filter STATSGO because + # SDA_Get_Mukey_from_intersection_with_WktWgs84() implies SSURGO + # + # requires raster and rgeos packages because raster is suggested + # and rgeos is additional + if(require(raster) & require(rgeos)) { + # text -> bbox -> WKT + # xmin, xmax, ymin, ymax + b <- c(-120.9, -120.8, 37.7, 37.8) + p <- writeWKT(as(extent(b), 'SpatialPolygons')) + q <- paste0("SELECT mukey, cokey, compname, comppct_r FROM component \n + WHERE mukey IN (SELECT DISTINCT mukey FROM\n + SDA_Get_Mukey_from_intersection_with_WktWgs84('" + , p, "')) ORDER BY mukey, cokey, comppct_r DESC") + + x <- SDA_query(q) + str(x) + } +} +} +} + +\keyword{manip} + diff --git a/man/SSURGO_spatial_query.Rd b/man/SSURGO_spatial_query.Rd index c2a379e8..f569f1d9 100644 --- a/man/SSURGO_spatial_query.Rd +++ b/man/SSURGO_spatial_query.Rd @@ -1,37 +1,41 @@ -\name{SoilWeb_spatial_query} -\alias{SoilWeb_spatial_query} - -\title{Get SSURGO Data via Spatial Query} - -\description{Get SSURGO Data via Spatial Query to SoilWeb} -\usage{ -SoilWeb_spatial_query(bbox = NULL, coords = NULL, what = "mapunit", source = "soilweb") -} - -\arguments{ - \item{bbox}{a bounding box in WGS84 geographic coordinates, see examples} - \item{coords}{a coordinate pair in WGS84 geographic coordinates, see examples} - \item{what}{data to query, currently ignored} - \item{source}{the data source, currently ignored} -} - -\note{This function should be considered experimental; arguments, results, and side-effects could change at any time. SDA now supports spatial queries, consider using \code{\link{SDA_query_features}} instead.} - -\details{Data are currently available from SoilWeb. These data are a snapshot of the "official" data. The snapshot date is encoded in the "soilweb_last_update" column in the function return value. 
Planned updates to this function will include a switch to determine the data source: "official" data via USDA-NRCS servers, or a "snapshot" via SoilWeb.} - -\value{The data returned from this function will depend on the query style. See examples below.} - -\author{D.E. Beaudette} - -\examples{ -\donttest{ -# query by bbox -SoilWeb_spatial_query(bbox=c(-122.05, 37, -122, 37.05)) - -# query by coordinate pair -SoilWeb_spatial_query(coords=c(-121, 38)) -} -} - -\keyword{manip} - +\name{SoilWeb_spatial_query} +\alias{SoilWeb_spatial_query} + +\title{Get SSURGO Data via Spatial Query} + +\description{Get SSURGO Data via Spatial Query to SoilWeb} +\usage{ +SoilWeb_spatial_query(bbox = NULL, coords = NULL, what = "mapunit", source = "soilweb") +} + +\arguments{ + \item{bbox}{a bounding box in WGS84 geographic coordinates, see examples} + \item{coords}{a coordinate pair in WGS84 geographic coordinates, see examples} + \item{what}{data to query, currently ignored} + \item{source}{the data source, currently ignored} +} + +\note{This function should be considered experimental; arguments, results, and side-effects could change at any time. SDA now supports spatial queries, consider using \code{\link{SDA_query_features}} instead.} + +\details{Data are currently available from SoilWeb. These data are a snapshot of the "official" data. The snapshot date is encoded in the "soilweb_last_update" column in the function return value. Planned updates to this function will include a switch to determine the data source: "official" data via USDA-NRCS servers, or a "snapshot" via SoilWeb.} + +\value{The data returned from this function will depend on the query style. See examples below.} + +\author{D.E. Beaudette} + +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + # query by bbox + SoilWeb_spatial_query(bbox=c(-122.05, 37, -122, 37.05)) + + # query by coordinate pair + SoilWeb_spatial_query(coords=c(-121, 38)) +} +} +} + +\keyword{manip} + diff --git a/man/fetchHenry.Rd b/man/fetchHenry.Rd index 01bcaa4e..5cc5d657 100644 --- a/man/fetchHenry.Rd +++ b/man/fetchHenry.Rd @@ -1,60 +1,64 @@ -\name{fetchHenry} -\alias{fetchHenry} -\alias{month2season} -\alias{summarizeSoilTemperature} -\alias{HenryTimeLine} - -\title{Download Data from the Henry Mount Soil Temperature and Water Database} - -\description{This function is a front-end to the REST query functionality of the Henry Mount Soil Temperature and Water Database.} - -\usage{ -fetchHenry(what='all', usersiteid = NULL, project = NULL, sso = NULL, -gran = "day", start.date = NULL, stop.date = NULL, -pad.missing.days = TRUE, soiltemp.summaries = TRUE) -} - -\arguments{ - \item{what}{type of data to return: 'sensors': sensor metadata only | 'soiltemp': sensor metadata + soil temperature data | 'soilVWC': sensor metadata + soil moisture data | 'airtemp': sensor metadata + air temperature data | 'waterlevel': sensor metadata + water level data |'all': sensor metadata + all sensor data} - \item{usersiteid}{(optional) filter results using a NASIS user site ID} - \item{project}{(optional) filter results using a project ID} - \item{sso}{(optional) filter results using a soil survey office code} - \item{gran}{data granularity: "day", "week", "month", "year"; returned data are averages} - \item{start.date}{(optional) starting date filter} - \item{stop.date}{(optional) ending date filter} - \item{pad.missing.days}{should missing data ("day" granularity) be filled with NA? 
see details} - \item{soiltemp.summaries}{should soil temperature ("day" granularity only) be summarized? see details} -} - -\details{Filling missing days with NA is useful for computing and index of how complete the data are, and for estimating (mostly) unbiased MAST and seasonal mean soil temperatures. Summaries are computed by first averaging over Julian day, then averaging over all days of the year (MAST) or just those days that occur within "summer" or "winter". This approach makes it possible to estimate summaries in the presence of missing data. The quality of summaries should be weighted by the number of "functional years" (number of years with non-missing data after combining data by Julian day) and "complete years" (number of years of data with >= 365 days of non-missing data).} - -\value{a list containing: - \item{sensors}{a \code{SpatialPointsDataFrame} object containing site-level information} - \item{soiltemp}{a \code{data.frame} object containing soil temperature timeseries data} - \item{soilVWC}{a \code{data.frame} object containing soil moisture timeseries data} - \item{airtemp}{a \code{data.frame} object containing air temperature timeseries data} - \item{waterlevel}{a \code{data.frame} object containing water level timeseries data} -} - -\author{D.E. Beaudette} -\note{This function and the back-end database are very much a work in progress.} - -\seealso{\code{\link{fetchSCAN}}} -\examples{ -\donttest{ -library(lattice) - -# get CA630 data as daily averages -x <- fetchHenry(project='CA630', gran = 'day') - -# inspect data gaps -levelplot(factor(!is.na(sensor_value)) ~ doy * factor(year) | name, -data=x$soiltemp, col.regions=c('grey', 'RoyalBlue'), cuts=1, -colorkey=FALSE, as.table=TRUE, scales=list(alternating=3), -par.strip.text=list(cex=0.75), strip=strip.custom(bg='yellow'), -xlab='Julian Day', ylab='Year') -} -} - -\keyword{manip} - +\name{fetchHenry} +\alias{fetchHenry} +\alias{month2season} +\alias{summarizeSoilTemperature} +\alias{HenryTimeLine} + +\title{Download Data from the Henry Mount Soil Temperature and Water Database} + +\description{This function is a front-end to the REST query functionality of the Henry Mount Soil Temperature and Water Database.} + +\usage{ +fetchHenry(what='all', usersiteid = NULL, project = NULL, sso = NULL, +gran = "day", start.date = NULL, stop.date = NULL, +pad.missing.days = TRUE, soiltemp.summaries = TRUE) +} + +\arguments{ + \item{what}{type of data to return: 'sensors': sensor metadata only | 'soiltemp': sensor metadata + soil temperature data | 'soilVWC': sensor metadata + soil moisture data | 'airtemp': sensor metadata + air temperature data | 'waterlevel': sensor metadata + water level data |'all': sensor metadata + all sensor data} + \item{usersiteid}{(optional) filter results using a NASIS user site ID} + \item{project}{(optional) filter results using a project ID} + \item{sso}{(optional) filter results using a soil survey office code} + \item{gran}{data granularity: "day", "week", "month", "year"; returned data are averages} + \item{start.date}{(optional) starting date filter} + \item{stop.date}{(optional) ending date filter} + \item{pad.missing.days}{should missing data ("day" granularity) be filled with NA? see details} + \item{soiltemp.summaries}{should soil temperature ("day" granularity only) be summarized? see details} +} + +\details{Filling missing days with NA is useful for computing and index of how complete the data are, and for estimating (mostly) unbiased MAST and seasonal mean soil temperatures. 
Summaries are computed by first averaging over Julian day, then averaging over all days of the year (MAST) or just those days that occur within "summer" or "winter". This approach makes it possible to estimate summaries in the presence of missing data. The quality of summaries should be weighted by the number of "functional years" (number of years with non-missing data after combining data by Julian day) and "complete years" (number of years of data with >= 365 days of non-missing data).} + +\value{a list containing: + \item{sensors}{a \code{SpatialPointsDataFrame} object containing site-level information} + \item{soiltemp}{a \code{data.frame} object containing soil temperature timeseries data} + \item{soilVWC}{a \code{data.frame} object containing soil moisture timeseries data} + \item{airtemp}{a \code{data.frame} object containing air temperature timeseries data} + \item{waterlevel}{a \code{data.frame} object containing water level timeseries data} +} + +\author{D.E. Beaudette} +\note{This function and the back-end database are very much a work in progress.} + +\seealso{\code{\link{fetchSCAN}}} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet() & + require(lattice)) { + + # get CA630 data as daily averages + x <- fetchHenry(project='CA630', gran = 'day') + + # inspect data gaps + levelplot(factor(!is.na(sensor_value)) ~ doy * factor(year) | name, + data=x$soiltemp, col.regions=c('grey', 'RoyalBlue'), cuts=1, + colorkey=FALSE, as.table=TRUE, scales=list(alternating=3), + par.strip.text=list(cex=0.75), strip=strip.custom(bg='yellow'), + xlab='Julian Day', ylab='Year') + +} +} +} + +\keyword{manip} + diff --git a/man/fetchKSSL.Rd b/man/fetchKSSL.Rd index fd35c386..1c6cefe0 100644 --- a/man/fetchKSSL.Rd +++ b/man/fetchKSSL.Rd @@ -1,112 +1,115 @@ -\name{fetchKSSL} -\alias{fetchKSSL} -\title{Fetch KSSL Data} -\description{Get soil characterization and morphologic data via BBOX, MLRA, or series name query, from the KSSL database.} - -\usage{fetchKSSL(series=NULL, bbox=NULL, mlra=NULL, pedlabsampnum=NULL, -pedon_id=NULL, pedon_key=NULL, returnMorphologicData=FALSE, simplifyColors=FALSE)} - -\arguments{ - \item{series}{a single soil series name, case insensitive} - \item{bbox}{a bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}} - \item{mlra}{a single MLRA ID, e.g. "18" or "22A"} - \item{pedlabsampnum}{a single KSSL pedon lab sample number} - \item{pedon_id}{a single user pedon ID} - \item{pedon_key}{a single KSSL internal pedon ID} - \item{returnMorphologicData}{optionally request basic morphologic data, see details section} - \item{simplifyColors}{simplify colors (from morphologic data) and join with horizon data} -} - - - -\details{This is an experimental interface to a subset for the most commonly used data from a snapshot of KSSL (lab characterization) and NASIS (morphologic) data. The snapshots were last updated September 2018 (KSSL / NASIS). - -Series-queries are case insensitive. Series name is based on the "correlated as" field (from KSSL snapshot) when present. The "sampled as" classification was promoted to "correlated as" if the "correlated as" classification was missing. - -When \code{returnMorphologicData} is TRUE, the resulting object is a list. The standard output from \code{fetchKSSL} (\code{SoilProfileCollection} object) is stored in the named element "SPC". The additional elements are basic morphologic data: horizon colors, rock fragments, pores, and structure. 
There is a 1:many relationship between the horizon data in "SPC" and the additional dataframes in \code{morph}. See examples for ideas on how to "flatten" these tables. - -Setting \code{simplifyColors=TRUE} will automatically flatten the soil color data and join to horizon level attributes. - -Function arguments (\code{series}, \code{mlra}, etc.) are NOT vectorized: the first element of a vector will be used when supplied as a filter. See the \href{http://ncss-tech.github.io/AQP/soilDB/KSSL-demo.html}{fetchKSSL tutorial} for ideas on how to iterate over a set of IDs. -) -} - -\value{a \code{SoilProfileCollection} object when \code{returnMorphologicData} is FALSE, otherwise a list.} - -\author{D.E. Beaudette} -\note{SoilWeb maintains a snapshot of these KSSL and NASIS data. The SoilWeb snapshot was developed using methods described here: \url{https://github.com/dylanbeaudette/process-kssl-snapshot}. Please use the link below for the live data.} - -\references{ -\url{http://ncsslabdatamart.sc.egov.usda.gov/} -} - -\seealso{\code{\link{fetchOSD}}} -\examples{ -\donttest{ -# search by series name -s <- fetchKSSL(series='auburn') - -# search by bounding-box -# s <- fetchKSSL(bbox=c(-120, 37, -122, 38)) - -# how many pedons -length(s) - -# plot -if(requireNamespace("sp")) { - par(mar=c(0,0,0,0)) - sp::plot(s, name='hzn_desgn', max.depth=150) -} -## -## morphologic data -## - -library(soilDB) -library(aqp) -library(plyr) -library(reshape2) - - -# get lab and morphologic data -s <- fetchKSSL(series='auburn', returnMorphologicData = TRUE) - -# extract SPC -pedons <- s$SPC - -## simplify color data manually -s.colors <- simplifyColorData(s$morph$phcolor, id.var = 'labsampnum', wt='colorpct') - -# merge color data into SPC -h <- horizons(pedons) -h <- join(h, s.colors, by='labsampnum', type='left', match='first') -horizons(pedons) <- h - -# check -par(mar=c(0,0,0,0)) -plot(pedons, color='moist_soil_color', print.id=FALSE) - -## automatically simplify color data -s <- fetchKSSL(series='auburn', returnMorphologicData = TRUE, simplifyColors=TRUE) - -# check -par(mar=c(0,0,0,0)) -plot(pedons, color='moist_soil_color', print.id=FALSE) - - -# simplify fragment data -s.frags <- simplifyFragmentData(s$morph$phfrags, id.var='labsampnum') - -# merge fragment data into SPC -h <- horizons(pedons) -h <- join(h, s.frags, by='labsampnum', type='left', match='first') -horizons(pedons) <- h - - -# check -par(mar=c(0,0,3,0)) -plot(pedons, color='total_frags_pct', print.id=FALSE) - -} -} - -\keyword{utilities} +\name{fetchKSSL} +\alias{fetchKSSL} +\title{Fetch KSSL Data} +\description{Get soil characterization and morphologic data via BBOX, MLRA, or series name query, from the KSSL database.} + +\usage{fetchKSSL(series=NULL, bbox=NULL, mlra=NULL, pedlabsampnum=NULL, +pedon_id=NULL, pedon_key=NULL, returnMorphologicData=FALSE, simplifyColors=FALSE)} + +\arguments{ + \item{series}{a single soil series name, case insensitive} + \item{bbox}{a bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}} + \item{mlra}{a single MLRA ID, e.g. 
"18" or "22A"} + \item{pedlabsampnum}{a single KSSL pedon lab sample number} + \item{pedon_id}{a single user pedon ID} + \item{pedon_key}{a single KSSL internal pedon ID} + \item{returnMorphologicData}{optionally request basic morphologic data, see details section} + \item{simplifyColors}{simplify colors (from morphologic data) and join with horizon data} +} + + + +\details{This is an experimental interface to a subset for the most commonly used data from a snapshot of KSSL (lab characterization) and NASIS (morphologic) data. The snapshots were last updated September 2018 (KSSL / NASIS). + +Series-queries are case insensitive. Series name is based on the "correlated as" field (from KSSL snapshot) when present. The "sampled as" classification was promoted to "correlated as" if the "correlated as" classification was missing. + +When \code{returnMorphologicData} is TRUE, the resulting object is a list. The standard output from \code{fetchKSSL} (\code{SoilProfileCollection} object) is stored in the named element "SPC". The additional elements are basic morphologic data: horizon colors, rock fragments, pores, and structure. There is a 1:many relationship between the horizon data in "SPC" and the additional dataframes in \code{morph}. See examples for ideas on how to "flatten" these tables. + +Setting \code{simplifyColors=TRUE} will automatically flatten the soil color data and join to horizon level attributes. + +Function arguments (\code{series}, \code{mlra}, etc.) are NOT vectorized: the first element of a vector will be used when supplied as a filter. See the \href{http://ncss-tech.github.io/AQP/soilDB/KSSL-demo.html}{fetchKSSL tutorial} for ideas on how to iterate over a set of IDs. +) +} + +\value{a \code{SoilProfileCollection} object when \code{returnMorphologicData} is FALSE, otherwise a list.} + +\author{D.E. Beaudette} +\note{SoilWeb maintains a snapshot of these KSSL and NASIS data. The SoilWeb snapshot was developed using methods described here: \url{https://github.com/dylanbeaudette/process-kssl-snapshot}. 
Please use the link below for the live data.} + +\references{ +\url{http://ncsslabdatamart.sc.egov.usda.gov/} +} + +\seealso{\code{\link{fetchOSD}}} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + # search by series name + s <- fetchKSSL(series='auburn') + + # search by bounding-box + # s <- fetchKSSL(bbox=c(-120, 37, -122, 38)) + + # how many pedons + length(s) + + # plot + if(requireNamespace("sp")) { + par(mar=c(0,0,0,0)) + sp::plot(s, name='hzn_desgn', max.depth=150) + } + ## + ## morphologic data + ## + + library(soilDB) + library(aqp) + library(plyr) + library(reshape2) + + + # get lab and morphologic data + s <- fetchKSSL(series='auburn', returnMorphologicData = TRUE) + + # extract SPC + pedons <- s$SPC + + ## simplify color data manually + s.colors <- simplifyColorData(s$morph$phcolor, id.var = 'labsampnum', wt='colorpct') + + # merge color data into SPC + h <- horizons(pedons) + h <- join(h, s.colors, by='labsampnum', type='left', match='first') + horizons(pedons) <- h + + # check + par(mar=c(0,0,0,0)) + plot(pedons, color='moist_soil_color', print.id=FALSE) + + ## automatically simplify color data + s <- fetchKSSL(series='auburn', returnMorphologicData = TRUE, simplifyColors=TRUE) + + # check + par(mar=c(0,0,0,0)) + plot(pedons, color='moist_soil_color', print.id=FALSE) + + + # simplify fragment data + s.frags <- simplifyFragmentData(s$morph$phfrags, id.var='labsampnum') + + # merge fragment data into SPC + h <- horizons(pedons) + h <- join(h, s.frags, by='labsampnum', type='left', match='first') + horizons(pedons) <- h + + + # check + par(mar=c(0,0,3,0)) + plot(pedons, color='total_frags_pct', print.id=FALSE) +} +} +} + +\keyword{utilities} diff --git a/man/fetchNASISWebReport.Rd b/man/fetchNASISWebReport.Rd index ca67b021..7b3a1bb4 100644 --- a/man/fetchNASISWebReport.Rd +++ b/man/fetchNASISWebReport.Rd @@ -75,10 +75,11 @@ get_sitesoilmoist_from_NASISWebReport(usiteid) \donttest{ -if ( - require("aqp") & - require("ggplot2") & - require("gridExtra") +if (requireNamespace("curl") & + curl::has_internet() & + require("aqp") & + require("ggplot2") & + require("gridExtra") ) { # query soil components by projectname test = fetchNASISWebReport( diff --git a/man/fetchOSD.Rd b/man/fetchOSD.Rd index aa855d0d..60cfe2af 100644 --- a/man/fetchOSD.Rd +++ b/man/fetchOSD.Rd @@ -1,79 +1,85 @@ -\name{fetchOSD} -\alias{fetchOSD} - -\title{Fetch Data by Soil Series Name} - -\description{This functions fetches a varity of data associated with named soil series, extracted from the USDA-NRCS Official Series Description text files and detailed soil survey (SSURGO). These data are periodically updated and made available via SoilWeb.} - -\usage{ -fetchOSD(soils, colorState = 'moist', extended=FALSE) -} - -\arguments{ - \item{soils}{a character vector of named soil series, case insensitive} - \item{colorState}{color state for horizon soil color visualization: "moist" or "dry"} - \item{extended}{if TRUE additional soil series summary data are returned, see details} -} - -\details{ -The standard set of "site" and "horizon" data are returned as a \code{SoilProfileCollection} object (\code{extended=FALSE}. The "extended" suite of summary data can be requested by setting \code{extended=TRUE}. 
The resulting object will be a \code{list} with the following elements:)
-
-\describe{
-  \item{SPC}{\code{SoilProfileCollection} containing standards "site" and "horizon" data}
-  \item{competing}{competing soil series from the SC database snapshot}
-  \item{geomcomp}{empirical probabilities for geomorphic component, derrived from the current SSURGO snapshot}
-  \item{hillpos}{empirical probabilities for hillslope position, derrived from the current SSURGO snapshot}
-  \item{mtnpos}{empirical probabilities for mountain slope position, derrived from the current SSURGO snapshot}
-  \item{pmkind}{empirical probabilities for parent material kind, derrived from the current SSURGO snapshot}
-  \item{pmorigin}{empirical probabilities for parent material origin, derrived from the current SSURGO snapshot}
-  \item{mlra}{empirical MLRA membership values, derrived from the current SSURGO snapshot}
-  \item{climate}{experimental climate summaries from PRISM stack}
-  \item{metadata}{metadata associated with SoilWeb cached summaries}
-  }
-
-Further details pending.
-}
-
-\value{a \code{SoilProfileCollection} object containing basic soil morphology and taxonomic information.}
-
-\references{
-USDA-NRCS OSD search tools: \url{http://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
-}
-
-\author{D.E. Beaudette}
-
-\note{SoilWeb maintains a snapshot of the Official Series Description data. Please use the link above for the live data.}
-
-\seealso{
-\link{OSDquery}, \link{siblings}
-}
-
-\examples{
-\donttest{
-# soils of interest
-s.list <- c('musick', 'cecil', 'drummer', 'amador', 'pentz',
-'reiff', 'san joaquin', 'montpellier', 'grangeville', 'pollasky', 'ramona')
-
-# fetch and convert data into an SPC
-s.moist <- fetchOSD(s.list, colorState='moist')
-s.dry <- fetchOSD(s.list, colorState='dry')
-
-# plot profiles
-# moist soil colors
-if(require("aqp")) {
-
-  par(mar=c(0,0,0,0), mfrow=c(2,1))
-  plot(s.moist, name='hzname', cex.names=0.85, axis.line.offset=-4)
-  plot(s.dry, name='hzname', cex.names=0.85, axis.line.offset=-4)
-
-  # extended mode: return a list with SPC + summary tables
-  x <- fetchOSD(s.list, extended = TRUE, colorState = 'dry')
-
-  par(mar=c(0,0,1,1))
-  plot(x$SPC)
-}
-str(x, 1)
-}
-}
-
-\keyword{manip}
+\name{fetchOSD}
+\alias{fetchOSD}
+
+\title{Fetch Data by Soil Series Name}
+
+\description{This function fetches a variety of data associated with named soil series, extracted from the USDA-NRCS Official Series Description text files and detailed soil survey (SSURGO). These data are periodically updated and made available via SoilWeb.}
+
+\usage{
+fetchOSD(soils, colorState = 'moist', extended=FALSE)
+}
+
+\arguments{
+  \item{soils}{a character vector of named soil series, case insensitive}
+  \item{colorState}{color state for horizon soil color visualization: "moist" or "dry"}
+  \item{extended}{if TRUE additional soil series summary data are returned, see details}
+}
+
+\details{
+The standard set of "site" and "horizon" data are returned as a \code{SoilProfileCollection} object (\code{extended=FALSE}). The "extended" suite of summary data can be requested by setting \code{extended=TRUE}.
The resulting object will be a \code{list} with the following elements:
+
+\describe{
+  \item{SPC}{\code{SoilProfileCollection} containing standard "site" and "horizon" data}
+  \item{competing}{competing soil series from the SC database snapshot}
+  \item{geomcomp}{empirical probabilities for geomorphic component, derived from the current SSURGO snapshot}
+  \item{hillpos}{empirical probabilities for hillslope position, derived from the current SSURGO snapshot}
+  \item{mtnpos}{empirical probabilities for mountain slope position, derived from the current SSURGO snapshot}
+  \item{pmkind}{empirical probabilities for parent material kind, derived from the current SSURGO snapshot}
+  \item{pmorigin}{empirical probabilities for parent material origin, derived from the current SSURGO snapshot}
+  \item{mlra}{empirical MLRA membership values, derived from the current SSURGO snapshot}
+  \item{climate}{experimental climate summaries from PRISM stack}
+  \item{metadata}{metadata associated with SoilWeb cached summaries}
+  }
+
+Further details pending.
+}
+
+\value{a \code{SoilProfileCollection} object containing basic soil morphology and taxonomic information.}
+
+\references{
+USDA-NRCS OSD search tools: \url{http://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
+}
+
+\author{D.E. Beaudette}
+
+\note{SoilWeb maintains a snapshot of the Official Series Description data. Please use the link above for the live data.}
+
+\seealso{
+\link{OSDquery}, \link{siblings}
+}
+
+\examples{
+\donttest{
+if(requireNamespace("curl") &
+   curl::has_internet()) {
+
+  # soils of interest
+  s.list <- c('musick', 'cecil', 'drummer', 'amador', 'pentz',
+  'reiff', 'san joaquin', 'montpellier', 'grangeville', 'pollasky', 'ramona')
+
+  # fetch and convert data into an SPC
+  s.moist <- fetchOSD(s.list, colorState='moist')
+  s.dry <- fetchOSD(s.list, colorState='dry')
+
+  # plot profiles
+  # moist soil colors
+  if(require("aqp")) {
+
+    par(mar=c(0,0,0,0), mfrow=c(2,1))
+    plot(s.moist, name='hzname',
+    cex.names=0.85, axis.line.offset=-4)
+    plot(s.dry, name='hzname',
+    cex.names=0.85, axis.line.offset=-4)
+
+    # extended mode: return a list with SPC + summary tables
+    x <- fetchOSD(s.list, extended = TRUE, colorState = 'dry')
+
+    par(mar=c(0,0,1,1))
+    plot(x$SPC)
+    str(x, 1)
+  }
+}
+}
+}
+
+\keyword{manip}
diff --git a/man/fetchRaCA.Rd b/man/fetchRaCA.Rd
index fb9703b0..ac2e08af 100644
--- a/man/fetchRaCA.Rd
+++ b/man/fetchRaCA.Rd
@@ -1,60 +1,65 @@
-\name{fetchRaCA}
-\alias{fetchRaCA}
-\title{Fetch KSSL Data (EXPERIMENTAL)}
-\description{Get Rapid Carbon Assessment (RaCA) data via state, geographic bounding-box, RaCA site ID, or series query from the SoilWeb system.}
-\usage{fetchRaCA(series = NULL, bbox = NULL, state = NULL, rcasiteid = NULL, get.vnir = FALSE)}
-
-\arguments{
-  \item{series}{a soil series name, case insensitive}
-  \item{bbox}{a bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}, constrained to a 5-degree block}
-  \item{state}{a two-letter US state abbreviation, case insensitive}
-  \item{rcasiteid}{an RaCA site id (e.g. 'C1609C01')}
-  \item{get.vnir}{boolean, should associated VNIR spectra be downloaded?
(see details)} -} - -\value{ - \describe{ - \item{\code{pedons}:}{a \code{SoilProfileCollection} object containing site/pedon/horizon data} - \item{\code{trees}:}{a \code{data.frame} object containing tree DBH and height} - \item{\code{veg}:}{a \code{data.frame} object containing plant species} - \item{\code{stock}:}{a \code{data.frame} object containing carbon quantities (stocks) at standardized depths} - \item{\code{sample}:}{a \code{data.frame} object containing sample-level bulk density and soil organic carbon values} - \item{\code{spectra}:}{a numeric \code{matrix} containing VNIR reflectance spectra from 350--2500 nm} - } -} - -\details{The VNIR spectra associated with RaCA data are quite large [each gzip-compressed VNIR spectra record is about 6.6kb], so requests for these data are disabled by default. Note that VNIR spectra can only be queried by soil series or geographic BBOX.} - -\references{ - \url{http://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/?cid=nrcs142p2_054164} - \href{https://r-forge.r-project.org/scm/viewvc.php/*checkout*/docs/soilDB/RaCA-demo.html?root=aqp}{fetchRaCA() Tutorial} -} -\author{D.E. Beaudette, USDA-NRCS staff} - -\seealso{\code{\link{fetchOSD}}} -\examples{ -\donttest{ -if(require(aqp)) { - # search by series name - s <- fetchRaCA(series='auburn') - - # search by bounding-box - # s <- fetchRaCA(bbox=c(-120, 37, -122, 38)) - - # check structure - str(s, 1) - - # extract pedons - p <- s$pedons - - # how many pedons - length(p) - - # plot - par(mar=c(0,0,0,0)) - plot(p, name='hzn_desgn', max.depth=150) -} -} -} - -\keyword{utilities} +\name{fetchRaCA} +\alias{fetchRaCA} +\title{Fetch KSSL Data (EXPERIMENTAL)} +\description{Get Rapid Carbon Assessment (RaCA) data via state, geographic bounding-box, RaCA site ID, or series query from the SoilWeb system.} +\usage{fetchRaCA(series = NULL, bbox = NULL, state = NULL, rcasiteid = NULL, get.vnir = FALSE)} + +\arguments{ + \item{series}{a soil series name, case insensitive} + \item{bbox}{a bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}, constrained to a 5-degree block} + \item{state}{a two-letter US state abbreviation, case insensitive} + \item{rcasiteid}{an RaCA site id (e.g. 'C1609C01')} + \item{get.vnir}{boolean, should associated VNIR spectra be downloaded? (see details)} +} + +\value{ + \describe{ + \item{\code{pedons}:}{a \code{SoilProfileCollection} object containing site/pedon/horizon data} + \item{\code{trees}:}{a \code{data.frame} object containing tree DBH and height} + \item{\code{veg}:}{a \code{data.frame} object containing plant species} + \item{\code{stock}:}{a \code{data.frame} object containing carbon quantities (stocks) at standardized depths} + \item{\code{sample}:}{a \code{data.frame} object containing sample-level bulk density and soil organic carbon values} + \item{\code{spectra}:}{a numeric \code{matrix} containing VNIR reflectance spectra from 350--2500 nm} + } +} + +\details{The VNIR spectra associated with RaCA data are quite large [each gzip-compressed VNIR spectra record is about 6.6kb], so requests for these data are disabled by default. Note that VNIR spectra can only be queried by soil series or geographic BBOX.} + +\references{ + \url{http://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/?cid=nrcs142p2_054164} + \href{https://r-forge.r-project.org/scm/viewvc.php/*checkout*/docs/soilDB/RaCA-demo.html?root=aqp}{fetchRaCA() Tutorial} +} +\author{D.E. 
Beaudette, USDA-NRCS staff} + +\seealso{\code{\link{fetchOSD}}} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + if(require(aqp)) { + + # search by series name + s <- fetchRaCA(series='auburn') + + # search by bounding-box + # s <- fetchRaCA(bbox=c(-120, 37, -122, 38)) + + # check structure + str(s, 1) + + # extract pedons + p <- s$pedons + + # how many pedons + length(p) + + # plot + par(mar=c(0,0,0,0)) + plot(p, name='hzn_desgn', max.depth=150) +} +} +} +} + +\keyword{utilities} diff --git a/man/fetchSCAN.Rd b/man/fetchSCAN.Rd index c51a32b6..4d4169cc 100644 --- a/man/fetchSCAN.Rd +++ b/man/fetchSCAN.Rd @@ -1,50 +1,54 @@ -\name{fetchSCAN} -\alias{fetchSCAN} -\alias{SCAN_sensor_metadata} -\alias{SCAN_site_metadata} - - -\title{Fetch SCAN Data} -\description{Query soil/climate data from USDA-NRCS SCAN Stations (experimental)} - -\usage{ -# get SCAN data -fetchSCAN(site.code, year, report='SCAN', req=NULL) - -# get sensor metadata for one or more sites -SCAN_sensor_metadata(site.code) - -# get site metadata for one or more sites -SCAN_site_metadata(site.code) -} - -\arguments{ - \item{site.code}{a vector of site codes} - \item{year}{a vector of years} - \item{report}{report name, single value only} - \item{req}{list of SCAN request parameters, for backwards-compatibility only} -} - -\details{See \href{http://ncss-tech.github.io/AQP/soilDB/fetchSCAN-demo.html}{the fetchSCAN tutorial for details.} These functions require the `httr` and `rvest` libraries.} - -\note{\code{SCAN_sensor_metadata()} is known to crash on 32bit R / libraries (Windows).} - -\value{a \code{data.frame} object} -\references{https://www.wcc.nrcs.usda.gov/index.html} -\author{D.E. Beaudette} - -\examples{ -\donttest{ -# get data: new interface -x <- fetchSCAN(site.code=c(356, 2072), year=c(2015, 2016)) -str(x) - -# get sensor metadata -m <- SCAN_sensor_metadata(site.code=c(356, 2072)) - -# get site metadata -m <- SCAN_site_metadata(site.code=c(356, 2072)) -} -} -\keyword{manip} - +\name{fetchSCAN} +\alias{fetchSCAN} +\alias{SCAN_sensor_metadata} +\alias{SCAN_site_metadata} + + +\title{Fetch SCAN Data} +\description{Query soil/climate data from USDA-NRCS SCAN Stations (experimental)} + +\usage{ +# get SCAN data +fetchSCAN(site.code, year, report='SCAN', req=NULL) + +# get sensor metadata for one or more sites +SCAN_sensor_metadata(site.code) + +# get site metadata for one or more sites +SCAN_site_metadata(site.code) +} + +\arguments{ + \item{site.code}{a vector of site codes} + \item{year}{a vector of years} + \item{report}{report name, single value only} + \item{req}{list of SCAN request parameters, for backwards-compatibility only} +} + +\details{See \href{http://ncss-tech.github.io/AQP/soilDB/fetchSCAN-demo.html}{the fetchSCAN tutorial for details.} These functions require the `httr` and `rvest` libraries.} + +\note{\code{SCAN_sensor_metadata()} is known to crash on 32bit R / libraries (Windows).} + +\value{a \code{data.frame} object} +\references{https://www.wcc.nrcs.usda.gov/index.html} +\author{D.E. 
Beaudette} + +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + # get data: new interface + x <- fetchSCAN(site.code=c(356, 2072), year=c(2015, 2016)) + str(x) + + # get sensor metadata + m <- SCAN_sensor_metadata(site.code=c(356, 2072)) + + # get site metadata + m <- SCAN_site_metadata(site.code=c(356, 2072)) +} +} +} +\keyword{manip} + diff --git a/man/fetchSDA_component.Rd b/man/fetchSDA_component.Rd index 56da9319..96c90745 100644 --- a/man/fetchSDA_component.Rd +++ b/man/fetchSDA_component.Rd @@ -1,190 +1,189 @@ -\name{fetchSDA} -\alias{fetchSDA} -\alias{get_legend_from_SDA} -\alias{get_lmuaoverlap_from_SDA} -\alias{get_mapunit_from_SDA} -\alias{get_component_from_SDA} -\alias{get_chorizon_from_SDA} -\alias{get_cosoilmoist_from_SDA} -\alias{get_cointerp_from_SDA} - -\title{Download and Flatten Data from Soil Data Access} -\description{Functions to download and flatten commonly used tables and from Soil Data Access, and create soil profile collection objects (SPC).} -\usage{ -fetchSDA(WHERE = NULL, duplicates = FALSE, childs = TRUE, - nullFragsAreZero = TRUE, rmHzErrors = FALSE, - droplevels = TRUE, - stringsAsFactors = default.stringsAsFactors() - ) - -get_mapunit_from_SDA(WHERE = NULL, - droplevels = TRUE, - stringsAsFactors = default.stringsAsFactors() - ) - -get_component_from_SDA(WHERE = NULL, duplicates = FALSE, childs = TRUE, - droplevels = TRUE, - stringsAsFactors = default.stringsAsFactors() - ) - -get_chorizon_from_SDA(WHERE = NULL, duplicates = FALSE, childs = TRUE, - nullFragsAreZero = TRUE, - droplevels = TRUE, - stringsAsFactors = default.stringsAsFactors() - ) - -get_cosoilmoist_from_SDA(WHERE = NULL, duplicates = FALSE, impute = TRUE, - stringsAsFactors = default.stringsAsFactors() - ) - -} - - -\arguments{ - \item{WHERE}{text string formated as an SQL WHERE clause (default: FALSE)} - \item{duplicates}{logical; if TRUE a record is returned for each unique mukey (may be many per nationalmusym)} - \item{childs}{logical; if FALSE parent material and geomorphic child tables are not flattened and appended} - \item{impute}{replace missing (i.e. NULL) values with "Not_Populated" for categorical data, or the "RV" for numeric data or 201 cm if the "RV" is also NULL (default: TRUE)} - \item{nullFragsAreZero}{should fragment volumes of NULL be interpreted as 0? (default: TRUE), see details} - \item{rmHzErrors}{should pedons with horizonation errors be removed from the results? (default: FALSE)} - \item{droplevels}{logical: indicating whether to drop unused levels in classifying factors. This is useful when a class has large number of unused classes, which can waste space in tables and figures.} - \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} - } - - -\details{These functions return data from Soil Data Access with the use of a simple text string that formated as an SQL WHERE clause (e.g. \code{WHERE = "areasymbol = 'IN001'"}. All functions are SQL querys that wrap around \code{SDAquery()} and format the data for analysis. - -Beware SDA includes the data for both SSURGO and STATSGO2. The \code{areasymbol} for STATSGO2 is \code{US}. Therefore if data from just SSURGO is desired, set \code{WHERE = "areareasymbol != 'US'"}. 
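A one-line sketch of that STATSGO filter, reusing the Miami component query that appears later in these examples:

    # keep SSURGO only: STATSGO2 records carry areasymbol 'US'
    s <- get_component_from_SDA(WHERE = "compname = 'Miami' AND areasymbol != 'US'")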
- -If the duplicates argument is set to TRUE, duplicate components are returned. This is not necessary with data returned from NASIS, which has one unique national map unit. SDA has duplicate map national map units, one for each legend it exists in. - -The value of \code{nullFragsAreZero} will have a significant impact on the rock fragment fractions returned by \code{fetchSDA}. Set \code{nullFragsAreZero = FALSE} in those cases where there are many data-gaps and NULL rock fragment values should be interpretated as NULLs. Set \code{nullFragsAreZero = TRUE} in those cases where NULL rock fragment values should be interpreted as 0. - -} -\value{A dataframe or soil profile collection object.} -\author{Stephen Roecker} - - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - -\seealso{ -\link{SDA_query} -} - - -\examples{ -\donttest{ - - -if ( - require(aqp) & - require("ggplot2") & - require("gridExtra") & - require("viridis") -) { - - # query soil components by areasymbol and musym - test = fetchSDA(WHERE = "areasymbol = 'IN005' AND musym = 'MnpB2'") - - - # profile plot - plot(test) - - - # convert the data for depth plot - clay_slice = horizons(slice(test, 0:200 ~ claytotal_l + claytotal_r + claytotal_h)) - names(clay_slice) <- gsub("claytotal_", "", names(clay_slice)) - - om_slice = horizons(slice(test, 0:200 ~ om_l + om_r + om_h)) - names(om_slice) = gsub("om_", "", names(om_slice)) - - test2 = rbind(data.frame(clay_slice, var = "clay"), - data.frame(om_slice, var = "om") - ) - - h = merge(test2, site(test)[c("nationalmusym", "cokey", "compname", "comppct_r")], - by = "cokey", - all.x = TRUE - ) - - # depth plot of clay content by soil component - gg_comp <- function(x) { - ggplot(x) + - geom_line(aes(y = r, x = hzdept_r)) + - geom_line(aes(y = r, x = hzdept_r)) + - geom_ribbon(aes(ymin = l, ymax = h, x = hzdept_r), alpha = 0.2) + - xlim(200, 0) + - xlab("depth (cm)") + - facet_grid(var ~ nationalmusym + paste(compname, comppct_r)) + - coord_flip() - } - g1 <- gg_comp(subset(h, var == "clay")) - g2 <- gg_comp(subset(h, var == "om")) - - grid.arrange(g1, g2) - - - # query cosoilmoist (e.g. water table data) by mukey - x <- get_cosoilmoist_from_SDA(WHERE = "mukey = '1395352'") - - ggplot(x, aes(x = as.integer(month), y = dept_r, lty = status)) + - geom_rect(aes(xmin = as.integer(month), xmax = as.integer(month) + 1, - ymin = 0, ymax = max(x$depb_r), - fill = flodfreqcl)) + - geom_line(cex = 1) + - geom_point() + - geom_ribbon(aes(ymin = dept_l, ymax = dept_h), alpha = 0.2) + - ylim(max(x$depb_r), 0) + - xlab("month") + ylab("depth (cm)") + - scale_x_continuous(breaks = 1:12, labels = month.abb, name="Month") + - facet_wrap(~ paste0(compname, ' (', comppct_r , ')')) + - ggtitle(paste0(x$nationalmusym[1], - ': Water Table Levels from Component Soil Moisture Month Data')) - - - - # query all Miami major components - s <- get_component_from_SDA(WHERE = "compname = 'Miami' \n - AND majcompflag = 'Yes' AND areasymbol != 'US'") - - - # landform vs 3-D morphometry - test <- { - subset(s, ! is.na(landform) | ! is.na(geompos)) ->.; - split(., .$drainagecl, drop = TRUE) ->.; - lapply(., function(x) { - test = data.frame() - test = as.data.frame(table(x$landform, x$geompos)) - test$compname = x$compname[1] - test$drainagecl = x$drainagecl[1] - names(test)[1:2] <- c("landform", "geompos") - return(test) - }) ->.; - do.call("rbind", .) 
->.; - .[.$Freq > 0, ] ->.; - within(., { - landform = reorder(factor(landform), Freq, max) - geompos = reorder(factor(geompos), Freq, max) - geompos = factor(geompos, levels = rev(levels(geompos))) - }) ->.; - } - test$Freq2 <- cut(test$Freq, - breaks = c(0, 5, 10, 25, 50, 100, 150), - labels = c("<5", "5-10", "10-25", "25-50", "50-100", "100-150") - ) - ggplot(test, aes(x = geompos, y = landform, fill = Freq2)) + - geom_tile(alpha = 0.5) + facet_wrap(~ paste0(compname, "\n", drainagecl)) + - scale_fill_viridis(discrete = TRUE) + - theme(aspect.ratio = 1, axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1)) + - ggtitle("Landform vs 3-D Morphometry for Miami Major Components on SDA") - - -} - - - -} -} -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. -\keyword{manip} +\name{fetchSDA} +\alias{fetchSDA} +\alias{get_legend_from_SDA} +\alias{get_lmuaoverlap_from_SDA} +\alias{get_mapunit_from_SDA} +\alias{get_component_from_SDA} +\alias{get_chorizon_from_SDA} +\alias{get_cosoilmoist_from_SDA} +\alias{get_cointerp_from_SDA} + +\title{Download and Flatten Data from Soil Data Access} +\description{Functions to download and flatten commonly used tables and from Soil Data Access, and create soil profile collection objects (SPC).} +\usage{ +fetchSDA(WHERE = NULL, duplicates = FALSE, childs = TRUE, + nullFragsAreZero = TRUE, rmHzErrors = FALSE, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() + ) + +get_mapunit_from_SDA(WHERE = NULL, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() + ) + +get_component_from_SDA(WHERE = NULL, duplicates = FALSE, childs = TRUE, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() + ) + +get_chorizon_from_SDA(WHERE = NULL, duplicates = FALSE, childs = TRUE, + nullFragsAreZero = TRUE, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() + ) + +get_cosoilmoist_from_SDA(WHERE = NULL, duplicates = FALSE, impute = TRUE, + stringsAsFactors = default.stringsAsFactors() + ) + +} + + +\arguments{ + \item{WHERE}{text string formated as an SQL WHERE clause (default: FALSE)} + \item{duplicates}{logical; if TRUE a record is returned for each unique mukey (may be many per nationalmusym)} + \item{childs}{logical; if FALSE parent material and geomorphic child tables are not flattened and appended} + \item{impute}{replace missing (i.e. NULL) values with "Not_Populated" for categorical data, or the "RV" for numeric data or 201 cm if the "RV" is also NULL (default: TRUE)} + \item{nullFragsAreZero}{should fragment volumes of NULL be interpreted as 0? (default: TRUE), see details} + \item{rmHzErrors}{should pedons with horizonation errors be removed from the results? (default: FALSE)} + \item{droplevels}{logical: indicating whether to drop unused levels in classifying factors. This is useful when a class has large number of unused classes, which can waste space in tables and figures.} + \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} + } + + +\details{These functions return data from Soil Data Access with the use of a simple text string that formated as an SQL WHERE clause (e.g. \code{WHERE = "areasymbol = 'IN001'"}. 
All functions are SQL queries that wrap around \code{SDA_query()} and format the data for analysis.
+
+Beware SDA includes the data for both SSURGO and STATSGO2. The \code{areasymbol} for STATSGO2 is \code{US}. Therefore if data from just SSURGO is desired, set \code{WHERE = "areasymbol != 'US'"}.
+
+If the duplicates argument is set to TRUE, duplicate components are returned. This is not necessary with data returned from NASIS, which has one unique national map unit. SDA has duplicate national map units, one for each legend it exists in.
+
+The value of \code{nullFragsAreZero} will have a significant impact on the rock fragment fractions returned by \code{fetchSDA}. Set \code{nullFragsAreZero = FALSE} in those cases where there are many data-gaps and NULL rock fragment values should be interpreted as NULLs. Set \code{nullFragsAreZero = TRUE} in those cases where NULL rock fragment values should be interpreted as 0.
+
+}
+\value{A dataframe or soil profile collection object.}
+\author{Stephen Roecker}
+
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\link{SDA_query}
+}
+
+
+\examples{
+\donttest{
+
+
+if (requireNamespace("curl") &
+    curl::has_internet() &
+    require(aqp) &
+    require("ggplot2") &
+    require("gridExtra") &
+    require("viridis")
+) {
+
+  # query soil components by areasymbol and musym
+  test = fetchSDA(WHERE = "areasymbol = 'IN005' AND musym = 'MnpB2'")
+
+
+  # profile plot
+  plot(test)
+
+
+  # convert the data for depth plot
+  clay_slice = horizons(slice(test, 0:200 ~ claytotal_l + claytotal_r + claytotal_h))
+  names(clay_slice) <- gsub("claytotal_", "", names(clay_slice))
+
+  om_slice = horizons(slice(test, 0:200 ~ om_l + om_r + om_h))
+  names(om_slice) = gsub("om_", "", names(om_slice))
+
+  test2 = rbind(data.frame(clay_slice, var = "clay"),
+                data.frame(om_slice, var = "om")
+  )
+
+  h = merge(test2, site(test)[c("nationalmusym", "cokey", "compname", "comppct_r")],
+            by = "cokey",
+            all.x = TRUE
+  )
+
+  # depth plot of clay content by soil component
+  gg_comp <- function(x) {
+    ggplot(x) +
+      geom_line(aes(y = r, x = hzdept_r)) +
+      geom_line(aes(y = r, x = hzdept_r)) +
+      geom_ribbon(aes(ymin = l, ymax = h, x = hzdept_r), alpha = 0.2) +
+      xlim(200, 0) +
+      xlab("depth (cm)") +
+      facet_grid(var ~ nationalmusym + paste(compname, comppct_r)) +
+      coord_flip()
+  }
+  g1 <- gg_comp(subset(h, var == "clay"))
+  g2 <- gg_comp(subset(h, var == "om"))
+
+  grid.arrange(g1, g2)
+
+
+  # query cosoilmoist (e.g. water table data) by mukey
+  x <- get_cosoilmoist_from_SDA(WHERE = "mukey = '1395352'")
+
+  ggplot(x, aes(x = as.integer(month), y = dept_r, lty = status)) +
+    geom_rect(aes(xmin = as.integer(month), xmax = as.integer(month) + 1,
+                  ymin = 0, ymax = max(x$depb_r),
+                  fill = flodfreqcl)) +
+    geom_line(cex = 1) +
+    geom_point() +
+    geom_ribbon(aes(ymin = dept_l, ymax = dept_h), alpha = 0.2) +
+    ylim(max(x$depb_r), 0) +
+    xlab("month") + ylab("depth (cm)") +
+    scale_x_continuous(breaks = 1:12, labels = month.abb, name="Month") +
+    facet_wrap(~ paste0(compname, ' (', comppct_r , ')')) +
+    ggtitle(paste0(x$nationalmusym[1],
+    ': Water Table Levels from Component Soil Moisture Month Data'))
+
+
+
+  # query all Miami major components
+  s <- get_component_from_SDA(WHERE = "compname = 'Miami' \n
+       AND majcompflag = 'Yes' AND areasymbol != 'US'")
+
+
+  # landform vs 3-D morphometry
+  test <- {
+    subset(s, ! is.na(landform) | !
+    subset(s, ! is.na(landform) | ! is.na(geompos)) ->.;
+    split(., .$drainagecl, drop = TRUE) ->.;
+    lapply(., function(x) {
+      test = as.data.frame(table(x$landform, x$geompos))
+      test$compname = x$compname[1]
+      test$drainagecl = x$drainagecl[1]
+      names(test)[1:2] <- c("landform", "geompos")
+      return(test)
+    }) ->.;
+    do.call("rbind", .) ->.;
+    .[.$Freq > 0, ] ->.;
+    within(., {
+      landform = reorder(factor(landform), Freq, max)
+      geompos = reorder(factor(geompos), Freq, max)
+      geompos = factor(geompos, levels = rev(levels(geompos)))
+    }) ->.;
+  }
+  test$Freq2 <- cut(test$Freq,
+                    breaks = c(0, 5, 10, 25, 50, 100, 150),
+                    labels = c("<5", "5-10", "10-25", "25-50", "50-100", "100-150")
+                    )
+  ggplot(test, aes(x = geompos, y = landform, fill = Freq2)) +
+    geom_tile(alpha = 0.5) + facet_wrap(~ paste0(compname, "\n", drainagecl)) +
+    scale_fill_viridis(discrete = TRUE) +
+    theme(aspect.ratio = 1, axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1)) +
+    ggtitle("Landform vs 3-D Morphometry for Miami Major Components on SDA")
+
+
+}
+
+
+
+}
+}
+\keyword{manip}
diff --git a/man/fetchSDA_spatial.Rd b/man/fetchSDA_spatial.Rd
index bbb8020a..80fd179a 100644
--- a/man/fetchSDA_spatial.Rd
+++ b/man/fetchSDA_spatial.Rd
@@ -4,13 +4,8 @@
 \alias{fetchSDA_spatial}
 \title{Query SDA and Return Spatial Data}
 \usage{
-fetchSDA_spatial(
-  x,
-  by.col = "mukey",
-  method = "feature",
-  add.fields = NULL,
-  chunk.size = 10
-)
+fetchSDA_spatial(x, by.col = "mukey", method = "feature",
+                 add.fields = NULL, chunk.size = 10)
 }
 \arguments{
 \item{x}{A vector of MUKEYs or national mapunit symbols.}
@@ -19,12 +14,12 @@ fetchSDA_spatial(
 \item{method}{geometry result type: 'feature' returns polygons, 'bbox' returns the bounding box of each polygon, and 'point' returns a single point within each polygon.}
 
-\item{add.fields}{Column names from `mapunit` table to add to result. Must specify table name prefix as either `G` or `mapunit`.}
+\item{add.fields}{Column names from `mapunit` table to add to result. Must specify table name prefix `mapunit` before column name (e.g. `mapunit.muname`).}
 
-\item{chunk.size}{How many queries should spatial request be divided into? Necessary for large extents. Default: 10}
+\item{chunk.size}{How many queries should the spatial request be divided into? Necessary for large results. Default: 10}
 }
 \value{
-A SpatialPolygonsDataFrame corresponding to SDA spatial data for all MUKEYs / nmusyms requested. Default result contains MupolygonWktWgs84-derived geometry with attribute table containing `gid`, `mukey` and `nationalmusym`, additional fields in result are specified with `add.fields`.
+A Spatial*DataFrame corresponding to SDA spatial data for all MUKEYs / nmusyms requested. Default result contains mapunit delineation geometry with attribute table containing `gid`, `mukey` and `nationalmusym`, plus additional fields in result specified with `add.fields`.
 }
 \description{
 This is a high-level fetch method that facilitates making spatial queries to Soil Data Access (SDA) based on `mukey` or `nationalmusym`. A typical SDA spatial query is made returning geometry and key identifying information about the mapunit. Additional columns from the mapunit table can be included using the `add.fields` argument.
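+
+As a rough sketch of the chunking behavior described below, the helper \code{makeChunks()} (documented with \code{SDA_query}) can be previewed directly; the ID vector here is illustrative:
+
+\preformatted{
+# split 250 hypothetical IDs into chunks of at most 100
+ids <- 1:250
+table(makeChunks(ids, size = 100))  # three chunks: 100, 100, 50
+}
+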
@@ -33,20 +28,24 @@ This function automatically "chunks" the input vector (using `soilDB::makeChunks } \examples{ \donttest{ -# get spatial data for a single mukey -single.mukey <- fetchSDA_spatial(x = "2924882") - -# demonstrate fetching full extent (multi-mukey) of national musym -full.extent.nmusym <- fetchSDA_spatial(x = "2x8l5", by = "nmusym") - -# compare extent of nmusym to single mukey within it -if(require(sp)) { - plot(full.extent.nmusym, col = "RED",border=0) - plot(single.mukey, add = TRUE, col = "BLUE", border=0) +if(requireNamespace("curl") & + curl::has_internet()) { + + # get spatial data for a single mukey + single.mukey <- fetchSDA_spatial(x = "2924882") + + # demonstrate fetching full extent (multi-mukey) of national musym + full.extent.nmusym <- fetchSDA_spatial(x = "2x8l5", by = "nmusym") + + # compare extent of nmusym to single mukey within it + if(require(sp)) { + plot(full.extent.nmusym, col = "RED",border=0) + plot(single.mukey, add = TRUE, col = "BLUE", border=0) + } + + # demo adding a field (`muname`) to attribute table of result + head(fetchSDA_spatial(x = "2x8l5", by="nmusym", add.fields="muname")) } - -# demo adding a field (`muname`) to attribute table of result -head(fetchSDA_spatial(x = "2x8l5", by="nmusym", add.fields="muname")) } } \author{ diff --git a/man/mapunit_geom_by_ll_bbox.Rd b/man/mapunit_geom_by_ll_bbox.Rd index aa5af7cb..5350ec46 100644 --- a/man/mapunit_geom_by_ll_bbox.Rd +++ b/man/mapunit_geom_by_ll_bbox.Rd @@ -1,60 +1,63 @@ -\name{mapunit_geom_by_ll_bbox} -\alias{mapunit_geom_by_ll_bbox} -\title{Fetch Map Unit Geometry from SDA} -\description{Fetch map unit geometry from the SDA website by WGS84 bounding box.} - -\usage{ -mapunit_geom_by_ll_bbox(bbox, source = 'sda') -} -\arguments{ - \item{bbox}{a bounding box in WGS coordinates} - \item{source}{the source database, currently limited to soil data access (SDA)} -} - -\details{The SDA website can be found at \url{http://sdmdataaccess.nrcs.usda.gov}. See examples for bounding box formatting.} - -\value{A SpatialPolygonsDataFrame of map unit polygons, in WGS84 (long,lat) coordinates.} -\references{http://casoilresource.lawr.ucdavis.edu/} - -\author{Dylan E Beaudette} - -\note{It appears that SDA does not actually return the spatial intersecion of map unit polygons and bounding box. Rather, just those polygons that are completely within the bounding box / overlap with the bbox. This function requires the `rgdal` package.} - -\examples{ -# fetch map unit geometry from a bounding-box: -# -# +------------- (-120.41, 38.70) -# | | -# | | -# (-120.54, 38.61) --------------+ - -\donttest{ -if(require(sp) & require(rgdal)) { - - # basic usage - b <- c(-120.54,38.61,-120.41,38.70) - x <- try(mapunit_geom_by_ll_bbox(b)) # about 20 seconds - - if(!inherits(x,'try-error')) - # note that the returned geometry is everything overlapping the bbox - # and not an intersection... why? 
-    plot(x)
-    rect(b[1], b[2], b[3], b[4], border='red', lwd=2)
-
-
-    # get map unit data for matching map unit keys
-    in.statement <- format_SQL_in_statement(unique(x$MUKEY))
-    q <- paste("SELECT mukey, muname FROM mapunit WHERE mukey IN ", in.statement, sep="")
-    res <- SDA_query(q)
-  } else {
-    message('could not download XML result from SDA')
-  }
-}
-}
-
-
-\keyword{manip}
-
-
-
-
+\name{mapunit_geom_by_ll_bbox}
+\alias{mapunit_geom_by_ll_bbox}
+\title{Fetch Map Unit Geometry from SDA}
+\description{Fetch map unit geometry from the SDA website by WGS84 bounding box.}
+
+\usage{
+mapunit_geom_by_ll_bbox(bbox, source = 'sda')
+}
+\arguments{
+  \item{bbox}{a bounding box in WGS coordinates}
+  \item{source}{the source database, currently limited to soil data access (SDA)}
+}
+
+\details{The SDA website can be found at \url{http://sdmdataaccess.nrcs.usda.gov}. See examples for bounding box formatting.}
+
+\value{A SpatialPolygonsDataFrame of map unit polygons, in WGS84 (long,lat) coordinates.}
+\references{http://casoilresource.lawr.ucdavis.edu/}
+
+\author{Dylan E Beaudette}
+
+\note{It appears that SDA does not actually return the spatial intersection of map unit polygons and bounding box. Rather, it returns just those polygons that overlap or are completely within the bounding box. This function requires the `rgdal` package.}
+
+\examples{
+# fetch map unit geometry from a bounding-box:
+#
+# +------------- (-120.41, 38.70)
+# |             |
+# |             |
+# (-120.54, 38.61) --------------+
+
+\donttest{
+if(requireNamespace("curl") &
+    curl::has_internet() &
+    require(sp) &
+    require(rgdal)) {
+
+  # basic usage
+  b <- c(-120.54,38.61,-120.41,38.70)
+  x <- try(mapunit_geom_by_ll_bbox(b)) # about 20 seconds
+
+  # proceed only if the download succeeded
+  if(!inherits(x,'try-error')) {
+    # note that the returned geometry is everything overlapping the bbox
+    # and not an intersection... why?
+    plot(x)
+    rect(b[1], b[2], b[3], b[4], border='red', lwd=2)
+
+
+    # get map unit data for matching map unit keys
+    in.statement <- format_SQL_in_statement(unique(x$MUKEY))
+    q <- paste("SELECT mukey, muname FROM mapunit WHERE mukey IN ", in.statement, sep="")
+    res <- SDA_query(q)
+  } else {
+    message('could not download XML result from SDA')
+  }
+}
+}
+}
+
+
+\keyword{manip}
diff --git a/man/seriesExtent.Rd b/man/seriesExtent.Rd
index b948d61b..e9b943d1 100644
--- a/man/seriesExtent.Rd
+++ b/man/seriesExtent.Rd
@@ -1,33 +1,36 @@
-\name{seriesExtent}
-\alias{seriesExtent}
-\title{Get/Display Soil Series Extent}
-\description{Get or display the spatial extent of a named soil series using the Series Extent Explorer.}
-\usage{
-seriesExtent(s, timeout=60)
-}
-\arguments{
-  \item{s}{the soil series name}
-  \item{timeout}{time that we are willing to wait for a response, in seconds}
-}
-\details{Soil series extent data are downloaded from a static cache of GeoJSON files on SoilWeb servers. Cached data are typically updated annually.}
-
-\value{when calling \code{seriesExtent}, a \code{SpatialPolygonsDataFrame} object}
-
-\references{http://casoilresource.lawr.ucdavis.edu/see}
-
-\author{D.E. Beaudette}
-
-\note{This function require the `rgdal` package.}
-
-\examples{
-\donttest{
-# fetch series extent for the 'Amador' soil series
-s <- seriesExtent('amador')
-
-# plot SpatialPolygonsDataFrame
-if(require(sp))
-  plot(s)
-
-}
-}
-\keyword{manip}
+\name{seriesExtent}
+\alias{seriesExtent}
+\title{Get/Display Soil Series Extent}
+\description{Get or display the spatial extent of a named soil series using the Series Extent Explorer.}
+\usage{
+seriesExtent(s, timeout=60)
+}
+\arguments{
+  \item{s}{the soil series name}
+  \item{timeout}{time that we are willing to wait for a response, in seconds}
+}
+\details{Soil series extent data are downloaded from a static cache of GeoJSON files on SoilWeb servers. Cached data are typically updated annually.}
+
+\value{when calling \code{seriesExtent}, a \code{SpatialPolygonsDataFrame} object}
+
+\references{http://casoilresource.lawr.ucdavis.edu/see}
+
+\author{D.E. Beaudette}
+
+\note{This function requires the `rgdal` package.}
+
+\examples{
+\donttest{
+if(requireNamespace("curl") &
+    curl::has_internet()) {
+
+  # fetch series extent for the 'Amador' soil series
+  s <- seriesExtent('amador')
+
+  # plot SpatialPolygonsDataFrame
+  if(require(sp))
+    plot(s)
+}
+}
+}
+\keyword{manip}
diff --git a/man/siblings.Rd b/man/siblings.Rd
index e2251571..47a62b91 100644
--- a/man/siblings.Rd
+++ b/man/siblings.Rd
@@ -41,14 +41,18 @@ D.E. Beaudette
 \examples{
 \donttest{
-# basic usage
-x <- siblings('zook')
-x$sib
-
-# restrict to siblings that are major components
-# e.g. the most likely siblings
-x <- siblings('zook', only.major = TRUE)
-x$sib
+if(requireNamespace("curl") &
+    curl::has_internet()) {
+
+  # basic usage
+  x <- siblings('zook')
+  x$sib
+
+  # restrict to siblings that are major components
+  # e.g. the most likely siblings
+  x <- siblings('zook', only.major = TRUE)
+  x$sib
+}
 }
 }
diff --git a/man/uncode.Rd b/man/uncode.Rd
index 71f6387a..43d81272 100644
--- a/man/uncode.Rd
+++ b/man/uncode.Rd
@@ -1,51 +1,53 @@
-\name{uncode}
-\alias{metadata}
-\alias{uncode}
-\alias{code}
-
-\title{Convert coded values returned from NASIS and SDA queries to factors}
-\description{These functions convert the coded values returned from NASIS or SDA to factors (e.g. 1 = Alfisols) using the metadata tables from NASIS. For SDA the metadata is pulled from a static snapshot in the soilDB package (/data/metadata.rda).}
-\usage{
-uncode(df, invert = FALSE, db = "NASIS",
-       droplevels = FALSE,
-       stringsAsFactors = default.stringsAsFactors()
-       )
-code(df, ...)
-}
-%- maybe also 'usage' for other objects documented here.
-\arguments{
-  \item{df}{data.frame}
-  \item{invert}{converts the code labels back to their coded values (FALSE)}
-  \item{db}{label specifying the soil database the data is coming from, which indicates whether or not to query metadata from local NASIS database ("NASIS") or use soilDB-local snapshot ("LIMS" or "SDA")}
-  \item{droplevels}{logical: indicating whether to drop unused levels in classifying factors. This is useful when a class has large number of unused classes, which can waste space in tables and figures.}
-  \item{stringsAsFactors}{logical: should character vectors be converted to factors? The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
-  \item{\dots}{arguments passed on to \code{uncode}}
-  }
-
-\details{These functions convert the coded values returned from NASIS into their plain text representation. It duplicates the functionality of the CODELABEL function found in NASIS.
This function is primarily intended to be used internally by other soilDB R functions, in order to minimizes the need to manually convert values.
-
-The function works by iterating through the column names in a data frame and looking up whether they match any of the ColumnPhysicalNames found in the metadata domain tables. If matches are found then the columns coded values are converted to their corresponding factor levels. Therefore it is not advisable to reuse column names from NASIS unless the contents match the range of values and format found in NASIS. Otherwise uncode() will convert their values to NA.
-
-When data is being imported from NASIS, the metadata tables are sourced directly from NASIS. When data is being imported from SDA or the NASIS Web Reports, the metadata is pulled from a static snapshot in the soilDB package.
-
-Beware the default is to return the values as factors rather than strings. While strings are generally preferable, factors make plotting more convenient. Generally the factor level ordering returned by uncode() follows the naturally ordering of categories that would be expected (e.g. sand, silt, clay).
-}
-
-\value{A data frame with the results.}
-\author{Stephen Roecker}
-\examples{
-\donttest{
-if(require(aqp)) {
-  # query component by nationalmusym
-  comp <- fetchSDA(WHERE = "nationalmusym = '2vzcp'")
-  s <- site(comp)
-
-  # use SDA uncoding domain via db argument
-  s <- uncode(s, db="SDA")
-  levels(s$taxorder)
-}
-}
-}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
-\keyword{manip}% use one of RShowDoc("KEYWORDS")
+\name{uncode}
+\alias{metadata}
+\alias{uncode}
+\alias{code}
+
+\title{Convert coded values returned from NASIS and SDA queries to factors}
+\description{These functions convert the coded values returned from NASIS or SDA to factors (e.g. 1 = Alfisols) using the metadata tables from NASIS. For SDA the metadata is pulled from a static snapshot in the soilDB package (/data/metadata.rda).}
+\usage{
+uncode(df, invert = FALSE, db = "NASIS",
+       droplevels = FALSE,
+       stringsAsFactors = default.stringsAsFactors()
+       )
+code(df, ...)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{df}{data.frame}
+  \item{invert}{converts the code labels back to their coded values (default: FALSE)}
+  \item{db}{label specifying the soil database the data is coming from, which indicates whether to query metadata from the local NASIS database ("NASIS") or use the soilDB-local snapshot ("LIMS" or "SDA")}
+  \item{droplevels}{logical: indicates whether to drop unused levels in classifying factors. This is useful when a class has a large number of unused levels, which can waste space in tables and figures.}
+  \item{stringsAsFactors}{logical: should character vectors be converted to factors? The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
+  \item{\dots}{arguments passed on to \code{uncode}}
+  }
+
+\details{These functions convert the coded values returned from NASIS into their plain text representation. It duplicates the functionality of the CODELABEL function found in NASIS. This function is primarily intended to be used internally by other soilDB R functions, to minimize the need to manually convert values.
+
+The function works by iterating through the column names in a data frame and looking up whether they match any of the ColumnPhysicalNames found in the metadata domain tables. If matches are found, the columns' coded values are converted to their corresponding factor levels.
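+
+For example, a minimal sketch (\code{taxorder} is a real NASIS column; the numeric codes shown are purely illustrative):
+
+\preformatted{
+# a data frame column named for a NASIS ColumnPhysicalName
+d <- data.frame(taxorder = c(2, 5))  # example codes, not verified
+uncode(d, db = "SDA")  # coded values become factor labels
+}
+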
+Therefore it is not advisable to reuse column names from NASIS unless the contents match the range of values and format found in NASIS. Otherwise uncode() will convert their values to NA.
+
+When data is being imported from NASIS, the metadata tables are sourced directly from NASIS. When data is being imported from SDA or the NASIS Web Reports, the metadata is pulled from a static snapshot in the soilDB package.
+
+Beware: the default is to return the values as factors rather than strings. While strings are generally preferable, factors make plotting more convenient. Generally the factor level ordering returned by uncode() follows the natural ordering of categories that would be expected (e.g. sand, silt, clay).
+}
+
+\value{A data frame with the results.}
+\author{Stephen Roecker}
+\examples{
+\donttest{
+if(requireNamespace("curl") &
+    curl::has_internet() &
+    require(aqp)) {
+  # query component by nationalmusym
+  comp <- fetchSDA(WHERE = "nationalmusym = '2vzcp'")
+  s <- site(comp)
+
+  # use SDA uncoding domain via db argument
+  s <- uncode(s, db="SDA")
+  levels(s$taxorder)
+}
+}
+}
+\keyword{manip}
diff --git a/man/us_ss_timeline.Rd b/man/us_ss_timeline.Rd
index 2f87bc12..45a515e1 100644
--- a/man/us_ss_timeline.Rd
+++ b/man/us_ss_timeline.Rd
@@ -32,12 +32,12 @@ https://www.nrcs.usda.gov/wps/portal/nrcs/soilsurvey/soils/survey/state/
 \examples{
 \donttest{
-
-if (
-  require("XML") &
-  require("RCurl") &
-  require("ggplot2") &
-  require("gridExtra")
+if (requireNamespace("curl") &
+    curl::has_internet() &
+    require("XML") &
+    require("RCurl") &
+    require("ggplot2") &
+    require("gridExtra")
 ) {
 
 data(state)