From 77b7d58903b553792a69b78cff374eba7e7b1e74 Mon Sep 17 00:00:00 2001 From: leppott Date: Wed, 27 Sep 2017 09:54:03 -0400 Subject: [PATCH] v2.0.1.9043 Issue #25 , Issues #37 Polishing rough edges. --- .Rhistory | 958 +++++++++--------- DESCRIPTION | 16 +- LOG.Items.20170920.131124.tab | 5 - LOG.Items.20170920.131143.tab | 4 - NAMESPACE | 1 + NEWS.md | 35 +- NEWS.rmd | 14 + R/CompSiteCDF.R | 176 ++++ R/data.r | 9 + README.Rmd | 125 +-- README.md | 91 +- data-raw/CDF_WaterTemp_2014_MA.csv | 350 +++++++ data-raw/ProcessData_CompSiteCDF.R | 33 + data/data_CompSiteCDF.rda | Bin 0 -> 11626 bytes inst/extdata/ContDataQC_LibraryCreation.Rmd | 127 +++ .../ContDataQC_LibraryCreation.nb.html | 354 +++++++ man/CompSiteCDF.Rd | 47 + man/data_CompSiteCDF.Rd | 14 + 18 files changed, 1762 insertions(+), 597 deletions(-) delete mode 100644 LOG.Items.20170920.131124.tab delete mode 100644 LOG.Items.20170920.131143.tab create mode 100644 R/CompSiteCDF.R create mode 100644 data-raw/CDF_WaterTemp_2014_MA.csv create mode 100644 data-raw/ProcessData_CompSiteCDF.R create mode 100644 data/data_CompSiteCDF.rda create mode 100644 inst/extdata/ContDataQC_LibraryCreation.Rmd create mode 100644 inst/extdata/ContDataQC_LibraryCreation.nb.html create mode 100644 man/CompSiteCDF.Rd create mode 100644 man/data_CompSiteCDF.Rd diff --git a/.Rhistory b/.Rhistory index 18a00bc..e3f4c77 100644 --- a/.Rhistory +++ b/.Rhistory @@ -1,96 +1,479 @@ -# Document, Install, and Reload Library -## Generate Documentation -setwd(paste0("./",myLibrary)) -devtools::document() -## Install New Package (locally) -setwd("..") # return to root directory first -devtools::install(myLibrary) -## Reload library -library(myLibrary,character.only = TRUE) -# change wd back to package -setwd(paste0("./",myLibrary)) -library(ContDataQC) -??ContDataQC -library(ContDataQC) -?ContDataQC -# Examples of each operation -# Parameters -Selection.Operation <- c("GetGageData","QCRaw", "Aggregate", "SummaryStats") -Selection.Type <- c("Air","Water","AW","Gage","AWG","AG","WG") -Selection.SUB <- c("Data1_RAW","Data2_QC","Data3_Aggregated","Data4_Stats") -myDir.BASE <- getwd() -# Create data directories -myDir.create <- paste0("./",Selection.SUB[1]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[2]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[3]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[4]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -# Save example data (assumes directory ./Data1_RAW/ exists) -myData <- data_raw_test2_AW_20130426_20130725 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20130426_20130725.csv")) -myData <- data_raw_test2_AW_20130725_20131015 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20130725_20131015.csv")) -myData <- data_raw_test2_AW_20140901_20140930 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20140901_20140930.csv")) -myData <- data_raw_test4_AW_20160418_20160726 -write.csv(myData,paste0("./",Selection.SUB[1],"/test4_AW_20160418_20160726.csv")) -myFile <- "config.TZ.Central.R" -file.copy(file.path(path.package("ContDataQC"),"extdata",myFile),file.path(getwd(),Selection.SUB[1],myFile)) -# Get Gage Data -myData.Operation <- "GetGageData" #Selection.Operation[1] -myData.SiteID <- "01187300" # 
Hubbard River near West Hartland, CT
-myData.Type <- Selection.Type[4] #"Gage"
-myData.DateRange.Start <- "2013-01-01"
-myData.DateRange.End <- "2014-12-31"
-myDir.import <- ""
-myDir.export <- file.path(myDir.BASE,Selection.SUB[1])
-ContDataQC(myData.Operation, myData.SiteID, myData.Type, myData.DateRange.Start, myData.DateRange.End, myDir.import, myDir.export)
-# Get Gage Data (central time zone)
-myData.Operation <- "GetGageData" #Selection.Operation[1]
-myData.SiteID <- "07032000" # Mississippi River at Memphis, TN
-myData.Type <- Selection.Type[4] #"Gage"
-myData.DateRange.Start <- "2013-01-01"
-myData.DateRange.End <- "2014-12-31"
-myDir.import <- ""
-myDir.export <- file.path(myDir.BASE,Selection.SUB[1])
-myConfig <- file.path(getwd(),Selection.SUB[1],"config.TZ.central.R") # include path if not in working directory
-ContDataQC(myData.Operation, myData.SiteID, myData.Type, myData.DateRange.Start, myData.DateRange.End, myDir.import, myDir.export, myConfig)
+ParamName.xlab <- ContData.env$myLab.WaterTemp
+wd <- getwd()
+myFile <- "CDF_WaterTemp_2014_MA.csv"
+data.import <- read.csv(file.path(wd,"data-raw",myFile))
+Col.Sites <- names(data.import)[!(names(data.import) %in% myName.Date)]
+data.import[,ContData.env$myName.Yr] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%Y")
+data.import[,ContData.env$myName.Mo] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%m")
+data.import[,ContData.env$myName.YrMo] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%Y%m")
+data.import[,ContData.env$myName.MoDa] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%m%d")
+data.import[,ContData.env$myName.Season] <- NA
+Col.Sites <- names(data.import)[!(names(data.import) %in% ContData.envmyName.Date)]
+Col.Sites <- names(data.import)[!(names(data.import) %in% ContData.env$myName.Date)]
+data.import[,ContData.env$myName.Yr] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%Y")
+data.import[,ContData.env$myName.Mo] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%m")
+data.import[,ContData.env$myName.YrMo] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%Y%m")
+data.import[,ContData.env$myName.MoDa] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%m%d")
+data.import[,ContData.env$myName.Season] <- NA
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric("0101") & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Spring.Start)] <- "Winter"
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Spring.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Summer.Start)] <- "Spring"
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Summer.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Fall.Start)] <- "Summer"
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Fall.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Winter.Start)] <- "Fall"
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Winter.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<=as.numeric("1231")] <- "Winter"
+data.import[,ContData.env$myName.YrSeason] <- paste(data.import[,ContData.env$myName.Yr],data.import[,ContData.env$myName.Season],sep="")
+View(data.import)
+x <- ecdf(data.import[,Col.Sites[1]])
+plot(x, col="blue")
+i=1
+myDate <- format(Sys.Date(),"%Y%m%d")
+myTime <- format(Sys.time(),"%H%M%S")
+strFile <- "CompSiteCDF_"
+strFile.Out <- paste(paste("CompSiteCDF",myDate,myTime,sep=ContData.env$myDelim),"pdf",sep=".")
+cbPalette <- c("#999999", "#E69F00", "#56B4E9", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7")
+cbbPalette <- c("#000000", "#E69F00", "#56B4E9", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7")
+for (i in 1:length(Col.Sites)){
+myColors <- cbPalette #rainbow(length(Col.Sites))
+if(i==1){
+plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE
+#, col.01line="white"
+, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" )
+} else {
+plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE, add=T)
+}
+legend("bottomright",Col.Sites,fill=myColors)
+}
+Col.Sites
+#for testing load ContData.env
+source(file.path(getwd(),"R","config.R"))
+ParamName.xlab <- ContData.env$myLab.WaterTemp
+#df.data <- data_CompSiteCDF
+wd <- getwd()
+myFile <- "CDF_WaterTemp_2014_MA.csv"
+data.import <- read.csv(file.path(wd,"data-raw",myFile))
+# Site Names (Columns)
+Col.Sites <- names(data.import)[!(names(data.import) %in% ContData.env$myName.Date)]
+Col.Sites
+# add time period fields
+data.import[,ContData.env$myName.Yr] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%Y")
+data.import[,ContData.env$myName.Mo] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%m")
+data.import[,ContData.env$myName.YrMo] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%Y%m")
+data.import[,ContData.env$myName.MoDa] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%m%d")
+# data.import[,ContData.env$myName.JuDa] <- as.POSIXlt(data.import[,ContData.env$myName.Date], format=ContData.env$myFormat.Date)$yday +1
+# ## add Season fields
+data.import[,ContData.env$myName.Season] <- NA
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric("0101") & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Spring.Start)] <- "Winter"
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Spring.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Summer.Start)] <- "Spring"
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Summer.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Fall.Start)] <- "Summer"
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Fall.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Winter.Start)] <- "Fall"
+data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Winter.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<=as.numeric("1231")] <- "Winter"
+data.import[,ContData.env$myName.YrSeason] <- paste(data.import[,ContData.env$myName.Yr],data.import[,ContData.env$myName.Season],sep="")
+#
+View(data.import)
+# calc CDF
+x <- ecdf(data.import[,Col.Sites[1]])
+plot(x, col="blue")
+# plot
+i=1
+myDate <- format(Sys.Date(),"%Y%m%d")
+myTime <- format(Sys.time(),"%H%M%S")
+strFile <- "CompSiteCDF_"
+strFile.Out <- paste(paste("CompSiteCDF",myDate,myTime,sep=ContData.env$myDelim),"pdf",sep=".")
+# Color Blind Palatte
+# http://www.cookbook-r.com/Graphs/Colors_(ggplot2)/
+# The palette with grey:
+cbPalette <- c("#999999", "#E69F00", "#56B4E9", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7")
+# The palette with black:
+cbbPalette <- c("#000000", "#E69F00", "#56B4E9", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7")
+for (i in 1:length(Col.Sites)){
+myColors <- cbPalette #rainbow(length(Col.Sites))
+if(i==1){
+plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE
+#, col.01line="white"
+, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" )
+} else {
+plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE, add=T)
+}
+legend("bottomright",Col.Sites,fill=myColors)
+}
+x <- 
round(data.import$Browns,1) +y <- as.data.frame(table(x)) +z <- sum(y$Freq) +y$Freq <- y$Freq/z +plot(y, type="l") +y$x <- as.numeric(y$x) +plot(y, type="l") +pdf(file=strFile.Out)##PDF.START +for (i in 1:length(Col.Sites)){ +myColors <- cbPalette #rainbow(length(Col.Sites)) +if(i==1){ +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE +#, col.01line="white" +, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" ) +} else { +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE, add=T) +} +legend("bottomright",Col.Sites,fill=myColors) +} +plot(y, type="l") +dev.off() ##PDF.END +head(y) +View(y) +hist(y) +?hist +x <- round(data.import$Browns,1) +y <- as.data.frame(table(x)) +z <- sum(y$Freq) +hist(x[1]) +hist(data.import$Browns) +lines(density(data.import$Browns), col="blue") +data.import$Browns +AA <- !is.na(data.import$Browns) +hist(AA) +AA <- data.import$Browns[!is.na(data.import$Browns)] +hist(AA) +lines(density(AA), col="blue") +hist(AA, prob=TRUE) +lines(density(AA), col="blue") +lines(density(AA), col="blue", lwd=2) +box() +pdf(file=strFile.Out)##PDF.START +for (i in 1:length(Col.Sites)){ +myColors <- cbPalette #rainbow(length(Col.Sites)) +if(i==1){ +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE +#, col.01line="white" +, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" ) +} else { +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE, add=T) +} +legend("bottomright",Col.Sites,fill=myColors) +} +x <- round(data.import$Browns,1) +y <- as.data.frame(table(x)) +z <- sum(y$Freq) +y$Freq <- y$Freq/z +plot(y, type="l") +y$x <- as.numeric(y$x) # works for plot but converts to rowID +plot(y, type="l") +AA <- data.import$Browns[!is.na(data.import$Browns)] +hist(AA, prob=TRUE) +lines(density(AA), col="blue", lwd=2) +box() +dev.off() ##PDF.END +?hist +j=1 +hist(data.import[,Col.Sites[j]], plot=FALSE) +hist(data.import[,Col.Sites[j]], plot=TRUE) +dev.off() ##PDF.END +hist(data.import[,Col.Sites[j]], plot=TRUE) +hist(data.import[,Col.Sites[j]], plot=TRUE, density=TRUE) +hist(data.import[,Col.Sites[j]], plot=TRUE) +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,j][!is.na(data.import[,j])] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], plot=TRUE) +box() +}##IF.j==1.END +# plot lines +lines(density(data.j), col=myColors[j], lwd=2) +}##FOR.j.END +j=1 +data.j <- data.import[,j][!is.na(data.import[,j])] +data.j +data.j <- data.import[,Col.Sites[j]][!is.na(data.import[,Col.Sites[j]])] +data.j +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]][!is.na(data.import[,Col.Sites[j]])] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], plot=TRUE) +box() +}##IF.j==1.END +# plot lines +lines(density(data.j), col=myColors[j], lwd=2) +}##FOR.j.END +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]][!is.na(data.import[,Col.Sites[j]])] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE) +box() +}##IF.j==1.END +# plot lines +lines(density(data.j), col=myColors[j], lwd=2) +}##FOR.j.END +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- 
data.import[,Col.Sites[j]][!is.na(data.import[,Col.Sites[j]])] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, col="white") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j), col=myColors[j], lwd=2) +}##FOR.j.END +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]][!is.na(data.import[,Col.Sites[j]])] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j), col=myColors[j], lwd=2) +}##FOR.j.END +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]][!is.na(data.import[,Col.Sites[j]])] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white" +,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j), col=myColors[j], lwd=2) +}##FOR.j.END +pdf(file=strFile.Out)##PDF.START +myColors <- cbPalette #rainbow(length(Col.Sites)) +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]][!is.na(data.import[,Col.Sites[j]])] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white" +,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j), col=myColors[j], lwd=2) +}##FOR.j.END +legend("topright",Col.Sites,fill=myColors) +for (i in 1:length(Col.Sites)){##FOR.i.START +#myColors <- cbPalette #rainbow(length(Col.Sites)) +if(i==1){##IF.i==1.START +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE +#, col.01line="white" +, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" ) +} else { +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE, add=T) +}##IF.i==1.END +legend("bottomright",Col.Sites,fill=myColors) +}##FOR.i.END +dev.off() ##PDF.END +dev.off() ##PDF.END +# PLOT 1 +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white" +,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j, na.rm=TRUE), col=myColors[j], lwd=2) +}##FOR.j.END +legend("topright",Col.Sites,fill=myColors) +{ +pdf(file=strFile.Out)##PDF.START +par(mfrow=c(2,1)) +myColors <- cbPalette #rainbow(length(Col.Sites)) +# PLOT 1 +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white" +,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j, na.rm=TRUE), col=myColors[j], lwd=2) +}##FOR.j.END +legend("topright",Col.Sites,fill=myColors) +# Plot 2 +for (i in 1:length(Col.Sites)){##FOR.i.START +#myColors <- cbPalette #rainbow(length(Col.Sites)) +if(i==1){##IF.i==1.START +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE +#, col.01line="white" +, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" ) +} else { 
+plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE, add=T) +}##IF.i==1.END +legend("bottomright",Col.Sites,fill=myColors) +}##FOR.i.END +dev.off() ##PDF.END +} +?pdf +myDate <- format(Sys.Date(),"%Y%m%d") +myTime <- format(Sys.time(),"%H%M%S") +strFile <- "CompSiteCDF_" +strFile.Out <- paste(paste("CompSiteCDF",myDate,myTime,sep=ContData.env$myDelim),"pdf",sep=".") +{ +pdf(file=strFile.Out, width=7, height=10)##PDF.START +par(mfrow=c(2,1)) +myColors <- cbPalette #rainbow(length(Col.Sites)) +# PLOT 1 +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white" +,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j, na.rm=TRUE), col=myColors[j], lwd=2) +}##FOR.j.END +legend("topright",Col.Sites,fill=myColors) +# Plot 2 +for (i in 1:length(Col.Sites)){##FOR.i.START +#myColors <- cbPalette #rainbow(length(Col.Sites)) +if(i==1){##IF.i==1.START +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE +#, col.01line="white" +, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" ) +} else { +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=1.5, do.p=FALSE, add=T) +}##IF.i==1.END +legend("bottomright",Col.Sites,fill=myColors) +}##FOR.i.END +dev.off() ##PDF.END +} +{ +myDate <- format(Sys.Date(),"%Y%m%d") +myTime <- format(Sys.time(),"%H%M%S") +strFile <- "CompSiteCDF_" +strFile.Out <- paste(paste("CompSiteCDF",myDate,myTime,sep=ContData.env$myDelim),"pdf",sep=".") +# +pdf(file=strFile.Out, width=7, height=10)##PDF.START +par(mfrow=c(2,1)) +myColors <- cbPalette #rainbow(length(Col.Sites)) +# PLOT 1 +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white" +,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j, na.rm=TRUE), col=myColors[j], lwd=2) +}##FOR.j.END +legend("topright",Col.Sites,fill=myColors) +# Plot 2 +for (i in 1:length(Col.Sites)){##FOR.i.START +#myColors <- cbPalette #rainbow(length(Col.Sites)) +if(i==1){##IF.i==1.START +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=2, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE +#, col.01line="white" +, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" ) +} else { +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=2, do.p=FALSE, add=T) +}##IF.i==1.END +legend("bottomright",Col.Sites,fill=myColors) +}##FOR.i.END +dev.off() ##PDF.END +} +{ +myDate <- format(Sys.Date(),"%Y%m%d") +myTime <- format(Sys.time(),"%H%M%S") +strFile <- "CompSiteCDF_" +strFile.Out <- paste(paste("CompSiteCDF",myDate,myTime,sep=ContData.env$myDelim),"pdf",sep=".") +# +pdf(file=strFile.Out, width=7, height=10)##PDF.START +par(mfrow=c(2,1)) +myColors <- cbPalette #rainbow(length(Col.Sites)) +# PLOT 1 +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white" +,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value") +box() +}##IF.j==1.END +# plot lines 
+lines(density(data.j, na.rm=TRUE), col=myColors[j], lwd=2) +}##FOR.j.END +legend("topright",Col.Sites,fill=myColors) +# Plot 2 +myLWD <- 1.5 +for (i in 1:length(Col.Sites)){##FOR.i.START +#myColors <- cbPalette #rainbow(length(Col.Sites)) +if(i==1){##IF.i==1.START +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=myLWD, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE +#, col.01line="white" +, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" ) +} else { +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=myLWD, do.p=FALSE, add=T) +}##IF.i==1.END +legend("bottomright",Col.Sites,fill=myColors) +}##FOR.i.END +dev.off() ##PDF.END +} +{ +myDate <- format(Sys.Date(),"%Y%m%d") +myTime <- format(Sys.time(),"%H%M%S") +strFile <- "CompSiteCDF_" +strFile.Out <- paste(paste("CompSiteCDF",myDate,myTime,sep=ContData.env$myDelim),"pdf",sep=".") +# +pdf(file=strFile.Out, width=7, height=10)##PDF.START +par(mfrow=c(2,1)) +myColors <- cbPalette #rainbow(length(Col.Sites)) +# PLOT 1 +for (j in 1:length(Col.Sites)){##FOR.j.START +# subset out NA +data.j <- data.import[,Col.Sites[j]] +# different first iteration +if (j==1) {##IF.j==1,START +hist(data.import[,Col.Sites[j]], prob=TRUE, border="white" +,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value") +box() +}##IF.j==1.END +# plot lines +lines(density(data.j, na.rm=TRUE), col=myColors[j], lwd=2) +}##FOR.j.END +legend("topright",Col.Sites,fill=myColors) +# Plot 2 +myLWD <- 1.5 +for (i in 1:length(Col.Sites)){##FOR.i.START +#myColors <- cbPalette #rainbow(length(Col.Sites)) +if(i==1){##IF.i==1.START +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=myLWD, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE +#, col.01line="white" +, main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" ) +} else { +plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=myLWD, do.p=FALSE, add=T) +}##IF.i==1.END +}##FOR.i.END +legend("bottomright",Col.Sites,fill=myColors) +dev.off() ##PDF.END +} library(devtools) -install_github("USGS-R/dataRetrieval") -install.packages("dataRetrieval",repos="https://owi.usgs.gov/R") -install.packages("dataRetrieval", repos = "https://owi.usgs.gov/R") library(ContDataQC) -install.packages("dplyr") +devtools::check() +devtools::check() +?ContDataQC library(ContDataQC) -# Get Gage Data -myData.Operation <- "GetGageData" #Selection.Operation[1] -myData.SiteID <- "01187300" # Hubbard River near West Hartland, CT -myData.Type <- Selection.Type[4] #"Gage" -myData.DateRange.Start <- "2013-01-01" -myData.DateRange.End <- "2014-12-31" -myDir.import <- "" -myDir.export <- file.path(myDir.BASE,Selection.SUB[1]) -ContDataQC(myData.Operation, myData.SiteID, myData.Type, myData.DateRange.Start, myData.DateRange.End, myDir.import, myDir.export) -# Get Gage Data -myData.Operation <- "GetGageData" #Selection.Operation[1] -myData.SiteID <- "01187300" # Hubbard River near West Hartland, CT -myData.Type <- "Gage" #Selection.Type[4] -myData.DateRange.Start <- "2013-01-01" -myData.DateRange.End <- "2014-12-31" -myDir.import <- "" -myDir.export <- file.path(myDir.BASE,Selection.SUB[1]) -ContDataQC(myData.Operation, myData.SiteID, myData.Type, myData.DateRange.Start, myData.DateRange.End, myDir.import, myDir.export) -# Library Name -myLibrary <- "ContDataQC" # "ContDataQC","MMIcalc","MMIcalcNV","MBSStools","XC95" +?ContDataQC +myLibrary <- "ContDataQC" # Load Library library(devtools) -# generate Vignette -devtools::build_vignettes() -# Document, 
Install, and Reload Library -## Generate Documentation setwd(paste0("./",myLibrary)) devtools::document() ## Install New Package (locally) @@ -101,287 +484,21 @@ library(myLibrary,character.only = TRUE) # change wd back to package setwd(paste0("./",myLibrary)) library(ContDataQC) -??ContDataQC -##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +?ContDataQC +vignette(package="ContDataQC") +print(vignette(package="ContDataQC")) +vignette("rotated", package="grid") +vignette(package="ContDataQC") +vignette("ContDataQC_Vignette",package="ContDataQC") # Library Name -myLibrary <- "ContDataQC" # "ContDataQC","MMIcalc","MMIcalcNV","MBSStools","XC95" +myLibrary <- "ContDataQC" # Load Library library(devtools) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Create Package -# Document, Install, and Reload Library -## Generate Documentation -setwd(paste0("./",myLibrary)) -devtools::document() -## Install New Package (locally) -setwd("..") # return to root directory first -devtools::install(myLibrary) -## Reload library -library(myLibrary,character.only = TRUE) -# change wd back to package -setwd(paste0("./",myLibrary)) +# create(myLibrary) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -library(ContDataQC) -library(StreamThermal) -?ST.freq -?T_frequency -library(dataRetrieval) -ExUSGSStreamTemp<-readNWISdv("01382310","00010","2011-01-01","2011-12-31",c("00001","00002","00003")) -sitedata<-subset(ExUSGSStreamTemp, select=c("site_no","Date","X_00010_00001","X_00010_00002","X_00010_00003")) -names(sitedata)<-c("siteID","Date","MaxT","MinT","MeanT") -T_frequency(sitedata) -?Export.IHA -?T_frequency -View(sitedata) -?ContDataQC -# Examples of each operation -# Parameters -Selection.Operation <- c("GetGageData","QCRaw", "Aggregate", "SummaryStats") -Selection.Type <- c("Air","Water","AW","Gage","AWG","AG","WG") -Selection.SUB <- c("Data1_RAW","Data2_QC","Data3_Aggregated","Data4_Stats") -myDir.BASE <- getwd() -# Create data directories -myDir.create <- paste0("./",Selection.SUB[1]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[2]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[3]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[4]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -# Save example data (assumes directory ./Data1_RAW/ exists) -myData <- data_raw_test2_AW_20130426_20130725 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20130426_20130725.csv")) -myData <- data_raw_test2_AW_20130725_20131015 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20130725_20131015.csv")) -myData <- data_raw_test2_AW_20140901_20140930 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20140901_20140930.csv")) -myData <- data_raw_test4_AW_20160418_20160726 -write.csv(myData,paste0("./",Selection.SUB[1],"/test4_AW_20160418_20160726.csv")) -myFile <- "config.TZ.Central.R" -file.copy(file.path(path.package("ContDataQC"),"extdata",myFile),file.path(getwd(),Selection.SUB[1],myFile)) -# QC Raw Data -myData.Operation <- "QCRaw" #Selection.Operation[2] -myData.SiteID <- "test2" -myData.Type <- Selection.Type[3] #"AW" -myData.DateRange.Start <- "2013-01-01" -myData.DateRange.End <- "2014-12-31" -myDir.import <- file.path(myDir.BASE,Selection.SUB[1]) #"Data1_RAW" -myDir.export <- 
file.path(myDir.BASE,Selection.SUB[2]) #"Data2_QC" -ContDataQC(myData.Operation, myData.SiteID, myData.Type, myData.DateRange.Start, myData.DateRange.End, myDir.import, myDir.export) -# Aggregate Data -myData.Operation <- "Aggregate" #Selection.Operation[3] -myData.SiteID <- "test2" -myData.Type <- Selection.Type[3] #"AW" -myData.DateRange.Start <- "2013-01-01" -myData.DateRange.End <- "2014-12-31" -myDir.import <- file.path(myDir.BASE,Selection.SUB[2]) #"Data2_QC" -myDir.export <- file.path(myDir.BASE,Selection.SUB[3]) #"Data3_Aggregated" -ContDataQC(myData.Operation, myData.SiteID, myData.Type, myData.DateRange.Start, myData.DateRange.End, myDir.import, myDir.export) -# Summary Stats -myData.Operation <- "SummaryStats" #Selection.Operation[4] -myData.SiteID <- "test2" -myData.Type <- Selection.Type[3] #"AW" -myData.DateRange.Start <- "2013-01-01" -myData.DateRange.End <- "2014-12-31" -myDir.import <- file.path(myDir.BASE,Selection.SUB[3]) #"Data3_Aggregated" -myDir.export <- file.path(myDir.BASE,Selection.SUB[4]) #"Data4_Stats" -ContDataQC(myData.Operation, myData.SiteID, myData.Type, myData.DateRange.Start, myData.DateRange.End, myDir.import, myDir.export) -myFile <- "STATS_test2_Aw_20130101_20141231_Water.Temp.C.csv" -myDir <- "Data4_Stats" -myData <- read.csv(file.path(getwd(),myDir,myFile),row.names=FALSE) -myData <- read.csv(file.path(getwd(),myDir,myFile)) -View(myData) -names(myData) -dim(myData) -df.ST <- myData[myData[,"TimeFrame"]=="day",] -dim(df.ST) -View(df.ST) -Col.Keep <- c("SiteID", "TimeValue", "max", "min", "mean") -df.ST <- myData[myData[,"TimeFrame"]=="day",Col.Keep] -View(df.ST) -names(df.ST) -Names.ST <- c("SiteID", "Date", "MaxT", "MinT", "MeanT") -names(df.ST) <- Names.ST -sitedata <- myData[myData[,"TimeFrame"]=="day",Col.Keep] -Names.ST <- c("SiteID", "Date", "MaxT", "MinT", "MeanT") -names(sitedata) <- Names.ST -View(sitedata) -ST.freq <- T_frequency(sitedata) -ST.mag <- T_magnitude(sitedata) -ST.roc <- T_rateofchange(sitedata) -ST.tim <- T_timing(sitedata) -ST.var <- T_variability(sitedata) -str(sitedata) -?T_frequency -ExUSGSStreamTemp<-readNWISdv("01382310","00010","2011-01-01","2011-12-31",c("00001","00002","00003")) -sitedata<-subset(ExUSGSStreamTemp, select=c("site_no","Date","X_00010_00001","X_00010_00002","X_00010_00003")) -names(sitedata)<-c("siteID","Date","MaxT","MinT","MeanT") -str(sitedata) -myData <- read.csv(file.path(getwd(),myDir,myFile), stringAsFactors=FALSE) -?read.csv -myData <- read.csv(file.path(getwd(),myDir,myFile), stringsAsFactors=FALSE) -str(myData) -Col.Keep <- c("SiteID", "TimeValue", "max", "min", "mean") -x <- myData[myData[,"TimeFrame"]=="day",Col.Keep] -str(x) -Names.ST <- c("SiteID", "Date", "MaxT", "MinT", "MeanT") -names(x) <- Names.ST -x[,"Date"] <- as.date(x[,"Date"]) -x[,"Date"] <- as.Date(x[,"Date"]) -str(x) -str(sitedata) -T_frequency(sitedata) -a<-T_frequency(sitedata) -View(a) -T_magnitude(sitedata) -T_rateofchange(sitedata) -T_timing(sitedata) -T_variability(sitedata) -ExUSGSStreamTemp<-readNWISdv("01382310","00010","2011-01-01","2011-12-31",c("00001","00002","00003")) -sitedata<-subset(ExUSGSStreamTemp, select=c("site_no","Date","X_00010_00001","X_00010_00002","X_00010_00003")) -names(sitedata)<-c("siteID","Date","MaxT","MinT","MeanT") -(ST.freq <- T_frequency(sitedata)) -(ST.mag <- T_magnitude(sitedata)) -(ST.roc <- T_rateofchange(sitedata)) -(ST.tim <- T_timing(sitedata)) -(ST.var <- T_variability(sitedata)) -?T_variability 
-ExUSGSStreamTemp<-readNWISdv("01382310","00010","2011-01-01","2011-12-31",c("00001","00002","00003")) -sitedata<-subset(ExUSGSStreamTemp, select=c("site_no","Date","X_00010_00001","X_00010_00002","X_00010_00003")) -names(sitedata)<-c("siteID","Date","MaxT","MinT","MeanT") -T_variability(sitedata) -?ContDataQC -?T_frequency -ExUSGSStreamTemp<-readNWISdv("01187300","00010","2011-01-01","2011-12-31",c("00001","00002","00003")) -ExUSGSStreamTemp<-readNWISdv("01187300","00010","2013-01-01","2014-12-31",c("00001","00002","00003")) -sitedata<-subset(ExUSGSStreamTemp, select=c("site_no","Date","X_00010_00001","X_00010_00002","X_00010_00003")) -names(sitedata)<-c("siteID","Date","MaxT","MinT","MeanT") -T_frequency(sitedata) -ExUSGSStreamTemp<-readNWISdv("01187300","00010","2011-01-01","2011-12-31",c("00001","00002","00003")) -sitedata<-subset(ExUSGSStreamTemp, select=c("site_no","Date","X_00010_00001","X_00010_00002","X_00010_00003")) -myDF <- DATA_period_test2_Aw_20130101_20141231 -dim(myDF) -View(myDF) -myDF <- DATA_period_test2_Aw_20130101_20141231 -# parameters to use -Col.Keep <- c("SiteID", "Date", "Water.Temp.C" ) -# Col.Keep <- c(ContData.env$myName.SiteID -# , ContData.env$myName.Date -# , ContData.env$myName.WaterTemp -# ) -myDF.small <- myDF[,Col.Keep] -head(myDF.small) -?aggregate -myDF.small <- myDF[,Col.Keep] -myFUN <- c(max, min, mean) -myDF.stats <- aggregate(myDF.small[,2] ~ myDF.small[,1] + myDF.small[,2], FUN=myFUN) -myDF.stats <- aggregate(myDF.small[,2] ~ myDF.small[,1] + myDF.small[,2], FUN=mean) -myDF.stats <- aggregate(myDF.small[,2] ~ myDF.small[,1] + myDF.small[,2], FUN=mean, na.rm=TRUE) -myDF.stats -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2], FUN=mean, na.rm=TRUE) -View(myDF.stats) -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2], FUN=myFUN, na.rm=TRUE) -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x)), na.rm=TRUE) -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x, na.rm=TRUE), MinT=min(x, na.rm=TRUE), MeanT=mean(x, na.rm=TRUE))) -View(myDF.stats) -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x))) -myDF.stats -View(myDF.stats) -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,Col.Keep[1]] + myDF.small[,Col.Keep[2]] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) ) -head(myDF.stats ) -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) ) -head(myDF.stats) -Names.ST <- c("SiteID", "Date", "MaxT", "MinT", "MeanT") -names(myDF.stats) <- Names.ST -names(myDF.stats) -myDF <- DATA_period_test2_Aw_20130101_20141231 -Col.Keep <- c("SiteID", "Date", "Water.Temp.C" ) -myDF.small <- myDF[,Col.Keep] -myFUN <- c(max, min, mean) -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) ) -Names.ST <- c("SiteID", "Date", "MaxT", "MinT", "MeanT") -names(myDF.stats) <- Names.ST -head(myDF.stats) -names(myDF.stats) -myDF.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) ) -str(myDF.stats) -myDF.stats <- as.data.frame(aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) )) 
-str(myDF.stats) -x<- cbind(myDF.stats) -dim(x) -str(x) -x <- do.call(data.frame, myDF.stats) -str(x) -agg.stats <- data.frame(aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) )) -str(add.stats) -str(agg.stats) -agg.stats <- aggregate(myDF.small[,3] ~ myDF.small[,1] + myDF.small[,2] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) ) -df.stats <- do.call(data.frame, agg.stats) -Names.ST <- c("SiteID", "Date", "MaxT", "MinT", "MeanT") -names(df.stats) <- Names.ST -head(df.stats) -(ST.freq <- T_frequency(sitedata)) -(ST.freq <- T_frequency(sitedata)) -(ST.mag <- T_magnitude(sitedata)) -(ST.roc <- T_rateofchange(sitedata)) -(ST.tim <- T_timing(sitedata)) -myData <- DATA_period_test2_Aw_20130101_20141231 -fun.myDF <- myData -fun.col.SiteID <- "SiteID" -fun.col.Date <- "Date" -fun.col.Temp <- "Water.Temp.C" -agg.stats <- aggregate(fun.myDF[,fun.col.Temp] ~ fun.myDF[,fun.col.SiteID] + fun.myDF[,fun.col.Date] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) ) -View(agg.stats) -str(agg.stats) -df.stats <- do.call(data.frame, agg.stats) -View(df.stats) -myData <- DATA_period_test2_Aw_20130101_20141231 -fun.myDF <- myData -fun.col.SiteID <- "SiteID" -fun.col.Date <- "Date" -fun.col.Temp <- "Water.Temp.C" -agg.stats <- aggregate(fun.myDF[,fun.col.Temp] ~ fun.myDF[,fun.col.SiteID] + fun.myDF[,fun.col.Date] -, FUN=function(x) c(MaxT=max(x), MinT=min(x), MeanT=mean(x) ) ) -df.stats <- do.call(data.frame, agg.stats) -Names.ST <- c("SiteID", "Date", "MaxT", "MinT", "MeanT") -names(df.stats) <- Names.ST -View(df.stats) -(ST.freq <- T_frequency(sitedata)) -(ST.mag <- T_magnitude(sitedata)) -(ST.roc <- T_rateofchange(sitedata)) -sitedata <- df.stats -(ST.freq <- T_frequency(sitedata)) -(ST.mag <- T_magnitude(sitedata)) -dim(sitedata) -str(sitedata) -df.stats[,Names.ST[1]] <- as.character(df.stats[,Names.ST[1]]) -df.stats[,Names.ST[2]] <- as.Date(df.stats[,Names.ST[2]]) -str(df.stats) -sitedata <- df.stats -(ST.freq <- T_frequency(sitedata)) -(ST.mag <- T_magnitude(sitedata)) -warnings() -(ST.roc <- T_rateofchange(sitedata)) -(ST.tim <- T_timing(sitedata)) -(ST.var <- T_variability(sitedata)) -# Library Name -myLibrary <- "ContDataQC" # "ContDataQC","MMIcalc","MMIcalcNV","MBSStools","XC95" -# Load Library -library(devtools) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# Create Package # Document, Install, and Reload Library ## Generate Documentation setwd(paste0("./",myLibrary)) @@ -393,120 +510,3 @@ devtools::install(myLibrary) library(myLibrary,character.only = TRUE) # change wd back to package setwd(paste0("./",myLibrary)) -library(ContDataQC) -?Export.StreamThermal -# 1.3. Export.StreamThermal to use User data -myData <- DATA_period_test2_Aw_20130101_20141231 -sitedata <- Export.StreamThermal(myData) -require(StreamThermal) -(ST.freq <- T_frequency(sitedata)) -(ST.mag <- T_magnitude(sitedata)) -(ST.roc <- T_rateofchange(sitedata)) -(ST.tim <- T_timing(sitedata)) -(ST.var <- T_variability(sitedata)) # example in package doesn't work -# 1.1. Get USGS data -# code from StreamThermal T_frequency example -ExUSGSStreamTemp<-dataRetrieval::readNWISdv("01382310","00010","2011-01-01","2011-12-31",c("00001","00002","00003")) -sitedata<-subset(ExUSGSStreamTemp, select=c("site_no","Date","X_00010_00001","X_00010_00002","X_00010_00003")) -names(sitedata)<-c("siteID","Date","MaxT","MinT","MeanT") -knitr::kable(head(sitedata)) -# 1.2. 
Use ContDataQC SummaryStats Data -myFile <- "STATS_test2_Aw_20130101_20141231_Water.Temp.C.csv" -myDir <- "Data4_Stats" -myData <- read.csv(file.path(getwd(),myDir,myFile), stringsAsFactors=FALSE) -# 1.2. Use ContDataQC SummaryStats Data -myFile <- "STATS_test2_Aw_20130101_20141231_Water.Temp.C.csv" -myDir <- "Data4_Stats" -setwd("..") #resets to vignette dir when run as code chunk -myData <- read.csv(file.path(getwd(),myDir,myFile), stringsAsFactors=FALSE) -# Subset -Col.Keep <- c("SiteID", "TimeValue", "max", "min", "mean") -sitedata <- myData[myData[,"TimeFrame"]=="day",Col.Keep] -Names.ST <- c("SiteID", "Date", "MaxT", "MinT", "MeanT") -names(sitedata) <- Names.ST -# Convert date column to date type -sitedata[,"Date"] <- as.Date(sitedata[,"Date"]) -knitr::kable(head(sitedata)) -# 1.3. Use user data that has been QCed -myData <- DATA_period_test2_Aw_20130101_20141231 -sitedata <- Export.StreamThermal(myData) -knitr::kable(head(sitedata)) -# Library Name -myLibrary <- "ContDataQC" # "ContDataQC","MMIcalc","MMIcalcNV","MBSStools","XC95" -# Load Library -library(devtools) -getwd() -# generate Vignette -devtools::build_vignettes() -# Library Name -myLibrary <- "ContDataQC" # "ContDataQC","MMIcalc","MMIcalcNV","MBSStools","XC95" -# Load Library -library(devtools) -devtools::build_vignettes() -# Parameters -Selection.Operation <- c("GetGageData","QCRaw", "Aggregate", "SummaryStats") -Selection.Type <- c("Air","Water","AW","Gage","AWG","AG","WG") -Selection.SUB <- c("Data1_RAW","Data2_QC","Data3_Aggregated","Data4_Stats") -myDir.BASE <- getwd() -# Create data directories -myDir.create <- paste0("./",Selection.SUB[1]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[2]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[3]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -myDir.create <- paste0("./",Selection.SUB[4]) -ifelse(dir.exists(myDir.create)==FALSE,dir.create(myDir.create),"Directory already exists") -# Save example data (assumes directory ./Data1_RAW/ exists) -myData <- data_raw_test2_AW_20130426_20130725 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20130426_20130725.csv")) -myData <- data_raw_test2_AW_20130725_20131015 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20130725_20131015.csv")) -myData <- data_raw_test2_AW_20140901_20140930 -write.csv(myData,paste0("./",Selection.SUB[1],"/test2_AW_20140901_20140930.csv")) -myData <- data_raw_test4_AW_20160418_20160726 -write.csv(myData,paste0("./",Selection.SUB[1],"/test4_AW_20160418_20160726.csv")) -myFile <- "config.TZ.Central.R" -file.copy(file.path(path.package("ContDataQC"),"extdata",myFile),file.path(getwd(),Selection.SUB[1],myFile)) -devtools::build_vignettes() -# Document, Install, and Reload Library -## Generate Documentation -setwd(paste0("./",myLibrary)) -devtools::document() -## Install New Package (locally) -setwd("..") # return to root directory first -devtools::install(myLibrary) -## Reload library -library(myLibrary,character.only = TRUE) -# change wd back to package -setwd(paste0("./",myLibrary)) -library(ContDataQC) -??ContDataQC -# Document, Install, and Reload Library -## Generate Documentation -setwd(paste0("./",myLibrary)) -devtools::document() -## Install New Package (locally) -setwd("..") # return to root directory first -devtools::install(myLibrary) -## Reload library 
-library(myLibrary,character.only = TRUE)
-# change wd back to package
-setwd(paste0("./",myLibrary))
-# Library Name
-myLibrary <- "ContDataQC" # "ContDataQC","MMIcalc","MMIcalcNV","MBSStools","XC95"
-# Load Library
-library(devtools)
-# Document, Install, and Reload Library
-## Generate Documentation
-setwd(paste0("./",myLibrary))
-devtools::document()
-## Install New Package (locally)
-setwd("..") # return to root directory first
-devtools::install(myLibrary)
-## Reload library
-library(myLibrary,character.only = TRUE)
-# change wd back to package
-setwd(paste0("./",myLibrary))
-library(ContDataQC)
-??ContDataQC
diff --git a/DESCRIPTION b/DESCRIPTION
index 42e08ba..c0a0fa9 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: ContDataQC
 Title: Quality Control (QC) of Continuous Monitoring Data
-Version: 2.0.1.9041
+Version: 2.0.1.9043
 Authors@R: c(
 person("Erik W.", "Leppo", email="Erik.Leppo@tetratech.com",role=c("aut","cre")),
 person("Ann","Roseberry Lincoln", role="ctb"),
@@ -8,13 +8,7 @@ Authors@R: c(
 person("John", "van Sickles", role="ctb"))
 Description: Quality control checks on continuous data files from Hobo data loggers. Also aggregates and subsets files by a given date range. And runs basic statistics. Outputs reports to Word (requires knitr and Pandoc).
 Depends:
- R (>= 3.3.3),
- dataRetrieval,
- zoo,
- knitr,
- survival,
- doBy,
- rmarkdown
+ R (>= 3.3.3)
 License: CC0
 Encoding: UTF-8
 LazyData: true
@@ -22,5 +16,9 @@ URL: https://github.com/leppott/ContDataQC
 BugReports: https://github.com/leppott/ContDataQC/issues
 RoxygenNote: 6.0.1
 Suggests: knitr,
- rmarkdown
+ rmarkdown,
+ dataRetrieval,
+ zoo,
+ survival,
+ doBy
 VignetteBuilder: knitr
diff --git a/LOG.Items.20170920.131124.tab b/LOG.Items.20170920.131124.tab
deleted file mode 100644
index 81fad54..0000000
--- a/LOG.Items.20170920.131124.tab
+++ /dev/null
@@ -1,5 +0,0 @@
-"ItemID" "Status" "ItemName"
-1 "COMPLETE" "test2_AW_20130426_20130725.csv"
-2 "COMPLETE" "test2_AW_20130725_20131015.csv"
-3 "COMPLETE" "test2_AW_20140901_20140930.csv"
-4 "SKIPPED (Non-Match, SiteID)" "test4_AW_20160418_20160726.csv"
diff --git a/LOG.Items.20170920.131143.tab b/LOG.Items.20170920.131143.tab
deleted file mode 100644
index 7f0aaa7..0000000
--- a/LOG.Items.20170920.131143.tab
+++ /dev/null
@@ -1,4 +0,0 @@
-"ItemID" "Status" "ItemName"
-1 "COMPLETE" "QC_test2_Aw_20130426_20130725.csv"
-2 "COMPLETE" "QC_test2_Aw_20130725_20131015.csv"
-3 "COMPLETE" "QC_test2_Aw_20140901_20140930.csv"
diff --git a/NAMESPACE b/NAMESPACE
index f00b3ae..e4f6eca 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -1,5 +1,6 @@
 # Generated by roxygen2: do not edit by hand
+export(CompSiteCDF)
 export(ContDataQC)
 export(Export.IHA)
 export(Export.StreamThermal)
diff --git a/NEWS.md b/NEWS.md
index 31479b2..0aabe8e 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -2,22 +2,20 @@
 NEWS-ContDataQC
 ================
-    #> Last Update: 2017-09-20 13:49:24
+    #> Last Update: 2017-09-27 09:50:48
 Version history.
 Planned Updates
 ===============
-- Spell out "AW"" and other abbreviations (e.g., AirWater). 20170308. On hold.
+- ~~Spell out "AW" and other abbreviations (e.g., AirWater). 20170308. On hold.~~
-
-- Gaps in data not always evident in the plots. 20170308.
+- ~~Gaps in data not always evident in the plots. 20170308.~~
-
-- Use futile.logger to better log output for user. Issue \#29. 20170606.
+- ~~Use futile.logger to better log output for user. Issue \#29. 20170606.~~
-
-- Add option for Report output for PDF or DOCX or HTML. Defaults to DOCSX. Will need to change documentation for report functions. 20170823.
-
-- ~~Flesh out export for StreamThermal. 20170919.~~
+- ~~Add option for Report output for PDF or DOCX or HTML. Defaults to DOCX. Will need to change documentation for report functions. 20170823.~~
 - Debug Aggregate operation. 20170919.
@@ -27,12 +25,33 @@ Planned Updates
 - Update Vignette when done. 20170919.
+- Run devtools::check(). 20170926.
+
+v2.0.1.9043
+===========
+
+2017-09-27
+
+- DESCRIPTION. As suggested by check(), move packages from Depends to Suggests. 20170927.
+
+- Update ReadMe. 20170927.
+
+v2.0.1.9042
+===========
+
+2017-09-21
+
+- CompSiteCDF() to compare multiple sites on a single CDF. 20170921.
+
 v2.0.1.9041
 ===========
 2017-09-20
-- Export.StreamThermal. Rename from Export.ST() to Export.StreamThermal(). And update Vignette. 20170920.
+- Export.StreamThermal.
+- Rename from Export.ST() to Export.StreamThermal(). 20170920.
+- Complete function and documentation. 20170920.
+- Update Vignette. 20170920.
 - Export.IHA. Tweak opening text. 20170920.
diff --git a/NEWS.rmd b/NEWS.rmd
index ebb94e1..640820c 100644
--- a/NEWS.rmd
+++ b/NEWS.rmd
@@ -39,6 +39,20 @@ Version history.
 * Update Vignette when done. 20170919.
+* Run devtools::check(). 20170926.
+
+# v2.0.1.9043
+2017-09-27
+
+* DESCRIPTION. As suggested by check(), move packages from Depends to Suggests. 20170927.
+
+* Update ReadMe. 20170927.
+
+# v2.0.1.9042
+2017-09-21
+
+* CompSiteCDF() to compare multiple sites on a single CDF. 20170921.
+
 # v2.0.1.9041
 2017-09-20
diff --git a/R/CompSiteCDF.R b/R/CompSiteCDF.R
new file mode 100644
index 0000000..9699fc7
--- /dev/null
+++ b/R/CompSiteCDF.R
@@ -0,0 +1,176 @@
+#' CompSiteCDF, compare CDFs of sites
+#'
+#' Takes as input a data frame with a date column and up to 5 columns of parameter data.
+#' Column names are SiteIDs and values are daily means for some measurement.
+#'
+#' CDFs are generated for year, season, and year/season and saved to a PDF.
+#'
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Erik.Leppo@tetratech.com (EWL)
+# 20170921
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#' @param df.input Input data frame. Needs 6 columns (a date column plus up to 5 SiteID columns of daily means).
+#' @param dir.input Directory where the data file is located.
+#' @param dir.output Directory where the PDF file is to be saved.
+#' @param ParamName.xlab Parameter name for the x-axis on plots.
+#' @return Returns a PDF of CDFs.
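+#'
+#' @details Each site's CDF is computed with stats::ecdf() on the daily means;
+#' season assignments use the month-day season start values defined in
+#' ContData.env (config.R).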
+#' @keywords continuous data, CDF, site comparison
+#' @examples
+#' # example data
+#' df.data <- data_CompSiteCDF
+#' dim(df.data)
+#'
+#' # intended usage; the draft function currently reads
+#' # data-raw/CDF_WaterTemp_2014_MA.csv internally
+#' \dontrun{
+#' CompSiteCDF(df.data, getwd(), getwd(), ContData.env$myLab.WaterTemp)
+#' }
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#' @export
+CompSiteCDF <- function(df.input, dir.input, dir.output, ParamName.xlab){##FUNCTION.CompSiteCDF.START
+
+  # add year, month, season
+  # assume Date is POSIXct
+
+
+
+  #for testing load ContData.env
+  source(file.path(getwd(),"R","config.R"))
+
+  ParamName.xlab <- ContData.env$myLab.WaterTemp
+
+
+  #df.data <- data_CompSiteCDF
+  wd <- getwd()
+  myFile <- "CDF_WaterTemp_2014_MA.csv"
+  data.import <- read.csv(file.path(wd,"data-raw",myFile))
+
+
+  # Site Names (Columns)
+  Col.Sites <- names(data.import)[!(names(data.import) %in% ContData.env$myName.Date)]
+
+
+  # # Add columns
+  # myName.Yr <- "Year"
+  # myName.Mo <- "Month"
+  # myName.Season <- "Season"
+  # myName.YrSeason <- "YearSeason"
+
+
+
+  # add time period fields
+  data.import[,ContData.env$myName.Yr] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%Y")
+  data.import[,ContData.env$myName.Mo] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%m")
+  data.import[,ContData.env$myName.YrMo] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%Y%m")
+  data.import[,ContData.env$myName.MoDa] <- format(as.Date(data.import[,ContData.env$myName.Date]),format="%m%d")
+  # data.import[,ContData.env$myName.JuDa] <- as.POSIXlt(data.import[,ContData.env$myName.Date], format=ContData.env$myFormat.Date)$yday +1
+  # ## add Season fields
+  data.import[,ContData.env$myName.Season] <- NA
+  data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric("0101") & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Spring.Start)] <- "Winter"
+  data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Spring.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Summer.Start)] <- "Spring"
+  data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Summer.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Fall.Start)] <- "Summer"
+  data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Fall.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<as.numeric(ContData.env$myTimeFrame.Season.Winter.Start)] <- "Fall"
+  data.import[,ContData.env$myName.Season][as.numeric(data.import[,ContData.env$myName.MoDa])>=as.numeric(ContData.env$myTimeFrame.Season.Winter.Start) & as.numeric(data.import[,ContData.env$myName.MoDa])<=as.numeric("1231")] <- "Winter"
+  data.import[,ContData.env$myName.YrSeason] <- paste(data.import[,ContData.env$myName.Yr],data.import[,ContData.env$myName.Season],sep="")
+  #
+  View(data.import)
+
+
+  # calc CDF
+  x <- ecdf(data.import[,Col.Sites[1]])
+  plot(x, col="blue")
+
+  # plot
+  i=1
+
+  myDate <- format(Sys.Date(),"%Y%m%d")
+  myTime <- format(Sys.time(),"%H%M%S")
+  strFile <- "CompSiteCDF_"
+  strFile.Out <- paste(paste("CompSiteCDF",myDate,myTime,sep=ContData.env$myDelim),"pdf",sep=".")
+
+  # Color Blind Palette
+  # http://www.cookbook-r.com/Graphs/Colors_(ggplot2)/
+  # The palette with grey:
+  cbPalette <- c("#999999", "#E69F00", "#56B4E9", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7")
+
+  # The palette with black:
+  cbbPalette <- c("#000000", "#E69F00", "#56B4E9", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7")
+
+
+  #~~~~~~~~~~~~~~~~~~~
+  {
+  myDate <- format(Sys.Date(),"%Y%m%d")
+  myTime <- format(Sys.time(),"%H%M%S")
+  strFile <- "CompSiteCDF_"
+  strFile.Out <- paste(paste("CompSiteCDF",myDate,myTime,sep=ContData.env$myDelim),"pdf",sep=".")
+  #
+  pdf(file=strFile.Out, width=7, height=10)##PDF.START
+  par(mfrow=c(2,1))
+
+  myColors <- cbPalette #rainbow(length(Col.Sites))
+
+  # PLOT 1
+  for (j in 1:length(Col.Sites)){##FOR.j.START
+    # subset out NA
+    data.j <- data.import[,Col.Sites[j]]
+    # different first iteration
+    if (j==1) {##IF.j==1,START
+      hist(data.import[,Col.Sites[j]], prob=TRUE, border="white"
+           ,main="All Data", xlab=ParamName.xlab, ylab="Proportion = value")
+      box()
+    }##IF.j==1.END
+    # plot lines
+    lines(density(data.j, na.rm=TRUE), col=myColors[j], lwd=2)
+  }##FOR.j.END
+  legend("topright",Col.Sites,fill=myColors)
+
+
+  # Plot 2
+  myLWD <- 1.5
+  for (i in 1:length(Col.Sites)){##FOR.i.START
+    #myColors <- cbPalette #rainbow(length(Col.Sites))
+    if(i==1){##IF.i==1.START
+      plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=myLWD, do.p=FALSE #pch=19, cex=.75 #do.p=FALSE
+           #, col.01line="white"
+           , main="All Data", xlab=ParamName.xlab, ylab="Proportion <= value" )
+    } else {
+      plot(ecdf(data.import[,Col.Sites[i]]), col=myColors[i], verticals=TRUE, lwd=myLWD, do.p=FALSE, add=T)
+    }##IF.i==1.END
+  }##FOR.i.END
+  legend("bottomright",Col.Sites,fill=myColors)
+
+
+  dev.off() ##PDF.END
+  }
+  #~~~~~~~~~~~~~~~~~~~
+
+
+  #Plot Proportion equal to each value.
+  # Round to single digit first
+  x <- round(data.import$Browns,1)
+  y <- as.data.frame(table(x))
+
+  z <- sum(y$Freq)
+  y$Freq <- y$Freq/z
+  y$x <- as.numeric(y$x) # works for plot but converts to rowID
+
+
+  plot(y, type="l")
+
+
+  AA <- data.import$Browns[!is.na(data.import$Browns)]
+
+  hist(AA, prob=TRUE)
+  lines(density(AA), col="blue", lwd=2)
+  box()
+
+
+  cat(paste0("PDF created: ",strFile.Out))
+  flush.console()
+
+
+} #end of function; ##FUNCTION.CompSiteCDF.END
diff --git a/R/data.r b/R/data.r
index a87870f..553d21f 100644
--- a/R/data.r
+++ b/R/data.r
@@ -52,3 +52,12 @@
 #'
 "data_bio2rarify"
 #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#' data_CompSiteCDF
+#'
+#' Test data for CompSiteCDF(). Includes daily mean water temp (C) data for 5 sites.
+#'
+#' @format A data frame with 349 observations and 6 variables:
+#'
+#'
+"data_CompSiteCDF"
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/README.Rmd b/README.Rmd
index c528de1..ca6ce25 100644
--- a/README.Rmd
+++ b/README.Rmd
@@ -5,11 +5,11 @@
 output: github_document
-```{r, echo = FALSE}
+```{r setup, echo = FALSE}
 knitr::opts_chunk$set(
-  collapse = TRUE,
-  comment = "#>",
-  fig.path = "README-"
+  collapse = TRUE
+  , comment = "#>"
+  , fig.path = "README-"
 )
 ```
@@ -22,20 +22,18 @@ Quality control checks on continuous data. Example data is from a HOBO data log
 Installation
 -----------------
-'# Installing just this library (should get all dependancies)
+```{r, eval=FALSE}
+# Installing just this library (should get all dependencies)
 library(devtools)
 install_github("leppott/ContDataQC")
+```
-
-'# Installing dependancies separately
-'# set CRAN mirror
-'#(loads gui in R; in R-Studio select ## of mirror in Console pane)
-'# If know mirror can use "ind=" in 2nd statement and comment out (prefix line with #) the first.
-chooseCRANmirror()
-'#chooseCRANmirror(ind=21)
-'################################################
-'# must run "chooseCRANmirror()" by itself before running the rest of the script
+If dependent libraries do not load, you can install them separately.
-'# libraries to be installed
+```{r, eval=FALSE}
+# Choose a CRAN mirror (download site) first (can change number)
+chooseCRANmirror(ind=21)
+# libraries to be installed
 data.packages = c(
 "devtools" # install helper for non CRAN libraries
 ,"installr" # install helper
 ,"digest" # caused error in R v3.2.3 without it
 ,"dataRetrieval" # loads USGS data into R
 ,"knitr" # create documents in other formats (e.g., PDF or Word)
 ,"doBy" # summary stats
 ,"zoo" # z's ordered observations, use for rolling sd calc
 ,"htmltools" # needed for knitr and doesn't always install properly with Pandoc
 ,"rmarkdown" # needed for knitr and doesn't always install properly with Pandoc
 ,"htmltools" # a dependency that is sometimes missed.
 ,"evaluate" # a dependency that is sometimes missed.
 ,"highr" # a dependency that is sometimes missed.
 ,"rmarkdown" # a dependency that is sometimes missed.
-'# ,"reshape" # list to matrix
-'# ,"lattice" # plotting
-'# ,"waterData" # QC of hydro time series data
-'# ,"summaryBy" # used in summary stats
 )
 lapply(data.packages,function(x) install.packages(x))
+```
+
+Additionally, Pandoc is required for creating the reports and needs to be installed separately.
-'## pandoc
+```{r, eval=FALSE}
+## pandoc
 require(installr)
 install.pandoc()
-
+```
 Purpose
 --------------
 Built for a project for USEPA for Regional Monitoring Networks (RMN).
-Takes continuous data from data loggers and QCs it by checking for gross differences, spikes, rate of change differences, flat line (consecutive same values), and data gaps.
+Takes as input continuous data from data loggers and QCs it by checking for gross differences, spikes, rate of change differences, flat line (consecutive same values), and data gaps. The `ContDataQC` package provides an organized workflow to QC, aggregate, partition, and generate summary stats.
+
-Scripts provide a organized workflow to QC, aggregate, partition, and generate summary stats.
+The code was presented at the following workshops and was further developed under contract to USEPA.
-This gitrepository is a work in progress and should be considered draft.
+* Oct 2015, SWPBPA (Region 4 regional biologist meeting, Myrtle Beach, SC).
-The code was presented at the following workshops. Oct 2015, SWPBPA (Region 4 regional biologist meeting, Myrtle Beach, SC) Mar 2016, AMAAB (Region 3 regional biologist meeting, Cacapon, WV) Apr 2016, NWQMC (National Water Monitoring Council Conference, Tampa, FL).
+* Mar 2016, AMAAB (Region 3 regional biologist meeting, Cacapon, WV).
-Functions developed to help data generators handle data from continuous data sensors (e.g., HOBO data loggers).
+* Apr 2016, NWQMC (National Water Monitoring Council Conference, Tampa, FL).
+
-From a single function, ContDataQC(), can QC, aggregate, or calculate summary stats on data. Uses the USGS dataRetrieval library to get USGS gage data. Reports are generated in Word (through the use of knitr and Pandoc).
+Functions were developed to help data generators handle data from continuous data sensors (e.g., HOBO data loggers).
+
+From a single function, ContDataQC(), users can QC, aggregate, or calculate summary stats on data. `ContDataQC` uses the USGS dataRetrieval library to get USGS gage data. Reports are generated in Word (through the use of knitr and Pandoc).
 Usage
 ------------
-'# load library and dependant libraries
+Every time R is launched, the `ContDataQC` package needs to be loaded.
+```{r, eval=FALSE}
+# load library and dependent libraries
 require("ContDataQC")
+```
- Define working Directory
-'# if specify directory use "/" not "\" (as used in Windows) and leave off final "/" (example below).
-'#myDir.BASE <- "C:/Users/Erik.Leppo/Documents/NCEA_DataInfrastructure/Erik"
+The default working directory is based on how R was installed but is typically the user's 'My Documents' folder. You can change it through the menu bar in R (File - Change dir) or RStudio (Session - Set Working Directory). 
+"data_CompSiteCDF"
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/README.Rmd b/README.Rmd
index c528de1..ca6ce25 100644
--- a/README.Rmd
+++ b/README.Rmd
@@ -5,11 +5,11 @@ output: github_document
 
-```{r, echo = FALSE}
+```{r setup, echo = FALSE}
 knitr::opts_chunk$set(
-  collapse = TRUE,
-  comment = "#>",
-  fig.path = "README-"
+  collapse = TRUE
+  , comment = "#>"
+  , fig.path = "README-"
 )
 ```
@@ -22,20 +22,18 @@ Quality control checks on continuous data. Example data is from a HOBO data log
 Installation
 -----------------
-'# Installing just this library (should get all dependancies)
+```{r, eval=FALSE}
+# Installing just this library (should get all dependencies)
 library(devtools)
 install_github("leppott/ContDataQC")
+```
 
-'# Installing dependancies separately
-'# set CRAN mirror
-'#(loads gui in R; in R-Studio select ## of mirror in Console pane)
-'# If know mirror can use "ind=" in 2nd statement and comment out (prefix line with #) the first.
-chooseCRANmirror()
-'#chooseCRANmirror(ind=21)
-'################################################
-'# must run "chooseCRANmirror()" by itself before running the rest of the script
+If dependent libraries do not load, you can install them separately.
-'# libraries to be installed
+```{r, eval=FALSE}
+# Choose a CRAN mirror (download site) first (can change the number)
+chooseCRANmirror(ind=21)
+# libraries to be installed
 data.packages = c(
   "devtools"        # install helper for non CRAN libraries
   ,"installr"       # install helper
   ,"digest"         # caused error in R v3.2.3 without it
   ,"dataRetrieval"  # loads USGS data into R
   ,"knitr"          # create documents in other formats (e.g., PDF or Word)
   ,"doBy"           # summary stats
   ,"zoo"            # z's ordered observations, use for rolling sd calc
   ,"htmltools"      # needed for knitr and doesn't always install properly with Pandoc
   ,"rmarkdown"      # needed for knitr and doesn't always install properly with Pandoc
   ,"htmltools"      # a dependency that is sometimes missed.
   ,"evaluate"       # a dependency that is sometimes missed.
   ,"highr"          # a dependency that is sometimes missed.
   ,"rmarkdown"      # a dependency that is sometimes missed.
-'# ,"reshape" # list to matrix
-'# ,"lattice" # plotting
-'# ,"waterData" # QC of hydro time series data
-'# ,"summaryBy" # used in summary stats
   )
 lapply(data.packages,function(x) install.packages(x))
+```
+
+Additionally, Pandoc is required for creating the reports and needs to be installed separately.
 
-'## pandoc
+```{r, eval=FALSE}
+## pandoc
 require(installr)
 install.pandoc()
-
+```
 
 Purpose
 --------------
 
 Built for a project for USEPA for Regional Monitoring Networks (RMN).
 
-Takes continuous data from data loggers and QCs it by checking for gross differences, spikes, rate of change differences, flat line (consecutive same values), and data gaps.
+Takes as input continuous data from data loggers and QCs it by checking for gross differences, spikes, rate of change differences, flat line (consecutive same values), and data gaps. The `ContDataQC` package provides an organized workflow to QC, aggregate, partition, and generate summary stats.
+
+The code was presented at the following workshops and further developed under contract to USEPA.
 
-Scripts provide a organized workflow to QC, aggregate, partition, and generate summary stats.
+* Oct 2015, SWPBPA (Region 4 regional biologist meeting, Myrtle Beach, SC).
 
-This gitrepository is a work in progress and should be considered draft.
+* Mar 2016, AMAAB (Region 3 regional biologist meeting, Cacapon, WV).
 
-The code was presented at the following workshops. Oct 2015, SWPBPA (Region 4 regional biologist meeting, Myrtle Beach, SC) Mar 2016, AMAAB (Region 3 regional biologist meeting, Cacapon, WV) Apr 2016, NWQMC (National Water Monitoring Council Conference, Tampa, FL).
+* Apr 2016, NWQMC (National Water Monitoring Council Conference, Tampa, FL).
 
-Functions developed to help data generators handle data from continuous data sensors (e.g., HOBO data loggers).
+Functions were developed to help data generators handle data from continuous data sensors (e.g., HOBO data loggers).
 
-From a single function, ContDataQC(), can QC, aggregate, or calculate summary stats on data. Uses the USGS dataRetrieval library to get USGS gage data. Reports are generated in Word (through the use of knitr and Pandoc).
+From a single function, ContDataQC(), you can QC, aggregate, or calculate summary stats on data. `ContDataQC` uses the USGS dataRetrieval library to get USGS gage data. Reports are generated in Word (through the use of knitr and Pandoc).
 
 Usage
 ------------
-'# load library and dependant libraries
+Every time R is launched, the `ContDataQC` package needs to be loaded.
+```{r, eval=FALSE}
+# load library and dependent libraries
 require("ContDataQC")
+```
 
- Define working Directory
-'# if specify directory use "/" not "\" (as used in Windows) and leave off final "/" (example below).
-'#myDir.BASE <- "C:/Users/Erik.Leppo/Documents/NCEA_DataInfrastructure/Erik"
+The default working directory is based on how R was installed but is typically the user's 'My Documents' folder. You can change it through the menu bar in R (File - Change dir) or RStudio (Session - Set Working Directory). You can also change it from the command line.
+
+```{r, eval=FALSE}
+# if specifying a directory use "/" not "\" (as used in Windows) and leave off the final "/" (example below).
+#myDir.BASE <- "C:/Users/Erik.Leppo/Documents/NCEA_DataInfrastructure/Erik"
 myDir.BASE <- getwd()
 setwd(myDir.BASE)
-'# library (load any required helper functions)
-'#source(paste(myDir.BASE,"Scripts","fun.Master.R",sep="/"))
-'#####################################################################
-'# USER input in this section (see end of script for explanations)
-'#####################################################################
-'#
-'# PROMPT; Operation
-Selection.Operation <- c("GetGageData","QCRaw", "Aggregate", "SummaryStats")
-myData.Operation <- Selection.Operation[3] #number corresponds to intended operation in the line above
-'#
-'# PROMPT; Site ID
-'# single site; "ECO66G12"
-'# group of sites; c("test2", "HRCC", "PBCC", "ECO66G12", "ECO66G20", "ECO68C20", "01187300")
-myData.SiteID <- "ECO71F19"
-'#
-'# PROMPT; Data Type
-'# Type of data file
-Selection.Type <- c("Air","Water","AW","Gage","AWG","AG","WG") # only one at a time
-myData.Type <- Selection.Type[3] #number corresponds to intended operation in the line above
-'#
-'# PROMPT; Start Date
-'# YYYY-MM-DD ("-" delimiter), leave blank for all data ("1900-01-01")
-myData.DateRange.Start <- "2013-01-01"
-'#
-'# PROMPT; End Date
-'# YYYY-MM-DD ("-" delimiter), leave blank for all data (today)
-myData.DateRange.End <- "2014-12-31"
-'######################################################################
-'# PROMPT; SubDirectory, input file location. Leave blank for defaults
-Selection.SUB <- c("Data1_RAW","Data2_QC","Data3_Aggregated","Data4_Stats")
-myDir.SUB.import <- "" #Selection.SUB[2]
-'#
-'# PROMPT; SubDirectory, output file location. Leave blank for default.
-myDir.SUB.export <- "" #Selection.SUB[3]
-'#
-'#####################################################################
-'# Run the script with the above user defined values
-ContDataQC(myData.Operation
-           ,myData.SiteID
-           ,myData.Type
-           ,myData.DateRange.Start
-           ,myData.DateRange.End
-           ,myDir.BASE
-           ,myDir.SUB.import
-           ,myDir.SUB.export)
+```
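+
+With the library loaded and the working directory set, a single call runs one operation. Below is a minimal sketch; the operation, site ID, dates, and blank sub-directories are illustrative (blanks fall back to the package defaults).
+
+```{r, eval=FALSE}
+# example: download USGS gage data (all values are illustrative)
+ContDataQC("GetGageData"    # operation: GetGageData, QCRaw, Aggregate, or SummaryStats
+           , "01187300"     # site ID
+           , "Gage"         # data type
+           , "2013-01-01"   # start date, YYYY-MM-DD
+           , "2014-12-31"   # end date, YYYY-MM-DD
+           , myDir.BASE     # base directory (set above)
+           , ""             # input sub-directory, blank for default
+           , "")            # output sub-directory, blank for default
+```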
+
+
+Help
+------------
+Every function has a help file with a working example. There is also a vignette with descriptions and examples of all functions in the `ContDataQC` library.
+
+```{r, eval=FALSE}
+# To get help on a function
+# library(ContDataQC) # the library must be loaded before accessing help
+?ContDataQC
+```
+```{r, eval=FALSE}
+vignette("ContDataQC_Vignette",package="ContDataQC")
+```
diff --git a/README.md b/README.md
index 2483c52..139b216 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@ README-ContDataQC
 ================
-    #> Last Update: 2017-03-08 08:33:56
+    #> Last Update: 2017-09-27 09:12:41
 
 ContDataQC
 ==========
@@ -12,37 +12,94 @@ Quality control checks on continuous data. Example data is from a HOBO data logg
 Installation
 ------------
-'\# Installing just this library (should get all dependancies) library(devtools) install.git\_hub("leppott/ContDataQC")
-
-'\# Installing dependancies separately '\# set CRAN mirror '\#(loads gui in R; in R-Studio select \#\# of mirror in Console pane) '\# If know mirror can use "ind=" in 2nd statement and comment out (prefix line with \#) the first. chooseCRANmirror() '\#chooseCRANmirror(ind=21) '\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\# '\# must run "chooseCRANmirror()" by itself before running the rest of the script
-
-'\# libraries to be installed data.packages = c(
-"devtools" \# install helper for non CRAN libraries ,"installr" \# install helper ,"digest" \# caused error in R v3.2.3 without it ,"dataRetrieval" \# loads USGS data into R ,"knitr" \# create documents in other formats (e.g., PDF or Word) ,"doBy" \# summary stats ,"zoo" \# z's ordered observations, use for rolling sd calc ,"htmltools" \# needed for knitr and doesn't always install properly with Pandoc ,"rmarkdown" \# needed for knitr and doesn't always install properly with Pandoc ,"htmltools" \# a dependency that is sometimes missed. ,"evaluate" \# a dependency that is sometimes missed. ,"highr" \# a dependency that is sometimes missed. ,"rmarkdown" \# a dependency that is sometimes missed. '\# ,"reshape" \# list to matrix '\# ,"lattice" \# plotting '\# ,"waterData" \# QC of hydro time series data '\# ,"summaryBy" \# used in summary stats )
-
+``` r
+# Installing just this library (should get all dependencies)
+library(devtools)
+install_github("leppott/ContDataQC")
+```
+
+If dependent libraries do not load, you can install them separately.
+
+``` r
+# Choose a CRAN mirror (download site) first (can change the number)
+chooseCRANmirror(ind=21)
+# libraries to be installed
+data.packages = c(
+  "devtools"        # install helper for non CRAN libraries
+  ,"installr"       # install helper
+  ,"digest"         # caused error in R v3.2.3 without it
+  ,"dataRetrieval"  # loads USGS data into R
+  ,"knitr"          # create documents in other formats (e.g., PDF or Word)
+  ,"doBy"           # summary stats
+  ,"zoo"            # z's ordered observations, use for rolling sd calc
+  ,"htmltools"      # needed for knitr and doesn't always install properly with Pandoc
+  ,"rmarkdown"      # needed for knitr and doesn't always install properly with Pandoc
+  ,"htmltools"      # a dependency that is sometimes missed.
+  ,"evaluate"       # a dependency that is sometimes missed.
+  ,"highr"          # a dependency that is sometimes missed.
+  ,"rmarkdown"      # a dependency that is sometimes missed.
+  )
+ lapply(data.packages,function(x) install.packages(x))
+```
 
-'\#\# pandoc require(installr) install.pandoc()
+Additionally, Pandoc is required for creating the reports and needs to be installed separately.
+
+``` r
+## pandoc
+require(installr)
+install.pandoc()
+```
 
 Purpose
 -------
 
 Built for a project for USEPA for Regional Monitoring Networks (RMN).
 
-Takes continuous data from data loggers and QCs it by checking for gross differences, spikes, rate of change differences, flat line (consecutive same values), and data gaps.
+Takes as input continuous data from data loggers and QCs it by checking for gross differences, spikes, rate of change differences, flat line (consecutive same values), and data gaps. The `ContDataQC` package provides an organized workflow to QC, aggregate, partition, and generate summary stats.
+
+The code was presented at the following workshops and further developed under contract to USEPA.
 
-Scripts provide a organized workflow to QC, aggregate, partition, and generate summary stats.
+- Oct 2015, SWPBPA (Region 4 regional biologist meeting, Myrtle Beach, SC).
 
-This gitrepository is a work in progress and should be considered draft.
+- Mar 2016, AMAAB (Region 3 regional biologist meeting, Cacapon, WV).
 
-The code was presented at the following workshops. Oct 2015, SWPBPA (Region 4 regional biologist meeting, Myrtle Beach, SC) Mar 2016, AMAAB (Region 3 regional biologist meeting, Cacapon, WV) Apr 2016, NWQMC (National Water Monitoring Council Conference, Tampa, FL).
+- Apr 2016, NWQMC (National Water Monitoring Council Conference, Tampa, FL).
 
-Functions developed to help data generators handle data from continuous data sensors (e.g., HOBO data loggers).
+Functions were developed to help data generators handle data from continuous data sensors (e.g., HOBO data loggers).
 
-From a single function, ContDataQC(), can QC, aggregate, or calculate summary stats on data. Uses the USGS dataRetrieval library to get USGS gage data. Reports are generated in Word (through the use of knitr and Pandoc).
+From a single function, ContDataQC(), you can QC, aggregate, or calculate summary stats on data. `ContDataQC` uses the USGS dataRetrieval library to get USGS gage data. Reports are generated in Word (through the use of knitr and Pandoc).
 
 Usage
 -----
 
-'\# load library and dependant libraries require("ContDataQC")
+Every time R is launched, the `ContDataQC` package needs to be loaded.
+
+``` r
+# load library and dependent libraries
+require("ContDataQC")
+```
+
+The default working directory is based on how R was installed but is typically the user's 'My Documents' folder. You can change it through the menu bar in R (File - Change dir) or RStudio (Session - Set Working Directory). You can also change it from the command line.
+
+``` r
+# if specifying a directory use "/" not "\" (as used in Windows) and leave off the final "/" (example below).
+#myDir.BASE <- "C:/Users/Erik.Leppo/Documents/NCEA_DataInfrastructure/Erik"
+myDir.BASE <- getwd()
+setwd(myDir.BASE)
+```
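+
+With the library loaded and the working directory set, a single call runs one operation. Below is a minimal sketch; the operation, site ID, dates, and blank sub-directories are illustrative (blanks fall back to the package defaults).
+
+``` r
+# example: download USGS gage data (all values are illustrative)
+ContDataQC("GetGageData"    # operation: GetGageData, QCRaw, Aggregate, or SummaryStats
+           , "01187300"     # site ID
+           , "Gage"         # data type
+           , "2013-01-01"   # start date, YYYY-MM-DD
+           , "2014-12-31"   # end date, YYYY-MM-DD
+           , myDir.BASE     # base directory (set above)
+           , ""             # input sub-directory, blank for default
+           , "")            # output sub-directory, blank for default
+```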
+
+Help
+----
+
+Every function has a help file with a working example. There is also a vignette with descriptions and examples of all functions in the `ContDataQC` library.
+
+``` r
+# To get help on a function
+# library(ContDataQC) # the library must be loaded before accessing help
+?ContDataQC
+```
 
-Define working Directory '\# if specify directory use "/" not "" (as used in Windows) and leave off final "/" (example below). 
'\#myDir.BASE <- "C:/Users/Erik.Leppo/Documents/NCEA\_DataInfrastructure/Erik" myDir.BASE <- getwd() setwd(myDir.BASE) '\# library (load any required helper functions) '\#source(paste(myDir.BASE,"Scripts","fun.Master.R",sep="/")) '\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\# '\# USER input in this section (see end of script for explanations) '\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\# '\# '\# PROMPT; Operation Selection.Operation <- c("GetGageData","QCRaw", "Aggregate", "SummaryStats") myData.Operation <- Selection.Operation\[3\] \#number corresponds to intended operation in the line above '\# '\# PROMPT; Site ID '\# single site; "ECO66G12" '\# group of sites; c("test2", "HRCC", "PBCC", "ECO66G12", "ECO66G20", "ECO68C20", "01187300") myData.SiteID <- "ECO71F19" '\# '\# PROMPT; Data Type '\# Type of data file Selection.Type <- c("Air","Water","AW","Gage","AWG","AG","WG") \# only one at a time myData.Type <- Selection.Type\[3\] \#number corresponds to intended operation in the line above '\# '\# PROMPT; Start Date '\# YYYY-MM-DD ("-" delimiter), leave blank for all data ("1900-01-01") myData.DateRange.Start <- "2013-01-01" '\# '\# PROMPT; End Date '\# YYYY-MM-DD ("-" delimiter), leave blank for all data (today) myData.DateRange.End <- "2014-12-31" '\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\# '\# PROMPT; SubDirectory, input file location. Leave blank for defaults Selection.SUB <- c("Data1\_RAW","Data2\_QC","Data3\_Aggregated","Data4\_Stats") myDir.SUB.import <- "" \#Selection.SUB\[2\] '\# '\# PROMPT; SubDirectory, output file location. Leave blank for default. 
myDir.SUB.export <- "" \#Selection.SUB\[3\] '\# '\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\#\# '\# Run the script with the above user defined values ContDataQC(myData.Operation ,myData.SiteID ,myData.Type ,myData.DateRange.Start ,myData.DateRange.End ,myDir.BASE ,myDir.SUB.import ,myDir.SUB.export) +``` r +vignette("ContDataQC_Vignette",package="ContDataQC") +``` diff --git a/data-raw/CDF_WaterTemp_2014_MA.csv b/data-raw/CDF_WaterTemp_2014_MA.csv new file mode 100644 index 0000000..91d2241 --- /dev/null +++ b/data-raw/CDF_WaterTemp_2014_MA.csv @@ -0,0 +1,350 @@ +Date,Browns,Hubbard,Parkers,Cold,WBrSwift +2014-01-01,0.512020833,0.069083333,0.266916667,-0.004,0.009416667 +2014-01-02,0.321583333,0.078416667,0.156145833,-0.004,-0.004 +2014-01-03,0.024,0.079,0.102916667,-0.001083333,0.015833333 +2014-01-04,0.024,0.079,0.1385,-0.001666667,-0.0005 +2014-01-05,0.280625,0.079,0.207208333,-0.001666667,-0.003416667 +2014-01-06,0.519770833,0.079,0.216145833,-0.00925,-0.004 +2014-01-07,0.121979167,0.055083333,0.010479167,-0.004583333,-0.004 +2014-01-08,0.024,0.072583333,0.0616875,-0.004,-0.004 +2014-01-09,0.0245625,0.079583333,0.1300625,-0.004,-0.004 +2014-01-10,0.239125,0.079,0.235291667,-0.004,0.003 +2014-01-11,0.786895833,0.079,0.663625,-0.004583333,0.01525 +2014-01-12,0.845229167,0.079,0.9043125,0.021666667,-0.003416667 +2014-01-13,0.9039375,0.093,1.28425,0.125541667,0.035229167 +2014-01-14,1.567791667,0.149,2.149479167,0.395416667,0.246770833 +2014-01-15,1.443145833,0.1804375,1.413729167,0.522083333,0.261145833 +2014-01-16,1.765625,0.858833333,1.801043478,0.748375,0.575166667 +2014-01-17,1.907291667,0.8151875,2.0841875,0.506104167,0.922208333 +2014-01-18,1.665791667,0.548958333,1.617625,0.063020833,0.8194375 +2014-01-19,1.244895833,0.143208333,1.453645833,-0.004,0.575875 +2014-01-20,1.188875,0.330854167,1.465875,0.002375,0.736375 +2014-01-21,0.2408125,0.072020833,0.349854167,-0.005166667,0.009270833 +2014-01-22,0.024,0.079,0.06675,-0.002833333,-0.004 +2014-01-23,0.024,0.079,0.11225,-0.002833333,-0.004 +2014-01-24,0.025125,0.079,0.133125,-0.002833333,0.007083333 +2014-01-25,0.026270833,0.079,0.187416667,-0.004,0.024 +2014-01-26,0.079770833,0.079,0.172375,-0.004,0.024 +2014-01-27,0.276166667,0.079,0.281354167,-0.004,0.048833333 +2014-01-28,0.0285,0.079,0.176458333,-0.004,0.024 +2014-01-29,0.035375,0.079,0.187520833,-0.002833333,0.0245625 +2014-01-30,0.039958333,0.079,0.1770625,-0.002833333,0.0256875 +2014-01-31,0.231541667,0.079,0.434958333,-0.004,0.0739375 +2014-02-01,0.5375,0.079583333,0.913625,-0.0005,0.171833333 +2014-02-02,0.722083333,0.079583333,1.540729167,0.022833333,0.278 +2014-02-03,0.579729167,0.079,1.420583333,0.056979167,0.343729167 +2014-02-04,0.372583333,0.0825,0.800604167,0.065583333,0.360395833 +2014-02-05,0.146270833,0.079,0.4320625,0.065,0.138104167 +2014-02-06,0.0458125,0.083083333,0.2808125,0.097666667,0.123125 +2014-02-07,0.043395833,0.081916667,0.119645833,0.118666667,0.013375 +2014-02-08,0.0555625,0.081916667,0.160520833,0.128,0.043458333 +2014-02-09,0.161854167,0.098833333,0.494625,0.149,0.190125 +2014-02-10,0.132416667,0.10525,0.508145833,0.17875,0.1901875 +2014-02-11,0.040520833,0.087166667,0.115729167,0.1894375,0.0064375 +2014-02-12,0.041645833,0.087166667,0.139291667,0.169083333,0.008833333 +2014-02-13,0.047625,0.09475,0.145854167,0.150166667,0.024416667 +2014-02-14,0.042645833,0.086583333,0.187916667,0.132041667,0.05025 
+2014-02-15,0.10175,0.107,0.430479167,0.219166667,0.104666667 +2014-02-16,0.113416667,0.109333333,0.598833333,0.231416667,0.144083333 +2014-02-17,0.07325,0.109333333,0.3838125,0.238375,0.080958333 +2014-02-18,0.0523125,0.109333333,0.1615,0.241916667,0.043333333 +2014-02-19,0.06675,0.116333333,0.3304375,0.28425,0.077625 +2014-02-20,0.1839375,0.132666667,1.1516875,0.330958333,0.256604167 +2014-02-21,0.160270833,0.083666667,1.448458333,0.094875,0.061625 +2014-02-22,0.325041667,0.08425,1.418666667,-0.004,0.114375 +2014-02-23,0.481,0.114,1.694458333,-0.004,0.2305625 +2014-02-24,0.340604167,0.11925,1.518979167,0.0468125,0.211583333 +2014-02-25,0.0563125,0.1035,0.600270833,0.111666667,0.058416667 +2014-02-26,0.046458333,0.09125,0.291375,0.178270833,-0.004 +2014-02-27,0.030854167,0.085416667,0.199166667,0.1518125,-0.004 +2014-02-28,0.0245625,0.08425,0.184166667,0.100583333,-0.004 +2014-03-01,0.0410625,0.094166667,0.2345,0.0605625,-0.004 +2014-03-02,0.118479167,0.114583333,0.9725625,0.15725,0.076979167 +2014-03-03,0.1006875,0.1,0.796125,0.174104167,0.037625 +2014-03-04,0.050916667,0.093,0.247833333,0.095333333,-0.001666667 +2014-03-05,0.1051875,0.107583333,0.705729167,0.118666667,0.050395833 +2014-03-06,0.0798125,0.098833333,0.5159375,0.070854167,0.02375 +2014-03-07,0.061958333,0.09825,0.5229375,0.0335625,0.028833333 +2014-03-08,0.232895833,0.11575,1.655416667,0.10525,0.133583333 +2014-03-09,0.423354167,0.129083333,2.1703125,0.231791667,0.231083333 +2014-03-10,0.490729167,0.1546875,2.176166667,0.308958333,0.2993125 +2014-03-11,0.707958333,0.156895833,3.012208333,0.322166667,0.5150625 +2014-03-12,0.648416667,0.10875,2.317958333,0.086208333,0.347541667 +2014-03-13,0.123291667,0.077833333,0.389104167,-0.004,0.001229167 +2014-03-14,0.169333333,0.092416667,0.488270833,0.072416667,0.02075 +2014-03-15,0.7938125,0.156291667,2.075770833,0.265854167,0.352083333 +2014-03-16,0.471104167,0.132541667,1.550479167,0.316979167,0.289479167 +2014-03-17,0.387104167,0.111,1.0355,0.274541667,0.1143125 +2014-03-18,0.515145833,0.110479167,1.127458333,0.185020833,0.128229167 +2014-03-19,0.8966875,0.144520833,1.595791667,0.242729167,0.237208333 +2014-03-20,0.895770833,0.111645833,1.963395833,0.343666667,0.253145833 +2014-03-21,1.0946875,0.118083333,1.776791667,0.4196875,0.3411875 +2014-03-22,1.516041667,0.172354167,2.067895833,0.496916667,0.5011875 +2014-03-23,1.48775,0.1701875,2.188854167,0.5485,0.680229167 +2014-03-24,0.953270833,0.110333333,1.367395833,0.335625,0.194979167 +2014-03-25,1.024833333,0.10275,1.41575,0.215708333,0.155875 +2014-03-26,1.070770833,0.168291667,1.698,0.232458333,0.351875 +2014-03-27,1.0091875,0.158770833,1.6481875,0.1374375,0.3065625 +2014-03-28,2.06125,0.38275,2.8915625,0.178666667,0.907854167 +2014-03-29,2.352083333,0.345854167,3.002041667,-0.004,0.823479167 +2014-03-30,1.331041667,0.10875,1.092833333,-0.004,0.106979167 +2014-03-31,1.143479167,0.295208333,0.924375,-0.004,0.187854167 +2014-04-01,2.610895833,1.1053125,2.239541667,0.022,0.8819375 +2014-04-02,3.751979167,1.661125,2.899229167,0.090666667,1.299104167 +2014-04-03,4.670041667,2.1811875,3.606229167,0.213354167,1.847895833 +2014-04-04,4.965458333,2.423791667,3.9515625,0.302645833,2.2485625 +2014-04-05,4.71125,2.429770833,4.0304375,0.2785,2.373083333 +2014-04-06,4.766458333,2.550791667,4.322458333,0.831229167,2.660625 +2014-04-07,5.439,3.026791667,4.685145833,1.214375,3.042104167 +2014-04-08,6.821541667,3.846041667,5.741583333,0.950270833,3.884291667 +2014-04-09,6.941395833,4.464145833,5.885083333,1.548041667,4.0414375 
+2014-04-10,6.838791667,4.773770833,5.9778125,1.868729167,4.311854167 +2014-04-11,8.3546875,6.848541667,7.778583333,2.442145833,6.276770833 +2014-04-12,9.953395833,7.775145833,8.719833333,2.865791667,7.0854375 +2014-04-13,10.48754167,8.563895833,8.466604167,3.3743125,6.996625 +2014-04-14,12.8389375,10.87395833,10.94802083,5.525083333,9.297208333 +2014-04-15,12.88952083,10.9569375,11.55504167,5.565020833,10.11129167 +2014-04-16,8.296791667,5.385125,7.9656875,2.038145833,6.164791667 +2014-04-17,7.561666667,5.2508125,6.9755625,2.1456875,5.389270833 +2014-04-18,6.959854167,5.3945,6.511666667,2.334854167,5.153166667 +2014-04-19,8.323270833,6.680104167,7.605354167,3.859666667,5.978 +2014-04-20,9.0206875,7.583479167,8.22425,4.069270833,6.544083333 +2014-04-21,9.762145833,8.447729167,8.920416667,5.036833333,7.241708333 +2014-04-22,11.46504167,9.855708333,10.077375,6.265833333,8.259583333 +2014-04-23,10.72372917,9.5748125,9.8945625,5.804041667,8.28925 +2014-04-24,9.31375,7.735625,8.580083333,4.460468085,7.343255319 +2014-04-25,9.9341875,8.119979167,9.123425532,4.891895833,7.556583333 +2014-04-26,9.548375,8.476291667,8.8609375,5.340145833,7.651583333 +2014-04-27,9.104020833,8.469416667,8.18175,4.89175,7.588291667 +2014-04-28,9.626916667,8.737416667,8.8415,5.13975,7.867583333 +2014-04-29,9.0990625,8.662916667,8.644395833,5.545166667,7.8048125 +2014-04-30,7.5396875,6.462416667,7.057270833,3.908375,6.59725 +2014-05-01,8.924791667,7.295354167,7.637875,5.934,7.050708333 +2014-05-02,11.10885417,9.570041667,10.00704167,6.694166667,8.794333333 +2014-05-03,11.61133333,10.02616667,10.46295833,6.8320625,9.106625 +2014-05-04,11.45108333,10.12579167,10.8273125,6.1746875,9.012145833 +2014-05-05,10.97885417,10.1675,10.31297917,6.207833333,8.811291667 +2014-05-06,11.3308125,9.916416667,10.06264583,6.504541667,8.600583333 +2014-05-07,11.6225625,9.71925,10.18420833,6.435125,8.80625 +2014-05-08,11.670375,9.836875,10.892,7.892895833,9.4884375 +2014-05-09,12.13239583,10.5123125,11.260875,8.7064375,10.137875 +2014-05-10,14.4274375,12.98704167,12.8491875,10.41552083,11.75727083 +2014-05-11,16.04370833,14.82941667,14.80370833,11.36360417,13.22108333 +2014-05-12,16.614125,15.17779167,14.72191667,11.83033333,13.41270833 +2014-05-13,16.3585625,15.5449375,13.91291667,11.41585417,12.88995833 +2014-05-14,15.15360417,14.49525,12.86014583,11.50683333,12.051375 +2014-05-15,16.7283125,15.07120833,15.01077083,12.168875,13.66979167 +2014-05-16,16.44458333,15.2609375,14.98102083,12.79852083,13.9865 +2014-05-17,16.82672917,14.94904167,15.99104167,11.28522917,14.43522917 +2014-05-18,15.611,14.18779167,14.12654167,9.82025,13.17614583 +2014-05-19,14.3828125,13.43639583,12.7263125,9.440583333,12.01283333 +2014-05-20,15.28435417,14.04283333,13.8443125,10.32464583,12.60827083 +2014-05-21,15.86302083,14.75214583,13.91804167,11.0080625,12.8825 +2014-05-22,15.43329167,14.61910417,13.55289583,10.73347917,12.51164583 +2014-05-23,14.61408333,13.86747917,12.85677083,10.59564583,12.72895833 +2014-05-24,14.40272917,13.70516667,12.33,10.6225625,12.41814583 +2014-05-25,15.20170833,14.51920833,13.46214583,11.6068125,13.2648125 +2014-05-26,15.9641875,15.5776875,14.0873125,12.47089583,13.5314375 +2014-05-27,16.75085417,17.34808333,14.47977083,13.38858333,14.18802083 +2014-05-28,14.12729167,15.01791667,12.05370833,11.0655,12.08539583 +2014-05-29,13.35439583,13.51458333,12.15385417,10.74775,12.05502083 +2014-05-30,14.63979167,14.53910417,13.42147917,11.60570833,13.0541875 +2014-05-31,14.71564583,14.2461875,13.00854167,10.93439583,12.98420833 
+2014-06-01,14.61160417,14.57447917,13.351125,11.38316667,13.17935417 +2014-06-02,16.57008333,16.02533333,14.76358333,12.810375,14.18322917 +2014-06-03,18.40604167,18.04041667,16.27258333,13.968375,15.91683333 +2014-06-04,17.97535417,18.83858333,15.90058333,13.66104167,16.09477083 +2014-06-05,17.11552083,17.08722917,14.82416667,12.59227083,14.6470625 +2014-06-06,16.42552083,16.31314583,14.6854375,12.2733125,14.493 +2014-06-07,17.43972917,17.0228125,15.47270833,13.00897917,15.36027083 +2014-06-08,18.52897917,17.8934375,16.05254167,13.68152083,16.18377083 +2014-06-09,18.457,17.6880625,15.37804167,13.75439583,15.67010417 +2014-06-10,18.55925,17.76314583,16.06985417,14.31529167,16.04922917 +2014-06-11,17.901625,17.3833125,15.35927083,13.64545833,15.63591667 +2014-06-12,16.69814583,15.7423125,14.10083333,12.52291667,14.26375 +2014-06-13,16.83404167,15.6743125,14.10370833,12.84014583,14.41825 +2014-06-14,17.21064583,16.876,14.55245833,13.10408333,15.3370625 +2014-06-15,17.05945833,16.99110417,15.04814583,13.02470833,15.694625 +2014-06-16,17.69652083,17.71120833,15.3990625,13.32629167,16.0440625 +2014-06-17,19.4128125,19.28404167,16.76897917,14.94841667,17.5656875 +2014-06-18,21.19970833,20.32222917,18.06645833,15.53502083,18.96858333 +2014-06-19,20.85954167,20.57108333,17.494875,15.638375,18.77179167 +2014-06-20,18.40745833,19.0145,15.57997917,13.9534375,17.02341667 +2014-06-21,17.013,18.05204167,14.5090625,12.959875,15.9139375 +2014-06-22,16.8521875,17.48116667,14.47479167,13.28158333,16.06864583 +2014-06-23,17.72739583,17.49664583,14.9991875,13.91458333,16.82166667 +2014-06-24,18.5026875,18.41189583,15.65633333,14.63345833,17.75208333 +2014-06-25,19.81095833,19.37864583,16.98302083,16.31725,18.92747917 +2014-06-26,21.22889583,20.967875,18.37075,15.12633333,19.1751875 +2014-06-27,20.18310417,20.9101875,18.155125,14.5738125,18.99420833 +2014-06-28,19.87439583,20.26977083,17.27022917,14.61375,18.66879167 +2014-06-29,20.04695833,20.34835417,17.5075,15.42625,19.0555 +2014-06-30,20.51314583,20.55479167,17.97533333,16.00589583,19.63727083 +2014-07-01,21.59008333,21.5288125,18.74422917,17.55475,20.82795833 +2014-07-02,22.55352083,22.22047917,19.09166667,18.5001875,21.53614583 +2014-07-03,22.72252083,21.77025,19.22891667,17.65922917,20.2381875 +2014-07-04,20.46927083,20.08664583,19.070875,15.92375,18.83752083 +2014-07-05,19.06177083,18.3459375,18.28104167,14.45441667,17.55158333 +2014-07-06,19.70577083,18.687,18.103875,15.20110417,17.8788125 +2014-07-07,21.30777083,20.5045,19.07564583,16.33791667,19.36110417 +2014-07-08,22.21389583,21.48435417,19.9311875,16.41866667,19.9965625 +2014-07-09,22.8294375,21.74675,20.53608333,16.8566875,20.62452083 +2014-07-10,21.47464583,20.87833333,19.41445833,16.01364583,19.89358333 +2014-07-11,20.59852083,20.23772917,18.4276875,15.18802083,18.88927083 +2014-07-12,20.48420833,19.99520833,18.34295833,15.3049375,18.85183333 +2014-07-13,21.01947917,20.62445833,18.39054167,16.279125,19.51979167 +2014-07-14,21.45522917,20.60264583,19.09245833,17.63135417,20.77054167 +2014-07-15,22.07554167,21.40708333,19.21064583,17.71979167,20.33179167 +2014-07-16,21.41895833,20.62122917,18.95683333,16.4086875,19.117875 +2014-07-17,20.5345,19.62010417,18.31329167,15.16522917,18.57933333 +2014-07-18,19.71910417,19.7233125,17.65364583,14.8809375,18.32072917 +2014-07-19,18.56554167,18.99854167,16.39510417,14.54122917,17.733125 +2014-07-20,18.880875,18.93677083,16.93525,14.94541667,17.94310417 +2014-07-21,19.01360417,19.16379167,17.05535417,15.37845833,18.09797917 
+2014-07-22,20.59175,20.38872917,17.88410417,16.51445833,19.27510417 +2014-07-23,22.2246875,22.2710625,19.17995833,18.276625,20.89989583 +2014-07-24,21.61052083,21.84314583,19.03025,17.524,20.55097917 +2014-07-25,19.90104167,20.12572917,17.32766667,15.95554167,18.6344375 +2014-07-26,19.78160417,19.65327083,17.0743125,16.10558333,18.549125 +2014-07-27,20.36085417,19.9279375,16.88727083,16.21254167,18.4753125 +2014-07-28,20.5120625,20.23204167,17.05308333,16.20372917,18.54175 +2014-07-29,19.736875,19.49758333,17.47989583,15.10339583,18.41733333 +2014-07-30,18.68166667,18.32766667,16.61254167,14.62704167,17.92185417 +2014-07-31,18.39410417,18.41039583,16.10347917,14.27483333,17.41845833 +2014-08-01,20.0396875,19.38375,17.3589375,15.18264583,18.77185417 +2014-08-02,19.6655625,19.64914583,16.94802083,16.1793125,19.0400625 +2014-08-03,18.81825,19.18052083,16.24295833,15.74066667,18.03677083 +2014-08-04,19.77639583,19.49835417,17.4353125,16.33,19.0575625 +2014-08-05,20.7688125,20.061,18.2746875,16.498125,19.76872917 +2014-08-06,20.81658333,20.5154375,18.30891667,16.42341667,19.67672917 +2014-08-07,18.80827083,19.04360417,16.4946875,15.14302083,18.1408125 +2014-08-08,17.94570833,18.39952083,16.07760417,15.08597917,17.62047917 +2014-08-09,18.54495833,18.34791667,16.21910417,15.45214583,18.0255 +2014-08-10,19.07966667,18.5754375,16.38820833,15.75716667,18.50058333 +2014-08-11,18.8211875,18.660625,16.760625,16.02239583,18.86160417 +2014-08-12,18.8449375,18.7576875,16.50420833,15.92791667,18.951 +2014-08-13,18.85927083,18.24985417,16.51697917,15.79010417,18.225625 +2014-08-14,18.03385417,17.275125,16.8358125,14.65641667,17.09975 +2014-08-15,16.85975,16.84045833,15.44927083,13.35302083,16.12785417 +2014-08-16,16.97375,16.65416667,15.41725,13.70466667,15.9586875 +2014-08-17,17.933125,17.71064583,15.88241667,14.3379375,16.50364583 +2014-08-18,17.7005,17.896,15.38083333,14.479625,16.226625 +2014-08-19,16.867,17.27010417,14.88179167,13.93204167,15.71770833 +2014-08-20,17.37341667,17.32310417,15.30620833,14.1524375,16.4555625 +2014-08-21,17.78552083,18.00472917,15.029875,14.77629167,16.827 +2014-08-22,17.7530625,17.84470833,15.15441667,14.43770833,16.69914583 +2014-08-23,17.6583125,17.662375,15.26510417,14.45216667,17.02654167 +2014-08-24,17.0825625,17.22079167,15.27295833,14.19625,16.893375 +2014-08-25,18.03014583,18.0124375,15.90472917,15.02783333,17.4625 +2014-08-26,19.1963125,18.79797917,16.76191667,15.78327083,18.0414375 +2014-08-27,20.05375,19.69504167,17.4995625,16.18775,19.02327083 +2014-08-28,20.0271875,19.61897917,18.4,16.4355,18.884625 +2014-08-29,17.4603125,17.832125,16.15014583,15.13335417,17.10841667 +2014-08-30,17.237375,17.11314583,15.70095833,14.94635417,16.50441667 +2014-08-31,19.16720833,18.65416667,16.58710417,16.30754167,17.9985625 +2014-09-01,20.61802083,19.59877083,18.21916667,17.54570833,19.47116667 +2014-09-02,21.37810417,20.70408333,18.5973125,17.97289583,20.1598125 +2014-09-03,21.01833333,20.49485417,18.87129167,17.83722917,19.95475 +2014-09-04,19.0715625,18.885125,17.346625,16.85483333,18.62597917 +2014-09-05,20.07666667,19.5495625,17.5890625,17.4578125,19.08216667 +2014-09-06,22.06335417,21.3811875,18.930875,18.6408125,20.69745833 +2014-09-07,20.34439583,19.84752083,18.42525,17.26266667,19.39402083 +2014-09-08,17.27070833,17.60275,15.95127083,15.21389583,16.95795833 +2014-09-09,16.17197917,16.359,14.64747917,14.267875,15.8474375 +2014-09-10,16.09702083,16.22602083,14.3903125,14.29908333,15.64735417 +2014-09-11,17.0173125,16.7993125,14.92570833,14.49558333,15.86485417 
+2014-09-12,16.584375,16.03083333,14.77522917,13.99660417,15.38758333 +2014-09-13,15.10602083,14.37172917,12.9761875,12.219375,14.053375 +2014-09-14,14.23391667,13.72641667,13.1020625,11.41941667,13.36697917 +2014-09-15,12.84002083,13.05522917,12.155625,10.37035417,12.50210417 +2014-09-16,12.92608333,13.2751875,11.906,10.969875,12.56920833 +2014-09-17,12.8616875,12.68166667,12.20616667,10.46202083,12.36085417 +2014-09-18,12.4741875,12.41741667,11.829,10.3444375,12.22927083 +2014-09-19,12.03054167,11.64002083,11.197375,9.778208333,11.44316667 +2014-09-20,11.295,11.40320833,10.59266667,9.6131875,11.09570833 +2014-09-21,14.44229167,13.7910625,12.98610417,11.86675,13.06858333 +2014-09-22,15.40454167,14.26166667,14.09166667,12.43958333,13.84166667 +2014-09-23,12.37429167,12.28975,11.986125,11.23847917,12.03252083 +2014-09-24,12.6440625,12.45345833,11.75820833,11.38079167,12.37764583 +2014-09-25,13.21870833,13.00929167,11.8369375,11.58279167,12.81691667 +2014-09-26,13.6389375,13.4751875,12.5156875,11.7441875,13.5255 +2014-09-27,14.23127083,13.81704167,12.9230625,12.26841667,13.97872917 +2014-09-28,15.08935417,14.79895833,13.82816667,12.98020833,14.86995833 +2014-09-29,15.84208333,15.3625,14.17472917,13.27779167,15.25329167 +2014-09-30,15.218375,15.17314583,13.3860625,13.35158333,14.845125 +2014-10-01,14.30414583,14.46804167,12.87639583,13.06927083,14.31652083 +2014-10-02,13.906,14.26741667,12.76714583,12.94689583,13.91027083 +2014-10-03,12.75910417,13.49283333,12.09572917,12.29460417,12.94125 +2014-10-04,13.1804375,13.19416667,12.30920833,11.73277083,12.77033333 +2014-10-05,12.17904167,11.73572917,11.72425,9.829916667,11.7716875 +2014-10-06,10.3769375,10.22114583,10.07572917,8.540270833,10.094625 +2014-10-07,12.32916667,11.4545,11.4875625,10.68114583,11.45420833 +2014-10-08,14.27591667,13.09602083,12.93116667,11.60995833,13.49410417 +2014-10-09,12.2718125,11.88341667,11.34472917,9.78675,11.77410417 +2014-10-10,10.52433333,10.3911875,9.851208333,9.027708333,9.749979167 +2014-10-11,9.8173125,9.747604167,9.211645833,8.316958333,9.273125 +2014-10-12,8.680583333,8.875458333,9.357979167,7.645458333,9.047020833 +2014-10-13,8.807395833,8.4055625,8.2998125,7.013541667,8.159708333 +2014-10-14,12.36608333,11.006,11.110375,9.866333333,10.80689583 +2014-10-15,15.21397917,13.65847917,13.05916667,12.207375,13.27197917 +2014-10-16,16.6336875,15.80729167,14.77364583,13.982125,14.79902083 +2014-10-17,15.83416667,15.42947917,15.1401875,12.82454167,14.61041667 +2014-10-18,14.96495833,13.90233333,13.5276875,11.77422917,13.55189583 +2014-10-19,12.57827083,11.65479167,11.808375,9.403666667,11.62975 +2014-10-20,9.262229167,8.745791667,9.346729167,6.859333333,8.614895833 +2014-10-21,10.06120833,9.094729167,9.647229167,7.887375,9.324833333 +2014-10-22,10.96075,9.905729167,10.1108125,8.6931875,10.305375 +2014-10-23,10.22870833,9.712395833,9.819833333,9.140270833,9.934708333 +2014-10-24,10.35883333,9.6989375,10.10466667,8.536541667,10.02897917 +2014-10-25,9.800125,9.534625,9.6345625,8.445145833,9.667833333 +2014-10-26,9.719583333,9.517708333,9.466395833,8.355708333,9.64025 +2014-10-27,9.165125,9.1535625,9.254833333,7.816791667,9.134791667 +2014-10-28,8.870270833,8.6718125,8.850854167,7.900604167,8.816729167 +2014-10-29,10.9388125,9.681541667,9.783208333,9.2350625,9.968833333 +2014-10-30,9.622125,8.536833333,9.174645833,7.983354167,8.924291667 +2014-10-31,7.993583333,7.123083333,8.048020833,6.555791667,7.668666667 +2014-11-01,7.632666667,7.1288125,7.689770833,6.228354167,7.564145833 
+2014-11-02,6.1378125,5.875416667,6.439583333,4.708041667,5.959145833 +2014-11-03,4.421354167,4.210520833,5.244291667,3.237416667,4.524604167 +2014-11-04,5.633895833,5.000854167,5.682979167,5.114875,5.431666667 +2014-11-05,7.672604167,6.659958333,6.795041667,6.3474375,7.121770833 +2014-11-06,7.255583333,6.590583333,6.605854167,5.846125,6.575541667 +2014-11-07,7.493479167,6.595083333,7.020875,5.372270833,6.871354167 +2014-11-08,5.2848125,4.969166667,5.740395833,3.445770833,5.09825 +2014-11-09,5.5045625,5.193041667,5.870791667,4.000333333,5.542895833 +2014-11-10,4.999375,4.895875,5.456770833,3.715,5.1899375 +2014-11-11,5.699416667,5.520125,5.728708333,4.4706875,5.5295625 +2014-11-12,8.383020833,7.594833333,7.692833333,6.410354167,7.706416667 +2014-11-13,6.109458333,5.912916667,6.313666667,4.116354167,5.8800625 +2014-11-14,4.671270833,4.460541667,5.383895833,2.504541667,4.691645833 +2014-11-15,2.3238125,2.540208333,3.6819375,0.8898125,2.59675 +2014-11-16,1.508666667,1.573416667,3.1783125,0.410666667,1.771895833 +2014-11-17,3.1071875,2.6325,3.8085625,1.075625,2.8869375 +2014-11-18,3.505979167,2.838604167,3.554833333,1.620354167,3.137708333 +2014-11-19,0.590270833,0.47575,1.817895833,0.049125,0.948625 +2014-11-20,0.858666667,0.980541667,2.301020833,0.107645833,1.243791667 +2014-11-21,0.309791667,0.777979167,1.969354167,0.0406875,0.7805625 +2014-11-22,0.194729167,0.440458333,1.768645833,0.058520833,0.2361875 +2014-11-23,1.826354167,2.268020833,3.3055,0.398229167,1.918041667 +2014-11-24,4.531645833,3.8135,4.773104167,2.873145833,4.205416667 +2014-11-25,6.721125,4.973604167,6.133875,5.6979375,5.380354167 +2014-11-26,3.831270833,2.645875,3.933854167,2.382541667,3.0285 +2014-11-27,1.470479167,1.1123125,2.584583333,0.488333333,1.749979167 +2014-11-28,1.450104167,1.000520833,2.664791667,0.3348125,1.673104167 +2014-11-29,0.354666667,0.167916667,1.785395833,0.0488125,0.391145833 +2014-11-30,1.812958333,1.191041667,2.58275,0.52,1.035645833 +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, +,,,,, diff --git a/data-raw/ProcessData_CompSiteCDF.R b/data-raw/ProcessData_CompSiteCDF.R new file mode 100644 index 0000000..8976c18 --- /dev/null +++ b/data-raw/ProcessData_CompSiteCDF.R @@ -0,0 +1,33 @@ +# Prepare data for example for CompSiteCDF +# Water Temp Data by Date (x) and Sites (y) +# +# Erik.Leppo@tetratech.com +# 20170921 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# 0. Prep#### +wd <- getwd() # assume is package directory +#library(devtools) + +# 1. Get data and process##### +# 1.1. Import Data +myFile <- "CDF_WaterTemp_2014_MA.csv" +data.import <- read.csv(file.path(wd,"data-raw",myFile)) +# 1.2. Process Data +View(data.import) +# QC check +dim(data.import) + +##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# 2. 
Save as RDA for use in package####
+#
+data_CompSiteCDF <- data.import
+devtools::use_data(data_CompSiteCDF, overwrite = TRUE)
+
+# # # for quick naming
+# data_raw_test4_mismatchtimes <- data.import
+# devtools::use_data(data_raw_test4_mismatchtimes,overwrite=TRUE)
+
+# # as part of help file
+# myData <- data.import
+# write.csv(myData,paste0("./",Selection.SUB[1],"/test4_AW_20160418_20160726.csv"))
diff --git a/data/data_CompSiteCDF.rda b/data/data_CompSiteCDF.rda
new file mode 100644
index 0000000000000000000000000000000000000000..d1a941e8b80e633a3381cc90104b8b6ed3150634
Binary files /dev/null and b/data/data_CompSiteCDF.rda differ

diff --git a/inst/extdata/ContDataQC_LibraryCreation.Rmd b/inst/extdata/ContDataQC_LibraryCreation.Rmd
new file mode 100644
index 0000000..ab9d6cb
--- /dev/null
+++ b/inst/extdata/ContDataQC_LibraryCreation.Rmd
@@ -0,0 +1,127 @@
+---
+title: "ContDataQC Library Creation"
+output: html_notebook
+---
+
+Helper code for creating the library.
+
+Erik.Leppo@tetratech.com
+2017-09-26
+
+# Package
+
+Package related code.
+
+## Vignette
+```{r, eval=FALSE}
+# generate Vignette
+library(ContDataQC)
+library(devtools)
+devtools::build_vignettes()
+
+devtools::use_vignette("ContDataQC_Vignette")
+```
+Note: devtools::build() is often more useful, as it also builds the vignettes.
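+
+A quick sketch of that alternative (standard devtools; run from the package directory):
+
+```{r, eval=FALSE}
+# build the source package (tarball); vignettes are built as part of the build
+devtools::build()
+```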
+
+## Create Package
+Use the code below to create the package. This assumes you are in the development package directory.
+
+```{r, eval=FALSE}
+# Library Name
+myLibrary <- "ContDataQC"
+# Load Library
+library(devtools)
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Create Package
+# create(myLibrary)
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Document, Install, and Reload Library
+## Generate Documentation
+setwd(paste0("./",myLibrary))
+devtools::document()
+## Install New Package (locally)
+setwd("..") # return to root directory first
+devtools::install(myLibrary)
+## Reload library
+library(myLibrary,character.only = TRUE)
+# change wd back to package
+setwd(paste0("./",myLibrary))
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
+
+After creating the package, restart R within RStudio (Ctrl + Shift + F10) and reload the library.
+
+```{r, eval=FALSE}
+# Restart R within RStudio:  Ctrl + Shift + F10
+library(ContDataQC)
+??ContDataQC
+?fun.Export.IHA
+?Export.StreamThermal
+```
+
+## Including Data
+All data should have already been created.
+
+## Check
+
+```{r, eval=FALSE}
+# Check for errors (or press Ctrl/Cmd + Shift + E in RStudio)
+#http://r-pkgs.had.co.nz/check.html
+devtools::check()
+```
+
+If you use RStudio (Build - Check Package), the output is easier to read.
+
+## Test
+Tests should use testthat, but the package is not at that stage yet.
+
+In RStudio: Build - Test Package.
+
+
+# Other Stuff
+
+```{r, eval=FALSE}
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Upload to Github via GitHub Desktop utility
+# 0. download from web via "clone or download" via "Open in Desktop" (GitHub Desktop) if not already in GitHub Desktop
+# 1. Make changes in download/clone folder. (done above)
+# 2. Open GH Desktop, commit changes, then sync.
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# install from GitHub (via devtools)
+devtools::install_github(paste0("leppott/",myLibrary))
+#
+
+
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# remove installed packages (if needed for troubleshooting)
+search() # find
+#detach(3) # remove by number
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+# to build package
+#https://thepoliticalmethodologist.com/2014/08/14/building-and-maintaining-r-packages-with-devtools-and-roxygen2/
+# To build the package as a compressed file in your working directory, run build(current.code, path=getwd()).
+
+# to save internal data for examples
+# example
+#http://r-pkgs.had.co.nz/data.html#data-sysdata
+# have to be at root directory (above package)
+#devtools::use_data(NV.predictors,NV.bugs,pkg="MMIcalcNV",internal=TRUE,overwrite=TRUE)
+## verify with data()
+
+# To save RMD files
+# http://stackoverflow.com/questions/30377213/how-to-include-rmarkdown-file-in-r-package
+# /pkg/inst/rmd/
+# system.file("rmd/file.Rmd", package="packagename")
+#
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#https://hilaryparker.com/2014/04/29/writing-an-r-package-from-scratch/
+# Create Package
+# create(myLibrary)
+
+```
+
diff --git a/inst/extdata/ContDataQC_LibraryCreation.nb.html b/inst/extdata/ContDataQC_LibraryCreation.nb.html
new file mode 100644
index 0000000..6efcbf8
--- /dev/null
+++ b/inst/extdata/ContDataQC_LibraryCreation.nb.html
@@ -0,0 +1,354 @@
+[354 lines of rendered HTML notebook omitted: a generated copy of ContDataQC_LibraryCreation.Rmd above, plus HTML boilerplate and a base64-encoded copy of the Rmd source.]

diff --git a/man/CompSiteCDF.Rd b/man/CompSiteCDF.Rd
new file mode 100644
index 0000000..9c0ffe6
--- /dev/null
+++ b/man/CompSiteCDF.Rd
@@ -0,0 +1,47 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/CompSiteCDF.R
+\name{CompSiteCDF}
+\alias{CompSiteCDF}
+\title{CompSiteCDF, compare CDFs of sites}
+\usage{
+CompSiteCDF(df.input, dir.input, dir.output, ParamName.xlab)
+}
+\arguments{
+\item{df.input}{Input data as CSV. Needs 6 columns (date and up to 5 SiteIDs).}
+
+\item{dir.input}{Directory where the data file is located.}
+
+\item{dir.output}{Directory where the PDF file is to be saved.}
+
+\item{ParamName.xlab}{Parameter name for the x-axis on plots.}
+}
+\value{
+Returns a PDF of CDFs.
+}
+\description{
+Takes as input a data frame with date and up to 5 columns of parameter data.
+Column names are SiteIDs and values are daily means for some measurement.
+}
+\details{
+CDFs are generated for year, season, and year/season and saved to a PDF.
+}
+\examples{
+# load example data (the CSV behind it ships in data-raw)
+df.data <- data_CompSiteCDF
+dim(df.data)
+
+# compare site CDFs; file name, directories, and axis label are illustrative
+\dontrun{
+CompSiteCDF("CDF_WaterTemp_2014_MA.csv", "data-raw", getwd()
+            , "Water Temperature (C)")
+}
+}
+\keyword{CDF}
+\keyword{comparison}
+\keyword{continuous}
+\keyword{data}
+\keyword{site}
diff --git a/man/data_CompSiteCDF.Rd b/man/data_CompSiteCDF.Rd
new file mode 100644
index 0000000..9597b4b
--- /dev/null
+++ b/man/data_CompSiteCDF.Rd
@@ -0,0 +1,14 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/data.r
+\docType{data}
+\name{data_CompSiteCDF}
+\alias{data_CompSiteCDF}
+\title{data_CompSiteCDF}
+\format{a data frame with 349 observations and 6 variables: Date plus daily
+mean water temperature (C) for the sites Browns, Hubbard, Parkers, Cold,
+and WBrSwift.}
+\usage{
+data_CompSiteCDF
+}
+\description{
+Test data for CompSiteCDF(). Includes daily mean water temperature (C) data for 5 sites.
+}
+\keyword{datasets}
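+% Example added for illustration (mirrors the @examples in R/data.r):
+\examples{
+str(data_CompSiteCDF)
+}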