diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8154873..a2e34ed 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -33,7 +33,7 @@ jobs: - name: Build run: sbt package - name: Test - run: sbt test + run: sbt test scalafmtCheckAll - uses: actions/upload-artifact@v3 with: name: jars diff --git a/.scalafmt.conf b/.scalafmt.conf new file mode 100644 index 0000000..0c42578 --- /dev/null +++ b/.scalafmt.conf @@ -0,0 +1,18 @@ +version = 3.7.14 +runner.dialect = scala213source3 +align = none +align.openParenCallSite = false +align.openParenDefnSite = false +continuationIndent.defnSite = 2 +danglingParentheses.preset = true +docstrings.style = Asterisk +maxColumn = 120 +importSelectors = singleLine +rewrite.redundantBraces.stringInterpolation = true +rewrite.rules = [ + RedundantParens, + PreferCurlyFors, + SortImports, +] +runner.fatalWarnings = true +newlines.afterCurlyLambdaParams = keep diff --git a/README.md b/README.md index 22d054b..ead2f33 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ http://lucidsoftware.github.io/relate/ [![Build Status](https://travis-ci.com/lucidsoftware/relate.svg)](https://travis-ci.com/lucidsoftware/relate) -[![Maven Version](https://img.shields.io/maven-central/v/com.lucidchart/relate_2.12.svg)](https://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.lucidchart%22%20AND%20a%3A%22relate_2.12%22) +[![Maven Version](https://img.shields.io/maven-central/v/com.lucidchart/relate_2.13.svg)](https://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.lucidchart%22%20AND%20a%3A%22relate_2.13%22) [![Join the chat at https://gitter.im/lucidsoftware/relate](https://badges.gitter.im/lucidsoftware/relate.svg)](https://gitter.im/lucidsoftware/relate?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) Relate is a lightweight, blazingly fast database access layer for Scala that abstracts the idiosyncricies of the JDBC while keeping complete control over the SQL. 
diff --git a/build.sbt b/build.sbt index c48cf55..d2bd6f4 100644 --- a/build.sbt +++ b/build.sbt @@ -1,26 +1,20 @@ import com.lucidchart.sbtcross.ProjectAggregateArgument.toArgument lazy val macros = project.in(file("macros")).cross.dependsOn(relate) -lazy val `macros2.11` = macros("2.11.12") -lazy val `macros2.12` = macros("2.12.11") -lazy val `macros2.13` = macros("2.13.2") -lazy val macrosAggregate = macros.aggregate(`macros2.11`, `macros2.12`, `macros2.13`).settings( +lazy val `macros2.13` = macros("2.13.12") +lazy val macrosAggregate = macros.aggregate( `macros2.13`).settings( publish / skip := true ) lazy val relate = project.in(file("relate")).cross -lazy val `relate2.11` = relate("2.11.12") -lazy val `relate2.12` = relate("2.12.11") -lazy val `relate2.13` = relate("2.13.2") -lazy val relateAggregate = relate.aggregate(`relate2.11`, `relate2.12`, `relate2.13`).settings( +lazy val `relate2.13` = relate("2.13.12") +lazy val relateAggregate = relate.aggregate(`relate2.13`).settings( publish / skip := true ) lazy val postgres = project.in(file("postgres")).cross.dependsOn(relate) -lazy val `postgres2.11` = postgres("2.11.12") -lazy val `postgres2.12` = postgres("2.12.11") -lazy val `postgres2.13` = postgres("2.13.2") -lazy val postgresAggregate = postgres.aggregate(`postgres2.11`, `postgres2.12`, `postgres2.13`).settings( +lazy val `postgres2.13` = postgres("2.13.12") +lazy val postgresAggregate = postgres.aggregate(`postgres2.13`).settings( publish / skip := true ) diff --git a/macros/build.sbt b/macros/build.sbt index b1d1b67..bd5dea1 100644 --- a/macros/build.sbt +++ b/macros/build.sbt @@ -6,11 +6,6 @@ libraryDependencies ++= Seq( "org.specs2" %% "specs2-mock" % "4.6.0" % Test, ) -libraryDependencies ++= (CrossVersion.binaryScalaVersion(scalaVersion.value) match { - case "2.11" | "2.12" => Seq(compilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full)) - case _ => Seq.empty -}) - moduleName := "relate-macros" scalacOptions += 
"-language:experimental.macros" diff --git a/macros/src/main/scala/com/lucidchart/open/relate/macros/RowParserImpl.scala b/macros/src/main/scala/com/lucidchart/open/relate/macros/RowParserImpl.scala index be66e3a..62bcda2 100644 --- a/macros/src/main/scala/com/lucidchart/open/relate/macros/RowParserImpl.scala +++ b/macros/src/main/scala/com/lucidchart/open/relate/macros/RowParserImpl.scala @@ -26,18 +26,17 @@ class RowParserImpl(val c: Context) { val opts: AnnotOpts = c.prefix.tree match { case q"new Record(..$params)" => - val paramTrees: Map[String, Tree] = params.map { - case q"$optNameAst -> $optValueAst" => - val optName = optNameAst match { - case Literal(Constant(optName: String)) => optName - case name => c.abort(name.pos, "Keys must be literal strings") - } - - if (!validOptions.contains(optName)) { - c.abort(optNameAst.pos, s"$optName is an invalid option. Valid options: ${validOptions.mkString(", ")}") - } - - optName -> optValueAst + val paramTrees: Map[String, Tree] = params.map { case q"$optNameAst -> $optValueAst" => + val optName = optNameAst match { + case Literal(Constant(optName: String)) => optName + case name => c.abort(name.pos, "Keys must be literal strings") + } + + if (!validOptions.contains(optName)) { + c.abort(optNameAst.pos, s"$optName is an invalid option. 
Valid options: ${validOptions.mkString(", ")}") + } + + optName -> optValueAst }.toMap if (paramTrees.contains("colMapping") && paramTrees.contains("snakeCase")) { @@ -46,15 +45,17 @@ class RowParserImpl(val c: Context) { paramTrees.foldLeft(AnnotOpts(false, Map.empty)) { case (opts, (optName, optValueAst)) => optName match { - case "colMapping" => optValueAst match { - case q"Map[..$tpts](..$params)" => - opts.copy(remapping = getRemapping(params)) - } - case "snakeCase" => optValueAst match { - case q"true" => opts.copy(snakeCase = true) - case q"false" => opts.copy(snakeCase = false) - case value => c.abort(value.pos, "snakeCase requires a literal true or false value") - } + case "colMapping" => + optValueAst match { + case q"Map[..$tpts](..$params)" => + opts.copy(remapping = getRemapping(params)) + } + case "snakeCase" => + optValueAst match { + case q"true" => opts.copy(snakeCase = true) + case q"false" => opts.copy(snakeCase = false) + case value => c.abort(value.pos, "snakeCase requires a literal true or false value") + } } } case q"new Record()" => AnnotOpts(false, Map.empty) @@ -64,7 +65,6 @@ class RowParserImpl(val c: Context) { val result: List[Tree] = inputs match { case target @ q"case class $tpname[..$tparams] $ctorMods(...$paramss) extends { ..$earlydefns } with ..$parents { $self => ..$stats }" :: tail => - val params = paramss.head val paramNames = params.map(_.name.toString).toSet @@ -134,13 +134,13 @@ class RowParserImpl(val c: Context) { private def tupleValueString(tupleTree: Tree): String = { val remapAst = tupleTree match { case q"$aa($colLit).$arrow[..$tpts]($remapAst)" => remapAst - case q"$col -> $remapAst" => remapAst - case q"($col, $remapAst)" => remapAst + case q"$col -> $remapAst" => remapAst + case q"($col, $remapAst)" => remapAst } remapAst match { case Literal(Constant(remap: String)) => remap - case value => c.abort(value.pos, "Remappings must be literal strings") + case value => c.abort(value.pos, "Remappings must be literal 
strings") } } @@ -189,11 +189,16 @@ class RowParserImpl(val c: Context) { } } - private def toSnakeCase(s: String): String = s.replaceAll( - "([A-Z]+)([A-Z][a-z])", "$1_$2" - ).replaceAll( - "([a-z\\d])([A-Z])", "$1_$2" - ).toLowerCase + private def toSnakeCase(s: String): String = s + .replaceAll( + "([A-Z]+)([A-Z][a-z])", + "$1_$2" + ) + .replaceAll( + "([a-z\\d])([A-Z])", + "$1_$2" + ) + .toLowerCase private def findCaseClassFields(ty: Type): List[(TermName, Type)] = { ty.members.sorted.collect { @@ -204,7 +209,7 @@ class RowParserImpl(val c: Context) { private def expand(colLit: Tree, tree: Tree): (String, Tree) = { val col = colLit match { case Literal(Constant(col: String)) => col - case _ => c.abort(colLit.pos, "Column names must be literal strings") + case _ => c.abort(colLit.pos, "Column names must be literal strings") } col -> tree } @@ -212,9 +217,9 @@ class RowParserImpl(val c: Context) { private def getRemapping(params: List[Tree]): Map[String, Tree] = { params.map { case tree @ q"$aa($colLit).$arrow[..$tpts]($remapLit)" => expand(colLit, tree) - case tree @ q"$colLit -> $remapLit" => expand(colLit, tree) - case tree @ q"($colLit, $remapLit)" => expand(colLit, tree) - case tree => c.abort(tree.pos, "Remappings must be literal tuples") + case tree @ q"$colLit -> $remapLit" => expand(colLit, tree) + case tree @ q"($colLit, $remapLit)" => expand(colLit, tree) + case tree => c.abort(tree.pos, "Remappings must be literal tuples") }.toMap } } diff --git a/macros/src/test/scala/com/lucidchart/open/relate/macros/RowParserTest.scala b/macros/src/test/scala/com/lucidchart/open/relate/macros/RowParserTest.scala index dc7f8a0..54ffeec 100644 --- a/macros/src/test/scala/com/lucidchart/open/relate/macros/RowParserTest.scala +++ b/macros/src/test/scala/com/lucidchart/open/relate/macros/RowParserTest.scala @@ -42,7 +42,6 @@ case class Big( def m2: Int = 0 } - class RowParserTest extends Specification with Mockito { class MockableRow extends SqlRow(null) { final 
override def apply[A: ColReader](col: String): A = super.apply(col) @@ -57,7 +56,7 @@ class RowParserTest extends Specification with Mockito { val p = generateParser[Thing] - p.parse(row) mustEqual(Thing("hi", Some(20))) + p.parse(row) mustEqual (Thing("hi", Some(20))) } "generate parser w/snake_case columns" in { @@ -67,14 +66,16 @@ class RowParserTest extends Specification with Mockito { val p = generateSnakeParser[Thing] - p.parse(row) mustEqual(Thing("gregg", Some(20))) + p.parse(row) mustEqual (Thing("gregg", Some(20))) } "remap column names" in { - val p = generateParser[User](Map( - "firstName" -> "fname", - "lastName" -> "lname" - )) + val p = generateParser[User]( + Map( + "firstName" -> "fname", + "lastName" -> "lname" + ) + ) val row = mock[MockableRow] row.stringOption("fname") returns Some("gregg") @@ -84,10 +85,12 @@ class RowParserTest extends Specification with Mockito { } "remap column names w/normal tuple syntax" in { - val p = generateParser[User](Map( - ("firstName", "fname"), - ("lastName", "lname") - )) + val p = generateParser[User]( + Map( + ("firstName", "fname"), + ("lastName", "lname") + ) + ) val row = mock[MockableRow] row.stringOption("fname") returns Some("gregg") @@ -97,9 +100,11 @@ class RowParserTest extends Specification with Mockito { } "remap some column names" in { - val p = generateParser[User](Map( - "firstName" -> "fname" - )) + val p = generateParser[User]( + Map( + "firstName" -> "fname" + ) + ) val row = mock[MockableRow] row.stringOption("fname") returns Some("gregg") @@ -110,18 +115,39 @@ class RowParserTest extends Specification with Mockito { "generate parser for a case class > 22 fields" in { val row = mock[MockableRow] - for (i <- (1 to 9)) { row.intOption(s"f${i}") returns Some(i) } - for (i <- (10 to 19)) { row.intOption(s"z${i}") returns Some(i) } - for (i <- (20 to 25)) { row.intOption(s"a${i}") returns Some(i) } - + for (i <- 1 to 9) { row.intOption(s"f${i}") returns Some(i) } + for (i <- 10 to 19) { 
row.intOption(s"z${i}") returns Some(i) } + for (i <- 20 to 25) { row.intOption(s"a${i}") returns Some(i) } val p = generateParser[Big] - p.parse(row) mustEqual(Big( - 1, Some(2), 3, 4, 5, 6, 7, 8, 9, 10, - 11, 12, 13, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, Some(24), 25) - ) + p.parse(row) mustEqual (Big( + 1, + Some(2), + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + Some(24), + 25 + )) } "fail to compile with non-literals" in { diff --git a/project/plugins.sbt b/project/plugins.sbt index 4232aff..5c896aa 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,3 +3,5 @@ addSbtPlugin("com.lucidchart" % "sbt-cross" % "4.0") addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.18") + +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") diff --git a/relate/build.sbt b/relate/build.sbt index d708af6..b50904b 100644 --- a/relate/build.sbt +++ b/relate/build.sbt @@ -16,9 +16,6 @@ libraryDependencies ++= Seq( ) libraryDependencies ++= (CrossVersion.binaryScalaVersion(scalaVersion.value) match { - case "2.10" => Seq("com.typesafe.play" %% "anorm" % "2.4.0" % Benchmark) - case "2.11" => Seq("com.typesafe.play" %% "anorm" % "2.5.2" % Benchmark) - case "2.12" => Seq("com.typesafe.play" %% "anorm" % "2.6.0-M1" % Benchmark) case "2.13" => Seq("org.playframework.anorm" %% "anorm" % "2.6.7" % Benchmark) }) diff --git a/relate/src/main/scala/com/lucidchart/relate/ColReader.scala b/relate/src/main/scala/com/lucidchart/relate/ColReader.scala index 8f4f5ad..1c7c525 100644 --- a/relate/src/main/scala/com/lucidchart/relate/ColReader.scala +++ b/relate/src/main/scala/com/lucidchart/relate/ColReader.scala @@ -21,7 +21,7 @@ trait ColReader[A] { self => def flatMap[B](f: A => ColReader[B]): ColReader[B] = ColReader[B] { (col, rs) => self.read(col, rs) match { case Some(a) => f(a).read(col, rs) - case None => None + case None => None } } @@ -34,26 
+34,32 @@ object ColReader { def read(col: String, rs: SqlRow): Option[A] = f(col, rs) } - implicit val jbigDecimalReader: ColReader[java.math.BigDecimal] = ColReader { (col, row) => row.javaBigDecimalOption(col)} - implicit val bigDecimalReader: ColReader[BigDecimal] = ColReader { (col, row) => row.bigDecimalOption(col)} - implicit val jBigIntReader: ColReader[java.math.BigInteger] = ColReader { (col, row) => row.javaBigIntegerOption(col)} - implicit val bigIntReader: ColReader[BigInt] = ColReader { (col, row) => row.bigIntOption(col)} - implicit val boolReader: ColReader[Boolean] = ColReader { (col, row) => row.boolOption(col)} - implicit val byteArrayReader: ColReader[Array[Byte]] = ColReader { (col, row) => row.byteArrayOption(col)} - implicit val byteReader: ColReader[Byte] = ColReader { (col, row) => row.byteOption(col)} - implicit val dateReader: ColReader[Date] = ColReader { (col, row) => row.dateOption(col)} - implicit val localDateReader: ColReader[LocalDate] = ColReader { (col, row) => row.localDateOption(col)} - implicit val instantReader: ColReader[Instant] = ColReader { (col, row) => row.instantOption(col)} - implicit val doubleReader: ColReader[Double] = ColReader { (col, row) => row.doubleOption(col)} - implicit val intReader: ColReader[Int] = ColReader { (col, row) => row.intOption(col)} - implicit val longReader: ColReader[Long] = ColReader { (col, row) => row.longOption(col)} - implicit val shortReader: ColReader[Short] = ColReader { (col, row) => row.shortOption(col)} - implicit val stringReader: ColReader[String] = ColReader { (col, row) => row.stringOption(col)} - implicit val uuidReader: ColReader[UUID] = ColReader[UUID] { (col, row) => row.uuidOption(col)} + implicit val jbigDecimalReader: ColReader[java.math.BigDecimal] = ColReader { (col, row) => + row.javaBigDecimalOption(col) + } + implicit val bigDecimalReader: ColReader[BigDecimal] = ColReader { (col, row) => row.bigDecimalOption(col) } + implicit val jBigIntReader: 
ColReader[java.math.BigInteger] = ColReader { (col, row) => + row.javaBigIntegerOption(col) + } + implicit val bigIntReader: ColReader[BigInt] = ColReader { (col, row) => row.bigIntOption(col) } + implicit val boolReader: ColReader[Boolean] = ColReader { (col, row) => row.boolOption(col) } + implicit val byteArrayReader: ColReader[Array[Byte]] = ColReader { (col, row) => row.byteArrayOption(col) } + implicit val byteReader: ColReader[Byte] = ColReader { (col, row) => row.byteOption(col) } + implicit val dateReader: ColReader[Date] = ColReader { (col, row) => row.dateOption(col) } + implicit val localDateReader: ColReader[LocalDate] = ColReader { (col, row) => row.localDateOption(col) } + implicit val instantReader: ColReader[Instant] = ColReader { (col, row) => row.instantOption(col) } + implicit val doubleReader: ColReader[Double] = ColReader { (col, row) => row.doubleOption(col) } + implicit val intReader: ColReader[Int] = ColReader { (col, row) => row.intOption(col) } + implicit val longReader: ColReader[Long] = ColReader { (col, row) => row.longOption(col) } + implicit val shortReader: ColReader[Short] = ColReader { (col, row) => row.shortOption(col) } + implicit val stringReader: ColReader[String] = ColReader { (col, row) => row.stringOption(col) } + implicit val uuidReader: ColReader[UUID] = ColReader[UUID] { (col, row) => row.uuidOption(col) } def enumReader[A <: Enumeration](e: A): ColReader[e.Value] = { - intReader.flatMap(id => ColReader[e.Value] { (_, _) => - e.values.find(_.id == id) - }) + intReader.flatMap(id => + ColReader[e.Value] { (_, _) => + e.values.find(_.id == id) + } + ) } } diff --git a/relate/src/main/scala/com/lucidchart/relate/CollectionsParser.scala b/relate/src/main/scala/com/lucidchart/relate/CollectionsParser.scala index 61a1edc..c01a393 100644 --- a/relate/src/main/scala/com/lucidchart/relate/CollectionsParser.scala +++ b/relate/src/main/scala/com/lucidchart/relate/CollectionsParser.scala @@ -4,7 +4,7 @@ import 
scala.collection.compat._ import scala.language.higherKinds trait CollectionsParser { - def limitedCollection[B: RowParser, Col[_]](maxRows: Long)(implicit factory: Factory[B, Col[B]]) = + def limitedCollection[B: RowParser, Col[_]](maxRows: Long)(implicit factory: Factory[B, Col[B]]) = RowParser { result => val builder = factory.newBuilder @@ -24,8 +24,9 @@ trait CollectionsParser { implicit def collection[B: RowParser, Col[_]](implicit factory: Factory[B, Col[B]]) = limitedCollection[B, Col](Long.MaxValue) - implicit def pairCollection[Key: RowParser, Value: RowParser, PairCol[_, _]] - (implicit factory: Factory[(Key, Value), PairCol[Key, Value]]) = + implicit def pairCollection[Key: RowParser, Value: RowParser, PairCol[_, _]](implicit + factory: Factory[(Key, Value), PairCol[Key, Value]] + ) = RowParser { result => val builder = factory.newBuilder @@ -38,4 +39,4 @@ trait CollectionsParser { builder.result } -} \ No newline at end of file +} diff --git a/relate/src/main/scala/com/lucidchart/relate/CollectionsSql.scala b/relate/src/main/scala/com/lucidchart/relate/CollectionsSql.scala index 262fefe..a9e23db 100644 --- a/relate/src/main/scala/com/lucidchart/relate/CollectionsSql.scala +++ b/relate/src/main/scala/com/lucidchart/relate/CollectionsSql.scala @@ -28,29 +28,50 @@ import scala.language.higherKinds trait CollectionsSql { self: Sql => /** - * Execute the query and get the auto-incremented keys using a RowParser. Provided for the case - * that a primary key is not an Int or BigInt - * @param parser the RowParser that can parse the returned keys - * @param connection the connection to use when executing the query - * @return the auto-incremented keys + * Execute the query and get the auto-incremented keys using a RowParser. 
Provided for the case that a primary key is + * not an Int or BigInt + * @param parser + * the RowParser that can parse the returned keys + * @param connection + * the connection to use when executing the query + * @return + * the auto-incremented keys */ - def executeInsertCollection[U, T[_]](parser: SqlRow => U)(implicit factory: Factory[U, T[U]], connection: Connection): T[U] = insertionStatement.execute(_.asCollection(parser)) + def executeInsertCollection[U, T[_]]( + parser: SqlRow => U + )(implicit factory: Factory[U, T[U]], connection: Connection): T[U] = + insertionStatement.execute(_.asCollection(parser)) /** * Execute this query and get back the result as an arbitrary collection of records - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * @return the results as an arbitrary collection of records + * @param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as an arbitrary collection of records */ - def asCollection[U, T[_]](parser: SqlRow => U)(implicit factory: Factory[U, T[U]], connection: Connection): T[U] = normalStatement.execute(_.asCollection(parser)) - def asCollection[U: RowParser, T[_]]()(implicit factory: Factory[U, T[U]], connection: Connection): T[U] = normalStatement.execute(_.asCollection[U, T]) + def asCollection[U, T[_]](parser: SqlRow => U)(implicit factory: Factory[U, T[U]], connection: Connection): T[U] = + normalStatement.execute(_.asCollection(parser)) + def asCollection[U: RowParser, T[_]]()(implicit factory: Factory[U, T[U]], connection: Connection): T[U] = + normalStatement.execute(_.asCollection[U, T]) /** * Execute this query and get back the result as an arbitrary collection of key value pairs - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * 
@return the results as an arbitrary collection of key value pairs + * @param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as an arbitrary collection of key value pairs */ - def asPairCollection[U, V, T[_, _]](parser: SqlRow => (U, V))(implicit factory: Factory[(U, V), T[U, V]], connection: Connection): T[U, V] = normalStatement.execute(_.asPairCollection(parser)) - def asPairCollection[U, V, T[_, _]]()(implicit factory: Factory[(U, V), T[U, V]], connection: Connection, p: RowParser[(U, V)]): T[U, V] = normalStatement.execute(_.asPairCollection[U, V, T]) + def asPairCollection[U, V, T[_, _]]( + parser: SqlRow => (U, V) + )(implicit factory: Factory[(U, V), T[U, V]], connection: Connection): T[U, V] = + normalStatement.execute(_.asPairCollection(parser)) + def asPairCollection[U, V, T[_, _]]()(implicit + factory: Factory[(U, V), T[U, V]], + connection: Connection, + p: RowParser[(U, V)] + ): T[U, V] = normalStatement.execute(_.asPairCollection[U, V, T]) } diff --git a/relate/src/main/scala/com/lucidchart/relate/CollectionsSqlResult.scala b/relate/src/main/scala/com/lucidchart/relate/CollectionsSqlResult.scala index 00c5d26..eac76bc 100644 --- a/relate/src/main/scala/com/lucidchart/relate/CollectionsSqlResult.scala +++ b/relate/src/main/scala/com/lucidchart/relate/CollectionsSqlResult.scala @@ -5,11 +5,12 @@ import scala.collection.compat._ import scala.collection.mutable import scala.language.higherKinds - trait CollectionsSqlResult { self: SqlResult => - def asCollection[U, T[_]](parser: SqlRow => U)(implicit factory: Factory[U, T[U]]): T[U] = asCollection(parser, Long.MaxValue) - def asCollection[U: RowParser, T[_]]()(implicit factory: Factory[U, T[U]]): T[U] = asCollection(implicitly[RowParser[U]].parse, Long.MaxValue) + def asCollection[U, T[_]](parser: SqlRow => U)(implicit factory: Factory[U, T[U]]): T[U] = + asCollection(parser, Long.MaxValue) + 
def asCollection[U: RowParser, T[_]]()(implicit factory: Factory[U, T[U]]): T[U] = + asCollection(implicitly[RowParser[U]].parse, Long.MaxValue) protected def asCollection[U: RowParser, T[_]](maxRows: Long)(implicit factory: Factory[U, T[U]]): T[U] = asCollection(implicitly[RowParser[U]].parse, maxRows) protected def asCollection[U, T[_]](parser: SqlRow => U, maxRows: Long)(implicit factory: Factory[U, T[U]]): T[U] = { @@ -27,10 +28,15 @@ trait CollectionsSqlResult { self: SqlResult => def asPairCollection[U, V, T[_, _]]()(implicit p: RowParser[(U, V)], factory: Factory[(U, V), T[U, V]]): T[U, V] = { asPairCollection(p.parse, Long.MaxValue) } - def asPairCollection[U, V, T[_, _]](parser: SqlRow => (U, V))(implicit factory: Factory[(U, V), T[U, V]]): T[U, V] = asPairCollection(parser, Long.MaxValue) - protected def asPairCollection[U, V, T[_, _]](maxRows: Long)(implicit p: RowParser[(U, V)], factory: Factory[(U, V), T[U, V]]): T[U, V] = + def asPairCollection[U, V, T[_, _]](parser: SqlRow => (U, V))(implicit factory: Factory[(U, V), T[U, V]]): T[U, V] = + asPairCollection(parser, Long.MaxValue) + protected def asPairCollection[U, V, T[_, _]]( + maxRows: Long + )(implicit p: RowParser[(U, V)], factory: Factory[(U, V), T[U, V]]): T[U, V] = asPairCollection(p.parse, maxRows) - protected def asPairCollection[U, V, T[_, _]](parser: SqlRow => (U, V), maxRows: Long)(implicit factory: Factory[(U, V), T[U, V]]): T[U, V] = { + protected def asPairCollection[U, V, T[_, _]](parser: SqlRow => (U, V), maxRows: Long)(implicit + factory: Factory[(U, V), T[U, V]] + ): T[U, V] = { val builder = factory.newBuilder withResultSet { resultSet => diff --git a/relate/src/main/scala/com/lucidchart/relate/InterpolatedQuery.scala b/relate/src/main/scala/com/lucidchart/relate/InterpolatedQuery.scala index 75544d6..4d07dd2 100644 --- a/relate/src/main/scala/com/lucidchart/relate/InterpolatedQuery.scala +++ b/relate/src/main/scala/com/lucidchart/relate/InterpolatedQuery.scala @@ -2,7 +2,9 @@ 
package com.lucidchart.relate import java.sql.{Connection, PreparedStatement} -class InterpolatedQuery(protected val parsedQuery: String, protected val params: Seq[Parameter]) extends Sql with MultipleParameter { +class InterpolatedQuery(protected val parsedQuery: String, protected val params: Seq[Parameter]) + extends Sql + with MultipleParameter { def +(query: InterpolatedQuery) = new InterpolatedQuery(parsedQuery + query.parsedQuery, params ++ query.params) @@ -11,11 +13,13 @@ class InterpolatedQuery(protected val parsedQuery: String, protected val params: def appendPlaceholders(stringBuilder: StringBuilder) = stringBuilder ++= parsedQuery def withTimeout(seconds: Int): InterpolatedQuery = new InterpolatedQuery(parsedQuery, params) { - override protected def normalStatement(implicit conn: Connection) = new BaseStatement(conn) with NormalStatementPreparer { + override protected def normalStatement(implicit conn: Connection) = new BaseStatement(conn) + with NormalStatementPreparer { override def timeout = Some(seconds) } - override protected def insertionStatement(implicit conn: Connection) = new BaseStatement(conn) with InsertionStatementPreparer { + override protected def insertionStatement(implicit conn: Connection) = new BaseStatement(conn) + with InsertionStatementPreparer { override def timeout = Some(seconds) } diff --git a/relate/src/main/scala/com/lucidchart/relate/Parameterizable.scala b/relate/src/main/scala/com/lucidchart/relate/Parameterizable.scala index bbb72c2..0e9f5ce 100644 --- a/relate/src/main/scala/com/lucidchart/relate/Parameterizable.scala +++ b/relate/src/main/scala/com/lucidchart/relate/Parameterizable.scala @@ -8,12 +8,14 @@ import java.util.UUID trait Parameterizable[-A] { final def contraMap[B](f: B => A) = Parameterizable((statement, i, value: B) => set(statement, i, f(value)), setNull) + /** - * Set the parameterized value at index {@code i} in the prepared statement to the {@code value}. 
+ * Set the parameterized value at index {@code i} in the prepared statement to the {@code value}. */ def set(statement: PreparedStatement, i: Int, value: A) + /** - * Set the parameterized value at index {@code i} in the prepared statement to {@code null}. + * Set the parameterized value at index {@code i} in the prepared statement to {@code null}. */ def setNull(statement: PreparedStatement, i: Int) final def setOption(statement: PreparedStatement, i: Int, value: Option[A]) = @@ -21,6 +23,7 @@ trait Parameterizable[-A] { } object Parameterizable { + /** * Create new Parameterizable instance from functions for set and setNull * @@ -29,17 +32,20 @@ object Parameterizable { * @param g * The function to implement [[Parameterizable#setNull]] with */ - def apply[A](f: (PreparedStatement, Int, A) => Unit, g: (PreparedStatement, Int) => Unit) = new Parameterizable [A] { + def apply[A](f: (PreparedStatement, Int, A) => Unit, g: (PreparedStatement, Int) => Unit) = new Parameterizable[A] { def set(statement: PreparedStatement, i: Int, value: A) = f(statement, i, value) def setNull(statement: PreparedStatement, i: Int) = g(statement, i) } - def from[A, B : Parameterizable](f: A => B) = implicitly[Parameterizable[B]].contraMap(f) + def from[A, B: Parameterizable](f: A => B) = implicitly[Parameterizable[B]].contraMap(f) implicit val array = apply(_.setArray(_, _: Array), _.setNull(_, Types.ARRAY)) // ideally, this would be named jBigDecimal, but that wouldn't be backwards compatibility implicit val bigDecimal = apply(_.setBigDecimal(_, _: java.math.BigDecimal), _.setNull(_, Types.DECIMAL)) - implicit val scalaBigDecimal = apply((stmt: PreparedStatement, i: Int, v: scala.math.BigDecimal) => stmt.setBigDecimal(i, v.bigDecimal), _.setNull(_, Types.DECIMAL)) + implicit val scalaBigDecimal = apply( + (stmt: PreparedStatement, i: Int, v: scala.math.BigDecimal) => stmt.setBigDecimal(i, v.bigDecimal), + _.setNull(_, Types.DECIMAL) + ) implicit val blob = apply(_.setBlob(_, _: Blob), 
_.setNull(_, Types.BLOB)) implicit val boolean = apply(_.setBoolean(_, _: Boolean), _.setNull(_, Types.BOOLEAN)) implicit val byte = apply(_.setByte(_, _: Byte), _.setNull(_, Types.TINYINT)) diff --git a/relate/src/main/scala/com/lucidchart/relate/Parameters.scala b/relate/src/main/scala/com/lucidchart/relate/Parameters.scala index c86354a..45fbac1 100644 --- a/relate/src/main/scala/com/lucidchart/relate/Parameters.scala +++ b/relate/src/main/scala/com/lucidchart/relate/Parameters.scala @@ -36,11 +36,11 @@ trait Parameter { } object Parameter { - implicit def single[A : Parameterizable](value: A): SingleParameter = new SingleParameter { + implicit def single[A: Parameterizable](value: A): SingleParameter = new SingleParameter { protected[this] def set(statement: PreparedStatement, i: Int) = implicitly[Parameterizable[A]].set(statement, i, value) } - implicit def singleOption[A : Parameterizable](value: Option[A]): SingleParameter = new SingleParameter { + implicit def singleOption[A: Parameterizable](value: Option[A]): SingleParameter = new SingleParameter { protected[this] def set(statement: PreparedStatement, i: Int) = implicitly[Parameterizable[A]].setOption(statement, i, value) } @@ -50,27 +50,565 @@ object Parameter { implicit def fromArray[A](it: Array[A])(implicit sp: SP[A]) = new TupleParameter(it.map(sp)) implicit def fromIterable[A](it: Iterable[A])(implicit sp: SP[A]) = new TupleParameter(it.map(sp)) implicit def fromTuple1[T1](t: Tuple1[T1])(implicit sp1: SP[T1]) = TupleParameter(sp1(t._1)) - implicit def fromTuple2[T1, T2](t: Tuple2[T1,T2])(implicit sp1: SP[T1], sp2: SP[T2]) = TupleParameter(sp1(t._1), sp2(t._2)) - implicit def fromTuple3[T1, T2, T3](t: Tuple3[T1,T2,T3])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3)) - implicit def fromTuple4[T1, T2, T3, T4](t: Tuple4[T1,T2,T3,T4])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4)) 
- implicit def fromTuple5[T1, T2, T3, T4, T5](t: Tuple5[T1,T2,T3,T4,T5])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5)) - implicit def fromTuple6[T1, T2, T3, T4, T5, T6](t: Tuple6[T1,T2,T3,T4,T5,T6])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6)) - implicit def fromTuple7[T1, T2, T3, T4, T5, T6, T7](t: Tuple7[T1,T2,T3,T4,T5,T6,T7])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7)) - implicit def fromTuple8[T1, T2, T3, T4, T5, T6, T7, T8](t: Tuple8[T1,T2,T3,T4,T5,T6,T7,T8])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8)) - implicit def fromTuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](t: Tuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9)) - implicit def fromTuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](t: Tuple10[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10)) - implicit def fromTuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](t: Tuple11[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: 
SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11)) - implicit def fromTuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](t: Tuple12[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12)) - implicit def fromTuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](t: Tuple13[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13)) - implicit def fromTuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](t: Tuple14[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13], sp14: SP[T14]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14)) - implicit def fromTuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](t: Tuple15[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], 
sp12: SP[T12], sp13: SP[T13], sp14: SP[T14], sp15: SP[T15]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14), sp15(t._15)) - implicit def fromTuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](t: Tuple16[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13], sp14: SP[T14], sp15: SP[T15], sp16: SP[T16]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14), sp15(t._15), sp16(t._16)) - implicit def fromTuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](t: Tuple17[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13], sp14: SP[T14], sp15: SP[T15], sp16: SP[T16], sp17: SP[T17]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14), sp15(t._15), sp16(t._16), sp17(t._17)) - implicit def fromTuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](t: Tuple18[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13], sp14: SP[T14], sp15: SP[T15], sp16: SP[T16], sp17: SP[T17], sp18: SP[T18]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), 
sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14), sp15(t._15), sp16(t._16), sp17(t._17), sp18(t._18)) - implicit def fromTuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](t: Tuple19[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13], sp14: SP[T14], sp15: SP[T15], sp16: SP[T16], sp17: SP[T17], sp18: SP[T18], sp19: SP[T19]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14), sp15(t._15), sp16(t._16), sp17(t._17), sp18(t._18), sp19(t._19)) - implicit def fromTuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](t: Tuple20[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13], sp14: SP[T14], sp15: SP[T15], sp16: SP[T16], sp17: SP[T17], sp18: SP[T18], sp19: SP[T19], sp20: SP[T20]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14), sp15(t._15), sp16(t._16), sp17(t._17), sp18(t._18), sp19(t._19), sp20(t._20)) - implicit def fromTuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](t: Tuple21[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: 
SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13], sp14: SP[T14], sp15: SP[T15], sp16: SP[T16], sp17: SP[T17], sp18: SP[T18], sp19: SP[T19], sp20: SP[T20], sp21: SP[T21]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14), sp15(t._15), sp16(t._16), sp17(t._17), sp18(t._18), sp19(t._19), sp20(t._20), sp21(t._21)) - implicit def fromTuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](t: Tuple22[T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21,T22])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8], sp9: SP[T9], sp10: SP[T10], sp11: SP[T11], sp12: SP[T12], sp13: SP[T13], sp14: SP[T14], sp15: SP[T15], sp16: SP[T16], sp17: SP[T17], sp18: SP[T18], sp19: SP[T19], sp20: SP[T20], sp21: SP[T21], sp22: SP[T22]) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9), sp10(t._10), sp11(t._11), sp12(t._12), sp13(t._13), sp14(t._14), sp15(t._15), sp16(t._16), sp17(t._17), sp18(t._18), sp19(t._19), sp20(t._20), sp21(t._21), sp22(t._22)) + implicit def fromTuple2[T1, T2](t: Tuple2[T1, T2])(implicit sp1: SP[T1], sp2: SP[T2]) = + TupleParameter(sp1(t._1), sp2(t._2)) + implicit def fromTuple3[T1, T2, T3](t: Tuple3[T1, T2, T3])(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3]) = + TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3)) + implicit def fromTuple4[T1, T2, T3, T4]( + t: Tuple4[T1, T2, T3, T4] + )(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4]) = + TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4)) + implicit def fromTuple5[T1, T2, T3, T4, T5]( + t: Tuple5[T1, T2, T3, T4, T5] + )(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5]) = + TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5)) + 
implicit def fromTuple6[T1, T2, T3, T4, T5, T6]( + t: Tuple6[T1, T2, T3, T4, T5, T6] + )(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6]) = + TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6)) + implicit def fromTuple7[T1, T2, T3, T4, T5, T6, T7]( + t: Tuple7[T1, T2, T3, T4, T5, T6, T7] + )(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7]) = + TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7)) + implicit def fromTuple8[T1, T2, T3, T4, T5, T6, T7, T8]( + t: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8] + )(implicit sp1: SP[T1], sp2: SP[T2], sp3: SP[T3], sp4: SP[T4], sp5: SP[T5], sp6: SP[T6], sp7: SP[T7], sp8: SP[T8]) = + TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8)) + implicit def fromTuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](t: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9])(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9] + ) = TupleParameter(sp1(t._1), sp2(t._2), sp3(t._3), sp4(t._4), sp5(t._5), sp6(t._6), sp7(t._7), sp8(t._8), sp9(t._9)) + implicit def fromTuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]( + t: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10) + ) + implicit def fromTuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]( + t: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + 
sp11: SP[T11] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11) + ) + implicit def fromTuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]( + t: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12) + ) + implicit def fromTuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]( + t: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13) + ) + implicit def fromTuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]( + t: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14) + ) + implicit def fromTuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]( + t: 
Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14], + sp15: SP[T15] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14), + sp15(t._15) + ) + implicit def fromTuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]( + t: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14], + sp15: SP[T15], + sp16: SP[T16] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14), + sp15(t._15), + sp16(t._16) + ) + implicit def fromTuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]( + t: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14], + sp15: SP[T15], + sp16: SP[T16], + sp17: SP[T17] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14), + sp15(t._15), + sp16(t._16), + sp17(t._17) + ) + implicit 
def fromTuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]( + t: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14], + sp15: SP[T15], + sp16: SP[T16], + sp17: SP[T17], + sp18: SP[T18] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14), + sp15(t._15), + sp16(t._16), + sp17(t._17), + sp18(t._18) + ) + implicit def fromTuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]( + t: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14], + sp15: SP[T15], + sp16: SP[T16], + sp17: SP[T17], + sp18: SP[T18], + sp19: SP[T19] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14), + sp15(t._15), + sp16(t._16), + sp17(t._17), + sp18(t._18), + sp19(t._19) + ) + implicit def fromTuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]( + t: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + 
sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14], + sp15: SP[T15], + sp16: SP[T16], + sp17: SP[T17], + sp18: SP[T18], + sp19: SP[T19], + sp20: SP[T20] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14), + sp15(t._15), + sp16(t._16), + sp17(t._17), + sp18(t._18), + sp19(t._19), + sp20(t._20) + ) + implicit def fromTuple21[ + T1, + T2, + T3, + T4, + T5, + T6, + T7, + T8, + T9, + T10, + T11, + T12, + T13, + T14, + T15, + T16, + T17, + T18, + T19, + T20, + T21 + ](t: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21])(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14], + sp15: SP[T15], + sp16: SP[T16], + sp17: SP[T17], + sp18: SP[T18], + sp19: SP[T19], + sp20: SP[T20], + sp21: SP[T21] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14), + sp15(t._15), + sp16(t._16), + sp17(t._17), + sp18(t._18), + sp19(t._19), + sp20(t._20), + sp21(t._21) + ) + implicit def fromTuple22[ + T1, + T2, + T3, + T4, + T5, + T6, + T7, + T8, + T9, + T10, + T11, + T12, + T13, + T14, + T15, + T16, + T17, + T18, + T19, + T20, + T21, + T22 + ]( + t: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] + )(implicit + sp1: SP[T1], + sp2: SP[T2], + sp3: SP[T3], + sp4: SP[T4], + sp5: SP[T5], + sp6: SP[T6], + sp7: SP[T7], + sp8: SP[T8], + sp9: SP[T9], + sp10: SP[T10], + sp11: SP[T11], + sp12: SP[T12], + sp13: SP[T13], + sp14: SP[T14], + sp15: SP[T15], + sp16: SP[T16], + sp17: SP[T17], + sp18: SP[T18], + 
sp19: SP[T19], + sp20: SP[T20], + sp21: SP[T21], + sp22: SP[T22] + ) = TupleParameter( + sp1(t._1), + sp2(t._2), + sp3(t._3), + sp4(t._4), + sp5(t._5), + sp6(t._6), + sp7(t._7), + sp8(t._8), + sp9(t._9), + sp10(t._10), + sp11(t._11), + sp12(t._12), + sp13(t._13), + sp14(t._14), + sp15(t._15), + sp16(t._16), + sp17(t._17), + sp18(t._18), + sp19(t._19), + sp20(t._20), + sp21(t._21), + sp22(t._22) + ) implicit def fromTuples[A](seq: Seq[A])(implicit tp: A => TupleParameter) = new TuplesParameter(seq.map(tp)) } @@ -110,7 +648,7 @@ object TupleParameter { class TuplesParameter(val params: Iterable[TupleParameter]) extends MultipleParameter { def appendPlaceholders(stringBuilder: StringBuilder) = { - if(params.nonEmpty) { + if (params.nonEmpty) { params.foreach { param => stringBuilder.append("(") param.appendPlaceholders(stringBuilder) diff --git a/relate/src/main/scala/com/lucidchart/relate/ResultSetWrapper.scala b/relate/src/main/scala/com/lucidchart/relate/ResultSetWrapper.scala index 833de2e..0b05981 100644 --- a/relate/src/main/scala/com/lucidchart/relate/ResultSetWrapper.scala +++ b/relate/src/main/scala/com/lucidchart/relate/ResultSetWrapper.scala @@ -6,9 +6,10 @@ trait ResultSetWrapper { val resultSet: java.sql.ResultSet /** - * Determine if the last value extracted from the result set was null - * @return whether the last value was null - */ + * Determine if the last value extracted from the result set was null + * @return + * whether the last value was null + */ def wasNull(): Boolean = resultSet.wasNull() def next(): Boolean = resultSet.next() @@ -16,23 +17,23 @@ trait ResultSetWrapper { def withResultSet[A](f: (java.sql.ResultSet) => A) = { try { f(resultSet) - } - finally { + } finally { resultSet.close() } } /** - * Determine if the result set contains the given column name - * @param column the column name to check - * @return whether or not the result set contains that column name - */ + * Determine if the result set contains the given column name + * 
@param column + * the column name to check + * @return + * whether or not the result set contains that column name + */ def hasColumn(column: String): Boolean = { try { resultSet.findColumn(column) true - } - catch { + } catch { case e: SQLException => false } } diff --git a/relate/src/main/scala/com/lucidchart/relate/RowIterator.scala b/relate/src/main/scala/com/lucidchart/relate/RowIterator.scala index 21bb5c4..1c1d952 100644 --- a/relate/src/main/scala/com/lucidchart/relate/RowIterator.scala +++ b/relate/src/main/scala/com/lucidchart/relate/RowIterator.scala @@ -3,10 +3,12 @@ package com.lucidchart.relate import java.sql.PreparedStatement private[relate] object RowIterator { - def apply[A](parser: SqlRow => A, stmt: PreparedStatement, resultSet: SqlResult) = new RowIterator(parser, stmt, resultSet) + def apply[A](parser: SqlRow => A, stmt: PreparedStatement, resultSet: SqlResult) = + new RowIterator(parser, stmt, resultSet) } -private[relate] class RowIterator[A](parser: SqlRow => A, stmt: PreparedStatement, result: SqlResult) extends Iterator[A] { +private[relate] class RowIterator[A](parser: SqlRow => A, stmt: PreparedStatement, result: SqlResult) + extends Iterator[A] { private var _hasNext = result.next() @@ -19,13 +21,15 @@ private[relate] class RowIterator[A](parser: SqlRow => A, stmt: PreparedStatemen /** * Determine whether there is another row or not - * @return whether there is another row + * @return + * whether there is another row */ override def hasNext(): Boolean = _hasNext /** * Parse the next row using the RowParser passed into the class - * @return the parsed record + * @return + * the parsed record */ override def next(): A = { val ret = parser(result.asRow) @@ -33,7 +37,7 @@ private[relate] class RowIterator[A](parser: SqlRow => A, stmt: PreparedStatemen _hasNext = result.next() } - //if we've iterated through the whole thing, close resources + // if we've iterated through the whole thing, close resources if (!_hasNext) { close() } diff --git 
a/relate/src/main/scala/com/lucidchart/relate/RowParser.scala b/relate/src/main/scala/com/lucidchart/relate/RowParser.scala index 8ff6217..8306118 100644 --- a/relate/src/main/scala/com/lucidchart/relate/RowParser.scala +++ b/relate/src/main/scala/com/lucidchart/relate/RowParser.scala @@ -34,11 +34,13 @@ object RowParser extends CollectionsParser { val key = implicitly[RowParser[Key]].parse(result) val value = implicitly[RowParser[Value]].parse(result) - mm.get(key).map { foundValue => - mm += (key -> (foundValue + value)) - }.getOrElse { - mm += (key -> Set(value)) - } + mm.get(key) + .map { foundValue => + mm += (key -> (foundValue + value)) + } + .getOrElse { + mm += (key -> Set(value)) + } } } mm.toMap diff --git a/relate/src/main/scala/com/lucidchart/relate/SqlQuery.scala b/relate/src/main/scala/com/lucidchart/relate/SqlQuery.scala index d60f72e..5e744c0 100644 --- a/relate/src/main/scala/com/lucidchart/relate/SqlQuery.scala +++ b/relate/src/main/scala/com/lucidchart/relate/SqlQuery.scala @@ -35,9 +35,11 @@ trait Sql extends CollectionsSql { protected def applyParams(stmt: PreparedStatement) = self.applyParams(stmt) } - protected def normalStatement(implicit connection: Connection) = new BaseStatement(connection) with NormalStatementPreparer + protected def normalStatement(implicit connection: Connection) = new BaseStatement(connection) + with NormalStatementPreparer - protected def insertionStatement(implicit connection: Connection) = new BaseStatement(connection) with InsertionStatementPreparer + protected def insertionStatement(implicit connection: Connection) = new BaseStatement(connection) + with InsertionStatementPreparer protected def streamedStatement(fetchSize: Int)(implicit connection: Connection) = { val fetchSize_ = fetchSize @@ -52,8 +54,8 @@ trait Sql extends CollectionsSql { override def toString = parsedQuery /** - * Calls [[PreparedStatement#toString]], which for many JDBC implementations is the SQL query after parameter substitution. 
- * This is intended primarily for ad-hoc debugging. + * Calls [[PreparedStatement#toString]], which for many JDBC implementations is the SQL query after parameter + * substitution. This is intended primarily for ad-hoc debugging. * * For more routine logging, consider other solutions, such as [[https://code.google.com/p/log4jdbc/ log4jdbc]]. */ @@ -65,165 +67,223 @@ trait Sql extends CollectionsSql { } /** - * Provides direct access to the underlying java.sql.ResultSet. - * Note that this ResultSet must be closed manually or by wrapping it in SqlResult. - * {{{ - * val results = SQL(query).results() - * . . . - * SqlResult(results).asList[A](parser) - * // or - * results.close() - * }}} - * @return java.sql.ResultSet + * Provides direct access to the underlying java.sql.ResultSet. Note that this ResultSet must be closed manually or by + * wrapping it in SqlResult. + * {{{ + * val results = SQL(query).results() + * . . . + * SqlResult(results).asList[A](parser) + * // or + * results.close() + * }}} + * @return + * java.sql.ResultSet */ def results()(implicit connection: Connection): ResultSet = normalStatement.results() /** * Execute a statement - * @param connection the db connection to use when executing the query - * @return true if the first result is a ResultSet object; false if the first result is an update count or there is no result + * @param connection + * the db connection to use when executing the query + * @return + * true if the first result is a ResultSet object; false if the first result is an update count or there is no + * result */ def execute()(implicit connection: Connection): Boolean = normalStatement.execute() /** * Execute an update - * @param connection the db connection to use when executing the query - * @return the number of rows update by the query + * @param connection + * the db connection to use when executing the query + * @return + * the number of rows update by the query */ def executeUpdate()(implicit connection: Connection): Int 
= normalStatement.executeUpdate() /** * Execute the query and get the auto-incremented key as an Int - * @param connection the connection to use when executing the query - * @return the auto-incremented key as an Int + * @param connection + * the connection to use when executing the query + * @return + * the auto-incremented key as an Int */ - def executeInsertInt()(implicit connection: Connection): Int = insertionStatement.execute(_.asSingle(RowParser.insertInt)) + def executeInsertInt()(implicit connection: Connection): Int = + insertionStatement.execute(_.asSingle(RowParser.insertInt)) /** * Execute the query and get the auto-incremented keys as a List of Ints - * @param connection the connection to use when executing the query - * @return the auto-incremented keys as a List of Ints + * @param connection + * the connection to use when executing the query + * @return + * the auto-incremented keys as a List of Ints */ - def executeInsertInts()(implicit connection: Connection): List[Int] = insertionStatement.execute(_.asList(RowParser.insertInt)) + def executeInsertInts()(implicit connection: Connection): List[Int] = + insertionStatement.execute(_.asList(RowParser.insertInt)) /** * Execute the query and get the auto-incremented key as a Long - * @param connection the connection to use when executing the query - * @return the auto-incremented key as a Long + * @param connection + * the connection to use when executing the query + * @return + * the auto-incremented key as a Long */ - def executeInsertLong()(implicit connection: Connection): Long = insertionStatement.execute(_.asSingle(RowParser.insertLong)) + def executeInsertLong()(implicit connection: Connection): Long = + insertionStatement.execute(_.asSingle(RowParser.insertLong)) /** * Execute the query and get the auto-incremented keys as a a List of Longs - * @param connection the connection to use when executing the query - * @return the auto-incremented keys as a a List of Longs + * @param connection + * the 
connection to use when executing the query + * @return + * the auto-incremented keys as a a List of Longs */ - def executeInsertLongs()(implicit connection: Connection): List[Long] = insertionStatement.execute(_.asList(RowParser.insertLong)) + def executeInsertLongs()(implicit connection: Connection): List[Long] = + insertionStatement.execute(_.asList(RowParser.insertLong)) /** - * Execute the query and get the auto-incremented key using a RowParser. Provided for the case - * that a primary key is not an Int or BigInt - * @param parser the RowParser that can parse the returned key - * @param connection the connection to use when executing the query - * @return the auto-incremented key + * Execute the query and get the auto-incremented key using a RowParser. Provided for the case that a primary key is + * not an Int or BigInt + * @param parser + * the RowParser that can parse the returned key + * @param connection + * the connection to use when executing the query + * @return + * the auto-incremented key */ - def executeInsertSingle[U](parser: SqlRow => U)(implicit connection: Connection): U = insertionStatement.execute(_.asSingle(parser)) + def executeInsertSingle[U](parser: SqlRow => U)(implicit connection: Connection): U = + insertionStatement.execute(_.asSingle(parser)) def as[A: RowParser]()(implicit connection: Connection): A = normalStatement.execute(_.as[A]) /** * Execute this query and get back the result as a single record - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * @return the results as a single record + * @param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as a single record */ def asSingle[A](parser: SqlRow => A)(implicit connection: Connection): A = normalStatement.execute(_.asSingle(parser)) def asSingle[A: RowParser]()(implicit connection: 
Connection): A = normalStatement.execute(_.asSingle[A]) /** * Execute this query and get back the result as an optional single record - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * @return the results as an optional single record + * @param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as an optional single record */ - def asSingleOption[A](parser: SqlRow => A)(implicit connection: Connection): Option[A] = normalStatement.execute(_.asSingleOption(parser)) - def asSingleOption[A: RowParser]()(implicit connection: Connection): Option[A] = normalStatement.execute(_.asSingleOption[A]) + def asSingleOption[A](parser: SqlRow => A)(implicit connection: Connection): Option[A] = + normalStatement.execute(_.asSingleOption(parser)) + def asSingleOption[A: RowParser]()(implicit connection: Connection): Option[A] = + normalStatement.execute(_.asSingleOption[A]) /** * Execute this query and get back the result as a Set of records - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * @return the results as a Set of records + * @param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as a Set of records */ def asSet[A](parser: SqlRow => A)(implicit connection: Connection): Set[A] = normalStatement.execute(_.asSet(parser)) def asSet[A: RowParser]()(implicit connection: Connection): Set[A] = normalStatement.execute(_.asSet[A]) /** * Execute this query and get back the result as a sequence of records - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * @return the results as a sequence of records + * 
@param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as a sequence of records */ def asSeq[A](parser: SqlRow => A)(implicit connection: Connection): Seq[A] = normalStatement.execute(_.asSeq(parser)) def asSeq[A: RowParser]()(implicit connection: Connection): Seq[A] = normalStatement.execute(_.asSeq[A]) /** * Execute this query and get back the result as an iterable of records - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * @return the results as an iterable of records + * @param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as an iterable of records */ - def asIterable[A](parser: SqlRow => A)(implicit connection: Connection): Iterable[A] = normalStatement.execute(_.asIterable(parser)) - def asIterable[A: RowParser]()(implicit connection: Connection): Iterable[A] = normalStatement.execute(_.asIterable[A]) + def asIterable[A](parser: SqlRow => A)(implicit connection: Connection): Iterable[A] = + normalStatement.execute(_.asIterable(parser)) + def asIterable[A: RowParser]()(implicit connection: Connection): Iterable[A] = + normalStatement.execute(_.asIterable[A]) /** * Execute this query and get back the result as a List of records - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * @return the results as a List of records + * @param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as a List of records */ - def asList[A](parser: SqlRow => A)(implicit connection: Connection): List[A] = normalStatement.execute(_.asList(parser)) + def asList[A](parser: SqlRow 
=> A)(implicit connection: Connection): List[A] = + normalStatement.execute(_.asList(parser)) def asList[A: RowParser]()(implicit connection: Connection): List[A] = normalStatement.execute(_.asList[A]) /** * Execute this query and get back the result as a Map of records - * @param parser the RowParser to use when parsing the result set. The RowParser should return a Tuple - * of size 2 containing the key and value - * @param connection the connection to use when executing the query - * @return the results as a Map of records + * @param parser + * the RowParser to use when parsing the result set. The RowParser should return a Tuple of size 2 containing the + * key and value + * @param connection + * the connection to use when executing the query + * @return + * the results as a Map of records */ - def asMap[U, V](parser: SqlRow => (U, V))(implicit connection: Connection): Map[U, V] = normalStatement.execute(_.asMap(parser)) - def asMap[U, V]()(implicit connection: Connection, p: RowParser[(U, V)]): Map[U, V] = normalStatement.execute(_.asMap[U, V]) + def asMap[U, V](parser: SqlRow => (U, V))(implicit connection: Connection): Map[U, V] = + normalStatement.execute(_.asMap(parser)) + def asMap[U, V]()(implicit connection: Connection, p: RowParser[(U, V)]): Map[U, V] = + normalStatement.execute(_.asMap[U, V]) - def asMultiMap[U, V](parser: SqlRow => (U, V))(implicit connection: Connection): Map[U, Set[V]] = normalStatement.execute(_.asMultiMap(parser)) - def asMultiMap[U, V]()(implicit connection: Connection, p: RowParser[(U, V)]): Map[U, Set[V]] = normalStatement.execute(_.asMultiMap[U, V]) + def asMultiMap[U, V](parser: SqlRow => (U, V))(implicit connection: Connection): Map[U, Set[V]] = + normalStatement.execute(_.asMultiMap(parser)) + def asMultiMap[U, V]()(implicit connection: Connection, p: RowParser[(U, V)]): Map[U, Set[V]] = + normalStatement.execute(_.asMultiMap[U, V]) /** - * Execute this query and get back the result as a single value. 
Assumes that there is only one - * row and one value in the result set. - * @param connection the connection to use when executing the query - * @return the results as a single value + * Execute this query and get back the result as a single value. Assumes that there is only one row and one value in + * the result set. + * @param connection + * the connection to use when executing the query + * @return + * the results as a single value */ def asScalar[A]()(implicit connection: Connection): A = normalStatement.execute(_.asScalar[A]()) /** - * Execute this query and get back the result as an optional single value. Assumes that there is - * only one row and one value in the result set. - * @param parser the RowParser to use when parsing the result set - * @param connection the connection to use when executing the query - * @return the results as an optional single value + * Execute this query and get back the result as an optional single value. Assumes that there is only one row and one + * value in the result set. + * @param parser + * the RowParser to use when parsing the result set + * @param connection + * the connection to use when executing the query + * @return + * the results as an optional single value */ def asScalarOption[A]()(implicit connection: Connection): Option[A] = normalStatement.execute(_.asScalarOption[A]()) /** - * The asIterator method returns an Iterator that will stream data out of the database. - * This avoids an OutOfMemoryError when dealing with large datasets. Bear in mind that many - * JDBC implementations will not allow additional queries to the connection before all records - * in the Iterator have been retrieved. - * @param parser the RowParser to parse rows with - * @param fetchSize the number of rows to fetch at a time, defaults to 100. 
If the JDBC Driver - * is MySQL, the fetchSize will always default to Int.MinValue, as MySQL's JDBC implementation - * ignores all other fetchSize values and only streams if fetchSize is Int.MinValue + * The asIterator method returns an Iterator that will stream data out of the database. This avoids an + * OutOfMemoryError when dealing with large datasets. Bear in mind that many JDBC implementations will not allow + * additional queries to the connection before all records in the Iterator have been retrieved. + * @param parser + * the RowParser to parse rows with + * @param fetchSize + * the number of rows to fetch at a time, defaults to 100. If the JDBC Driver is MySQL, the fetchSize will always + * default to Int.MinValue, as MySQL's JDBC implementation ignores all other fetchSize values and only streams if + * fetchSize is Int.MinValue */ def asIterator[A](parser: SqlRow => A, fetchSize: Int = 100)(implicit connection: Connection): Iterator[A] = { val prepared = streamedStatement(fetchSize) diff --git a/relate/src/main/scala/com/lucidchart/relate/SqlResult.scala b/relate/src/main/scala/com/lucidchart/relate/SqlResult.scala index 7d7e650..9b1014e 100644 --- a/relate/src/main/scala/com/lucidchart/relate/SqlResult.scala +++ b/relate/src/main/scala/com/lucidchart/relate/SqlResult.scala @@ -11,15 +11,13 @@ object SqlResult { /** * The SqlResult class is a wrapper around Java's ResultSet class. * - * It provides methods to allows users to retrieve specific columns by name and datatype, - * but also provides methods that can, given a [[com.lucidchart.relate.RowParser RowParser]], - * parse the entire result set as a collection of records returned by the parser. These methods are - * also defined in the Sql trait, and are most conveniently used when chained with parameter - * insertion. For how to do this, see the [[com.lucidchart.relate.Sql Sql]] trait - * documentation. 
+ * It provides methods to allow users to retrieve specific columns by name and datatype, but also provides methods that + * can, given a [[com.lucidchart.relate.RowParser RowParser]], parse the entire result set as a collection of records + * returned by the parser. These methods are also defined in the Sql trait, and are most conveniently used when chained + * with parameter insertion. For how to do this, see the [[com.lucidchart.relate.Sql Sql]] trait documentation. * - * The extraction methods (int, string, long, etc.) also have "strict" counterparts. The "strict" - * methods are slightly faster, but do not do type checking or handle null values. + * The extraction methods (int, string, long, etc.) also have "strict" counterparts. The "strict" methods are slightly + * faster, but do not do type checking or handle null values. */ class SqlResult(val resultSet: java.sql.ResultSet) extends ResultSetWrapper with CollectionsSqlResult { @@ -55,15 +53,15 @@ class SqlResult(val resultSet: java.sql.ResultSet) extends ResultSetWrapper with def asScalarOption[A](): Option[A] = { if (resultSet.next()) { Some(resultSet.getObject(1).asInstanceOf[A]) - } - else { + } else { None } } /** * Get the metadata for the java.sql.ResultSet that underlies this SqlResult - * @return the metadata + * @return + * the metadata */ def getMetaData(): ResultSetMetaData = resultSet.getMetaData() } diff --git a/relate/src/main/scala/com/lucidchart/relate/SqlRow.scala b/relate/src/main/scala/com/lucidchart/relate/SqlRow.scala index 0d367c2..1cf7bf4 100644 --- a/relate/src/main/scala/com/lucidchart/relate/SqlRow.scala +++ b/relate/src/main/scala/com/lucidchart/relate/SqlRow.scala @@ -14,20 +14,26 @@ object SqlRow { } class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { + /** - * Get the number of the row the SqlResult is currently on - * @return the current row number - */ + * Get the number of the row the SqlResult is currently on + * @return + * the current row number + 
*/ def getRow(): Int = resultSet.getRow() def strictArray(column: String): java.sql.Array = resultSet.getArray(column) def strictArrayOption(column: String): Option[java.sql.Array] = getResultSetOption(resultSet.getArray(column)) def strictAsciiStream(column: String): InputStream = resultSet.getAsciiStream(column) - def strictAsciiStreamOption(column: String): Option[InputStream] = getResultSetOption(resultSet.getAsciiStream(column)) + def strictAsciiStreamOption(column: String): Option[InputStream] = getResultSetOption( + resultSet.getAsciiStream(column) + ) def strictBigDecimal(column: String): BigDecimal = resultSet.getBigDecimal(column) def strictBigDecimalOption(column: String): Option[BigDecimal] = getResultSetOption(resultSet.getBigDecimal(column)) def strictBinaryStream(column: String): InputStream = resultSet.getBinaryStream(column) - def strictBinaryStreamOption(column: String): Option[InputStream] = getResultSetOption(resultSet.getBinaryStream(column)) + def strictBinaryStreamOption(column: String): Option[InputStream] = getResultSetOption( + resultSet.getBinaryStream(column) + ) def strictBlob(column: String): Blob = resultSet.getBlob(column) def strictBlobOption(column: String): Option[Blob] = getResultSetOption(resultSet.getBlob(column)) def strictBoolean(column: String): Boolean = resultSet.getBoolean(column) @@ -37,13 +43,17 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { def strictBytes(column: String): Array[Byte] = resultSet.getBytes(column) def strictBytesOption(column: String): Option[Array[Byte]] = getResultSetOption(resultSet.getBytes(column)) def strictCharacterStream(column: String): Reader = resultSet.getCharacterStream(column) - def strictCharacterStreamOption(column: String): Option[Reader] = getResultSetOption(resultSet.getCharacterStream(column)) + def strictCharacterStreamOption(column: String): Option[Reader] = getResultSetOption( + resultSet.getCharacterStream(column) + ) def strictClob(column: String): 
Clob = resultSet.getClob(column) def strictClobOption(column: String): Option[Clob] = getResultSetOption(resultSet.getClob(column)) def strictDate(column: String): java.sql.Date = resultSet.getDate(column) def strictDateOption(column: String): Option[java.sql.Date] = getResultSetOption(resultSet.getDate(column)) def strictDate(column: String, cal: Calendar): java.sql.Date = resultSet.getDate(column, cal) - def strictDateOption(column: String, cal: Calendar): Option[java.sql.Date] = getResultSetOption(resultSet.getDate(column, cal)) + def strictDateOption(column: String, cal: Calendar): Option[java.sql.Date] = getResultSetOption( + resultSet.getDate(column, cal) + ) def strictDouble(column: String): Double = resultSet.getDouble(column) def strictDoubleOption(column: String): Option[Double] = getResultSetOption(resultSet.getDouble(column)) def strictFloat(column: String): Float = resultSet.getFloat(column) @@ -55,7 +65,9 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { def strictLong(column: String): Long = resultSet.getLong(column) def strictLongOption(column: String): Option[Long] = getResultSetOption(resultSet.getLong(column)) def strictNCharacterStream(column: String): Reader = resultSet.getNCharacterStream(column) - def strictNCharacterStreamOption(column: String): Option[Reader] = getResultSetOption(resultSet.getNCharacterStream(column)) + def strictNCharacterStreamOption(column: String): Option[Reader] = getResultSetOption( + resultSet.getNCharacterStream(column) + ) def strictNClob(column: String): NClob = resultSet.getNClob(column) def strictNClobOption(column: String): Option[NClob] = getResultSetOption(resultSet.getNClob(column)) def strictNString(column: String): String = resultSet.getNString(column) @@ -63,7 +75,9 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { def strictObject(column: String): Object = resultSet.getObject(column) def strictObjectOption(column: String): Option[Object] = 
getResultSetOption(resultSet.getObject(column)) def strictObject(column: String, map: Map[String, Class[_]]): Object = resultSet.getObject(column, map.asJava) - def strictObjectOption(column: String, map: Map[String, Class[_]]): Option[Object] = Option(resultSet.getObject(column, map.asJava)) + def strictObjectOption(column: String, map: Map[String, Class[_]]): Option[Object] = Option( + resultSet.getObject(column, map.asJava) + ) def strictRef(column: String): Ref = resultSet.getRef(column) def strictRefOption(column: String): Option[Ref] = getResultSetOption(resultSet.getRef(column)) def strictRowId(column: String): RowId = resultSet.getRowId(column) @@ -81,7 +95,9 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { def strictTimestamp(column: String): Timestamp = resultSet.getTimestamp(column) def strictTimestampOption(column: String): Option[Timestamp] = getResultSetOption(resultSet.getTimestamp(column)) def strictTimestamp(column: String, cal: Calendar): Timestamp = resultSet.getTimestamp(column, cal) - def strictTimestampOption(column: String, cal: Calendar): Option[Timestamp] = getResultSetOption(resultSet.getTimestamp(column, cal)) + def strictTimestampOption(column: String, cal: Calendar): Option[Timestamp] = getResultSetOption( + resultSet.getTimestamp(column, cal) + ) def strictURL(column: String): URL = resultSet.getURL(column) def strictURLOption(column: String): Option[URL] = getResultSetOption(resultSet.getURL(column)) @@ -115,9 +131,9 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { def bigIntOption(column: String): Option[BigInt] = { extractOption(column) { case x: java.math.BigInteger => BigInt(x) - case x: Int => BigInt(x) - case x: Long => BigInt(x) - case x: String => BigInt(x) + case x: Int => BigInt(x) + case x: Long => BigInt(x) + case x: String => BigInt(x) } } @@ -128,14 +144,16 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { def 
javaBigIntegerOption(column: String): Option[java.math.BigInteger] = { extractOption(column) { case x: java.math.BigInteger => x - case x: Int => java.math.BigInteger.valueOf(x) - case x: Long => java.math.BigInteger.valueOf(x) - case x: String => new java.math.BigInteger(x) + case x: Int => java.math.BigInteger.valueOf(x) + case x: Long => java.math.BigInteger.valueOf(x) + case x: String => new java.math.BigInteger(x) } } def javaBigDecimal(column: String): java.math.BigDecimal = javaBigDecimalOption(column).get - def javaBigDecimalOption(column: String): Option[java.math.BigDecimal] = getResultSetOption(resultSet.getBigDecimal(column)) + def javaBigDecimalOption(column: String): Option[java.math.BigDecimal] = getResultSetOption( + resultSet.getBigDecimal(column) + ) def date(column: String): java.util.Date = dateOption(column).get // Timestamp documentation says that "it is recommended that code not view Timestamp values generically as an instance @@ -149,15 +167,16 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { def localDateOption(column: String): Option[LocalDate] = strictDateOption(column).map(_.toLocalDate) def instant(column: String): Instant = instantOption(column).get - def instantOption(column: String): Option[Instant] = getResultSetOption(resultSet.getTimestamp(column)).map(_.toInstant) + def instantOption(column: String): Option[Instant] = + getResultSetOption(resultSet.getTimestamp(column)).map(_.toInstant) def byteArray(column: String): Array[Byte] = byteArrayOption(column).get def byteArrayOption(column: String): Option[Array[Byte]] = { extractOption(column) { case x: Array[Byte] => x - case x: Blob => x.getBytes(0, x.length.toInt) - case x: Clob => x.getSubString(1, x.length.asInstanceOf[Int]).getBytes - case x: String => x.toCharArray.map(_.toByte) + case x: Blob => x.getBytes(0, x.length.toInt) + case x: Clob => x.getSubString(1, x.length.asInstanceOf[Int]).getBytes + case x: String => x.toCharArray.map(_.toByte) } } 
@@ -168,9 +187,9 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { case b => { val bytes = b match { case x: Array[Byte] => x - case x: Blob => x.getBytes(0, x.length.toInt) - case x: Clob => x.getSubString(1, x.length.asInstanceOf[Int]).getBytes - case x: String => x.toCharArray.map(_.toByte) + case x: Blob => x.getBytes(0, x.length.toInt) + case x: Clob => x.getSubString(1, x.length.asInstanceOf[Int]).getBytes + case x: String => x.toCharArray.map(_.toByte) } require(bytes.length == 16) @@ -191,8 +210,8 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { require(hex.length == 32) UUID.fromString( - hex.substring( 0, 8) + "-" + - hex.substring( 8, 12) + "-" + + hex.substring(0, 8) + "-" + + hex.substring(8, 12) + "-" + hex.substring(12, 16) + "-" + hex.substring(16, 20) + "-" + hex.substring(20, 32) @@ -204,37 +223,37 @@ class SqlRow(val resultSet: java.sql.ResultSet) extends ResultSetWrapper { def enumOption(column: String, e: Enumeration): Option[e.Value] = for { id <- intOption(column) value <- Try(e(id)).toOption - } yield(value) + } yield value protected def getResultSetOption[A](f: => A): Option[A] = { f match { - case x if (x == null || resultSet.wasNull()) => None - case x => Some(x) + case x if x == null || resultSet.wasNull() => None + case x => Some(x) } } protected[relate] def extractOption[A](column: String)(f: (Any) => A): Option[A] = { resultSet.getObject(column).asInstanceOf[Any] match { - case x if (x == null || resultSet.wasNull()) => None - case x => Some(f(x)) + case x if x == null || resultSet.wasNull() => None + case x => Some(f(x)) } } } /** - * The SqlResultTypes object provides syntactic sugar for RowParser creation. 
- * {{{ - * import com.lucidchart.relate._ - * import com.lucidchart.relate.SqlResultTypes._ - * - * val rowParser = RowParser { implicit row => - * (long("id"), string("name")) - * } - * }}} - * - * In this example, declaring "row" as implicit precludes the need to explicitly use the long and - * string methods on "row". - */ + * The SqlResultTypes object provides syntactic sugar for RowParser creation. + * {{{ + * import com.lucidchart.relate._ + * import com.lucidchart.relate.SqlResultTypes._ + * + * val rowParser = RowParser { implicit row => + * (long("id"), string("name")) + * } + * }}} + * + * In this example, declaring "row" as implicit precludes the need to explicitly use the long and string methods on + * "row". + */ object SqlResultTypes { def strictArray(column: String)(implicit sr: SqlRow) = sr.strictArray(column) def strictArrayOption(column: String)(implicit sr: SqlRow) = sr.strictArrayOption(column) @@ -277,7 +296,8 @@ object SqlResultTypes { def strictObject(column: String)(implicit sr: SqlRow) = sr.strictObject(column) def strictObjectOption(column: String)(implicit sr: SqlRow) = sr.strictObjectOption(column) def strictObject(column: String, map: Map[String, Class[_]])(implicit sr: SqlRow) = sr.strictObject(column, map) - def strictObjectOption(column: String, map: Map[String, Class[_]])(implicit sr: SqlRow) = sr.strictObjectOption(column, map) + def strictObjectOption(column: String, map: Map[String, Class[_]])(implicit sr: SqlRow) = + sr.strictObjectOption(column, map) def strictRef(column: String)(implicit sr: SqlRow) = sr.strictRef(column) def strictRefOption(column: String)(implicit sr: SqlRow) = sr.strictRefOption(column) def strictRowId(column: String)(implicit sr: SqlRow) = sr.strictRowId(column) diff --git a/relate/src/main/scala/com/lucidchart/relate/StatementPreparer.scala b/relate/src/main/scala/com/lucidchart/relate/StatementPreparer.scala index 7df9927..50023aa 100644 --- 
a/relate/src/main/scala/com/lucidchart/relate/StatementPreparer.scala +++ b/relate/src/main/scala/com/lucidchart/relate/StatementPreparer.scala @@ -12,46 +12,47 @@ private[relate] sealed trait StatementPreparer { /** * Execute the statement and close all resources - * @param callback the function to call on the results of the query - * @return whatever the callback returns + * @param callback + * the function to call on the results of the query + * @return + * whatever the callback returns */ def execute[A](callback: (SqlResult) => A): A = { try { val resultSet = results() try { callback(SqlResult(resultSet)) - } - finally { + } finally { resultSet.close() } - } - finally { + } finally { stmt.close() } } /** * Execute the query and close - * @return true if the first result is a ResultSet object; false if the first result is an update count or there is no result + * @return + * true if the first result is a ResultSet object; false if the first result is an update count or there is no + * result */ def execute(): Boolean = { try { stmt.execute() - } - finally { + } finally { stmt.close() } } /** * Execute the query and close - * @return the number of rows affected by the query + * @return + * the number of rows affected by the query */ def executeUpdate(): Int = { try { stmt.executeUpdate() - } - finally { + } finally { stmt.close() } } @@ -66,14 +67,15 @@ private[relate] trait BaseStatementPreparer extends StatementPreparer { protected def setTimeout(stmt: PreparedStatement): Unit = for { seconds <- timeout stmt <- Option(stmt) - } yield (stmt.setQueryTimeout(seconds)) + } yield stmt.setQueryTimeout(seconds) } private[relate] trait NormalStatementPreparer extends BaseStatementPreparer { /** * Get a PreparedStatement from this query with inserted parameters - * @return the PreparedStatement + * @return + * the PreparedStatement */ protected override def prepare(): PreparedStatement = { val stmt = connection.prepareStatement(parsedQuery) @@ -84,7 +86,8 @@ 
private[relate] trait NormalStatementPreparer extends BaseStatementPreparer { /** * Get the results of excutioning this statement - * @return the resulting ResultSet + * @return + * the resulting ResultSet */ override def results(): ResultSet = { stmt.executeQuery() @@ -92,9 +95,11 @@ private[relate] trait NormalStatementPreparer extends BaseStatementPreparer { } private[relate] trait InsertionStatementPreparer extends BaseStatementPreparer { + /** * Get a PreparedStatement from this query that will return generated keys - * @return the PreparedStatement + * @return + * the PreparedStatement */ protected override def prepare(): PreparedStatement = { val stmt = connection.prepareStatement(parsedQuery, Statement.RETURN_GENERATED_KEYS) @@ -105,7 +110,8 @@ private[relate] trait InsertionStatementPreparer extends BaseStatementPreparer { /** * Get the results of executing this insertion statement - * @return the ResultSet + * @return + * the ResultSet */ override def results(): ResultSet = { stmt.executeUpdate() @@ -118,7 +124,8 @@ private[relate] trait StreamedStatementPreparer extends BaseStatementPreparer { /** * Get a PreparedStatement from this query that will stream the resulting rows - * @return the PreparedStatement + * @return + * the PreparedStatement */ protected override def prepare(): PreparedStatement = { val stmt = connection.prepareStatement( @@ -130,8 +137,7 @@ private[relate] trait StreamedStatementPreparer extends BaseStatementPreparer { val driver = connection.getMetaData().getDriverName() if (driver.toLowerCase.contains("mysql")) { stmt.setFetchSize(Int.MinValue) - } - else { + } else { stmt.setFetchSize(fetchSize) } applyParams(stmt) @@ -140,8 +146,10 @@ private[relate] trait StreamedStatementPreparer extends BaseStatementPreparer { /** * Override the default execute method so that it does not close the resources - * @param callback the function to call on the results of the query - * @return whatever the callback returns + * @param callback + * the 
function to call on the results of the query + * @return + * whatever the callback returns */ override def execute[A](callback: (SqlResult) => A): A = { callback(SqlResult(results())) @@ -149,9 +157,10 @@ private[relate] trait StreamedStatementPreparer extends BaseStatementPreparer { /** * Get the results of executing this statement with a streaming ResultSet - * @return the ResultSet + * @return + * the ResultSet */ override def results(): ResultSet = { stmt.executeQuery() } -} \ No newline at end of file +} diff --git a/relate/src/test/scala/ColReaderTest.scala b/relate/src/test/scala/ColReaderTest.scala index e08e3d4..f689dc4 100644 --- a/relate/src/test/scala/ColReaderTest.scala +++ b/relate/src/test/scala/ColReaderTest.scala @@ -67,14 +67,14 @@ object RecordA extends Mockito { row.javaBigDecimalOption("jbd") returns Some(new java.math.BigDecimal(10)) row.javaBigIntegerOption("jbi") returns Some(java.math.BigInteger.valueOf(10)) row.boolOption("bool") returns Some(true) - row.byteArrayOption("ba") returns Some(Array[Byte](1,2,3)) - row.byteOption("byte") returns Some((1: Byte)) - row.dateOption("date") returns Some((new Date(timeMillis))) + row.byteArrayOption("ba") returns Some(Array[Byte](1, 2, 3)) + row.byteOption("byte") returns Some(1: Byte) + row.dateOption("date") returns Some(new Date(timeMillis)) row.instantOption("instant") returns Some(Instant.ofEpochMilli(timeMillis)) row.doubleOption("double") returns Some(1.1) row.intOption("int") returns Some(10) row.longOption("long") returns Some(100L) - row.shortOption("short") returns Some((5: Short)) + row.shortOption("short") returns Some(5: Short) row.stringOption("str") returns Some("hello") row.uuidOption("uuid") returns Some(uuid) row.intOption("thing") returns Some(1) @@ -167,7 +167,7 @@ class ColReaderTest extends Specification with Mockito { // Arrays use reference equality so we have to check this // independently of all the other values val bytes = parsed.ba - bytes === Array[Byte](1,2,3) + bytes 
=== Array[Byte](1, 2, 3) parsed.copy(ba = null) mustEqual RecordA( bd = BigDecimal(10), @@ -220,7 +220,7 @@ class ColReaderTest extends Specification with Mockito { // Arrays use reference equality so we have to check this // independantly of all the other values val bytes: Array[Byte] = parsed.ba.get - bytes === Array[Byte](1,2,3) + bytes === Array[Byte](1, 2, 3) parsed.copy(ba = None) mustEqual RecordB( bd = Some(BigDecimal(10)), @@ -247,7 +247,8 @@ class ColReaderTest extends Specification with Mockito { "parse a byte array" in { val rs = mock[java.sql.ResultSet] val row = SqlRow(rs) - rs.getObject("col") returns Array[Byte]('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f') + rs.getObject("col") returns Array[Byte]('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', + 'f') ColReader.uuidReader.read("col", row) mustEqual Some(new UUID(3472611983179986487L, 4051376414998685030L)) } diff --git a/relate/src/test/scala/ExpandableSpec.scala b/relate/src/test/scala/ExpandableSpec.scala index 8260f1a..9414c40 100644 --- a/relate/src/test/scala/ExpandableSpec.scala +++ b/relate/src/test/scala/ExpandableSpec.scala @@ -4,43 +4,43 @@ import org.specs2.mutable._ class ExpandableSpec extends Specification { - //"The expand method" should { + // "The expand method" should { - // "work with one param" in { - // val query = ExpandableQuery("SELECT * FROM table WHERE id IN ({ids})").expand { implicit query => - // commaSeparated("ids", 3) - // }.query + // "work with one param" in { + // val query = ExpandableQuery("SELECT * FROM table WHERE id IN ({ids})").expand { implicit query => + // commaSeparated("ids", 3) + // }.query - // query must_== "SELECT * FROM table WHERE id IN ({ids_0},{ids_1},{ids_2})" - // } + // query must_== "SELECT * FROM table WHERE id IN ({ids_0},{ids_1},{ids_2})" + // } - // "work with multiple params" in { - // val query = ExpandableQuery("SELECT * FROM table WHERE id IN ({ids}) AND value IN 
({values})").expand { implicit query => - // commaSeparated("values", 2) - // commaSeparated("ids", 3) - // }.query + // "work with multiple params" in { + // val query = ExpandableQuery("SELECT * FROM table WHERE id IN ({ids}) AND value IN ({values})").expand { implicit query => + // commaSeparated("values", 2) + // commaSeparated("ids", 3) + // }.query - // query must_== "SELECT * FROM table WHERE id IN ({ids_0},{ids_1},{ids_2}) AND value IN ({values_0},{values_1})" - // } + // query must_== "SELECT * FROM table WHERE id IN ({ids_0},{ids_1},{ids_2}) AND value IN ({values_0},{values_1})" + // } - // "work with a mix of expandable and normal 'on' parameters" in { - // val query = ExpandableQuery("SELECT * FROM table WHERE id IN ({ids}) AND name={name}").expand { implicit query => - // commaSeparated("ids", 3) - // }.query + // "work with a mix of expandable and normal 'on' parameters" in { + // val query = ExpandableQuery("SELECT * FROM table WHERE id IN ({ids}) AND name={name}").expand { implicit query => + // commaSeparated("ids", 3) + // }.query - // query must_== "SELECT * FROM table WHERE id IN ({ids_0},{ids_1},{ids_2}) AND name={name}" - // } + // query must_== "SELECT * FROM table WHERE id IN ({ids_0},{ids_1},{ids_2}) AND name={name}" + // } - // "work with chained 'expand' method calls" in { - // val query = ExpandableQuery("SELECT * FROM table WHERE id IN ({ids}) AND value IN ({values})").expand { implicit query => - // commaSeparated("values", 2) - // }.expand { implicit query => - // commaSeparated("ids", 3) - // }.query + // "work with chained 'expand' method calls" in { + // val query = ExpandableQuery("SELECT * FROM table WHERE id IN ({ids}) AND value IN ({values})").expand { implicit query => + // commaSeparated("values", 2) + // }.expand { implicit query => + // commaSeparated("ids", 3) + // }.query - // query must_== "SELECT * FROM table WHERE id IN ({ids_0},{ids_1},{ids_2}) AND value IN ({values_0},{values_1})" - // } + // query must_== "SELECT * 
FROM table WHERE id IN ({ids_0},{ids_1},{ids_2}) AND value IN ({values_0},{values_1})" + // } - //} + // } -} \ No newline at end of file +} diff --git a/relate/src/test/scala/ParameterizationTest.scala b/relate/src/test/scala/ParameterizationTest.scala index 1abdd3f..1b88bc2 100644 --- a/relate/src/test/scala/ParameterizationTest.scala +++ b/relate/src/test/scala/ParameterizationTest.scala @@ -9,24 +9,25 @@ class ParameterizationTest extends Specification { "convert Array[Byte] into a single parameter" in { val byteArrayParam: Parameter = Array[Byte](5, 20, 34, 89, 110) val querySql = sql"INSERT INTO myTable (foo) VALUES ($byteArrayParam)" - querySql.toString mustEqual("INSERT INTO myTable (foo) VALUES (?)") + querySql.toString mustEqual "INSERT INTO myTable (foo) VALUES (?)" } "convert Array[Long] into a tuple" in { - val longArrayParam: Parameter = Array[Long](1l, 5l, 7l) + val longArrayParam: Parameter = Array[Long](1L, 5L, 7L) val querySql = sql"INSERT INTO myTable (foo) VALUES ($longArrayParam)" - querySql.toString mustEqual("INSERT INTO myTable (foo) VALUES (?,?,?)") + querySql.toString mustEqual "INSERT INTO myTable (foo) VALUES (?,?,?)" } } "tuple paramater" should { "use sub-parameter placeholders" in { class CustomParameter(value: Int) extends SingleParameter { - protected[this] def set(statement: PreparedStatement, i: Int) = implicitly[Parameterizable[Int]].set(statement, i, value) + protected[this] def set(statement: PreparedStatement, i: Int) = + implicitly[Parameterizable[Int]].set(statement, i, value) override def appendPlaceholders(stringBuilder: StringBuilder) = stringBuilder.append("?::smallint") } val querySql = sql"INSERT INTO myTable (foo, bar) VALUES (${(1, new CustomParameter(1))})" - querySql.toString mustEqual("INSERT INTO myTable (foo, bar) VALUES (?,?::smallint)") + querySql.toString mustEqual "INSERT INTO myTable (foo, bar) VALUES (?,?::smallint)" } } } diff --git a/relate/src/test/scala/RelateITSpec.scala 
b/relate/src/test/scala/RelateITSpec.scala index 7ebec99..af06e21 100644 --- a/relate/src/test/scala/RelateITSpec.scala +++ b/relate/src/test/scala/RelateITSpec.scala @@ -130,15 +130,13 @@ trait Db { } def deleteDb(): Unit = withConnection { implicit connection => - //sql"DROP DATABASE relate_it_tests".execute() + // sql"DROP DATABASE relate_it_tests".execute() } } - - class RelateITSpec extends Specification with Db { - override def map(tests: =>Fragments) = step(createDb) ^ tests ^ step(deleteDb) + override def map(tests: => Fragments) = step(createDb) ^ tests ^ step(deleteDb) def streamConnection = DriverManager.getConnection(url, props) def streamConnection2 = DriverManager.getConnection(url, props) @@ -187,7 +185,7 @@ class RelateITSpec extends Specification with Db { ) } - //check if statements all closed in all tests stmt.isClosed + // check if statements all closed in all tests stmt.isClosed "insert" should { @@ -199,38 +197,40 @@ class RelateITSpec extends Specification with Db { INSERT INTO pokedex (name, description) VALUES ($pokemonName, $pokemonDesc) """.execute() - //now check if that record was correctly inserted + // now check if that record was correctly inserted val entries = sql""" SELECT id, name, description FROM pokedex WHERE name = $pokemonName """.asList(pokedexParser) - (entries.size must_== 1) and (entries(0).name must_== pokemonName) and (entries(0).description must_== pokemonDesc) + (entries.size must_== 1) and (entries(0).name must_== pokemonName) and (entries( + 0 + ).description must_== pokemonDesc) } "work for multi-insert" in withConnection { implicit connection => - val pokedexId = 8 - val records = List( - (pokedexId, 16, Some(1L)), - (pokedexId, 5, None), - (pokedexId, 10, Some(2L)) - ) + val pokedexId = 8 + val records = List( + (pokedexId, 16, Some(1L)), + (pokedexId, 5, None), + (pokedexId, 10, Some(2L)) + ) sql""" INSERT INTO pokemon (pokedex_id, level, trainer_id) VALUES $records """.execute() - //check if those records were 
inserted + // check if those records were inserted val pokemon = sql""" SELECT id, pokedex_id, level, trainer_id FROM pokemon WHERE pokedex_id = $pokedexId """.asList(pokemonParser) - val levelAndTrainerIdFromDb = pokemon.map { pokemon => - (pokemon.level, pokemon.trainerId) - } - val levelAndTrainerIdFromTest = records.map { pokemon => - (pokemon._2, pokemon._3) - } - (pokemon.size must_== records.size) and (levelAndTrainerIdFromDb must_== levelAndTrainerIdFromTest) + val levelAndTrainerIdFromDb = pokemon.map { pokemon => + (pokemon.level, pokemon.trainerId) + } + val levelAndTrainerIdFromTest = records.map { pokemon => + (pokemon._2, pokemon._3) + } + (pokemon.size must_== records.size) and (levelAndTrainerIdFromDb must_== levelAndTrainerIdFromTest) } "be able to retrieve an autogenerated key" in withConnection { implicit connection => @@ -334,7 +334,7 @@ class RelateITSpec extends Specification with Db { WHERE id = -1 """.asSeq(pokedexParser) - (pokemon.size must_== 0) + pokemon.size must_== 0 } "work with asIterable" in withConnection { implicit connection => @@ -347,7 +347,9 @@ class RelateITSpec extends Specification with Db { """.asIterable(pokedexParser) val iterableAsList = pokemon.map(_.name) - (iterableAsList must contain(names(0))) and (iterableAsList must contain(names(1))) and (iterableAsList must contain(names(2))) + (iterableAsList must contain(names(0))) and (iterableAsList must contain( + names(1) + )) and (iterableAsList must contain(names(2))) } "work with empty asIterable" in withConnection { implicit connection => @@ -394,7 +396,9 @@ class RelateITSpec extends Specification with Db { (row.string("name"), row.string("description")) } - (pokemon(wartortle.name) must_== wartortle.description) and (pokemon(blastoise.name) must_== blastoise.description) + (pokemon(wartortle.name) must_== wartortle.description) and (pokemon( + blastoise.name + ) must_== blastoise.description) } "work for empty asMap" in withConnection { implicit connection => @@ 
-446,7 +450,9 @@ class RelateITSpec extends Specification with Db { WHERE id IN ($ids) """.asList(pokedexParser).map(_.name) - (pokemonNames must contain("Squirtle")) and (pokemonNames must contain("Wartortle")) and (pokemonNames must contain("Blastoise")) + (pokemonNames must contain("Squirtle")) and (pokemonNames must contain( + "Wartortle" + )) and (pokemonNames must contain("Blastoise")) } "fail to insert if given a select query" in withConnection { implicit connection => diff --git a/relate/src/test/scala/SqlResultSpec.scala b/relate/src/test/scala/SqlResultSpec.scala index 5ea9276..7b7f9ce 100644 --- a/relate/src/test/scala/SqlResultSpec.scala +++ b/relate/src/test/scala/SqlResultSpec.scala @@ -15,7 +15,7 @@ case class TestRecord( name: String ) -object TestRecord{ +object TestRecord { implicit val TestRecordRowParser = new RowParser[TestRecord] { def parse(row: SqlRow): TestRecord = TestRecord( row.long("id"), @@ -112,12 +112,16 @@ class SqlResultSpec extends Specification with Mockito { "return a list of 3 elements with an explicit parser" in { val (rs, _, result) = getMocks - rs.getRow returns 0 thenReturn 1 thenReturn 2 thenReturn 3 - rs.next returns true thenReturn true thenReturn true thenReturn false + rs.getRow returns 0 thenReturn 1 thenReturn 2 thenReturn 3 + rs.next returns true thenReturn true thenReturn true thenReturn false rs.getLong("id") returns (100L: java.lang.Long) rs.getString("name") returns "the name" - result.asList(parser) mustEqual List(TestRecord(100L, "the name"), TestRecord(100L, "the name"), TestRecord(100L, "the name")) + result.asList(parser) mustEqual List( + TestRecord(100L, "the name"), + TestRecord(100L, "the name"), + TestRecord(100L, "the name") + ) } "return an empty list with an explicit parser" in { @@ -132,12 +136,16 @@ class SqlResultSpec extends Specification with Mockito { "return a list of 3 elements with an implicit parser" in { val (rs, _, result) = getMocks - rs.getRow returns 0 thenReturn 1 thenReturn 2 
thenReturn 3 - rs.next returns true thenReturn true thenReturn true thenReturn false + rs.getRow returns 0 thenReturn 1 thenReturn 2 thenReturn 3 + rs.next returns true thenReturn true thenReturn true thenReturn false rs.getLong("id") returns (100L: java.lang.Long) rs.getString("name") returns "the name" - result.asList[TestRecord] mustEqual List(TestRecord(100L, "the name"), TestRecord(100L, "the name"), TestRecord(100L, "the name")) + result.asList[TestRecord] mustEqual List( + TestRecord(100L, "the name"), + TestRecord(100L, "the name"), + TestRecord(100L, "the name") + ) } "return an empty list with an implicit parser" in { @@ -154,8 +162,8 @@ class SqlResultSpec extends Specification with Mockito { "return a map of 3 elements with an explicit parser" in { val (rs, _, result) = getMocks - rs.getRow returns 0 thenReturn 1 thenReturn 2 thenReturn 3 - rs.next returns true thenReturn true thenReturn true thenReturn false + rs.getRow returns 0 thenReturn 1 thenReturn 2 thenReturn 3 + rs.next returns true thenReturn true thenReturn true thenReturn false rs.getLong("id") returns 1L thenReturns 2L thenReturns 3L rs.getString("name") returns "the name" @@ -183,8 +191,8 @@ class SqlResultSpec extends Specification with Mockito { val (rs, _, result) = getMocks import java.lang.{Long => L} - rs.getRow returns 0 thenReturn 1 thenReturn 2 thenReturn 3 - rs.next returns true thenReturn true thenReturn true thenReturn false + rs.getRow returns 0 thenReturn 1 thenReturn 2 thenReturn 3 + rs.next returns true thenReturn true thenReturn true thenReturn false rs.getLong("id") returns (1: L) thenReturns (2: L) thenReturns (3: L) rs.getString("name") returns "the name" @@ -207,8 +215,8 @@ class SqlResultSpec extends Specification with Mockito { val (rs, _, result) = getMocks import java.lang.{Long => L} - rs.getRow returns 0 thenReturn 1 thenReturn 2 thenReturn 3 thenReturn 4 - rs.next returns true thenReturn true thenReturn true thenReturn true thenReturn false + rs.getRow returns 0 
thenReturn 1 thenReturn 2 thenReturn 3 thenReturn 4 + rs.next returns true thenReturn true thenReturn true thenReturn true thenReturn false rs.getString("id") returns "1" thenReturns "2" thenReturns "1" thenReturns "2" rs.getString("name") returns "one" thenReturns "two" thenReturns "three" thenReturns "four" @@ -260,11 +268,10 @@ class SqlResultSpec extends Specification with Mockito { val id: Object = 12: java.lang.Integer rs.getObject("id") returns id - val nameOpt = row.extractOption("name") { any => any match { case x: String => x - case _ => "" + case _ => "" } } @@ -273,7 +280,7 @@ class SqlResultSpec extends Specification with Mockito { val idOpt = row.extractOption("id") { any => any match { case x: Int => x - case _ => 0 + case _ => 0 } } @@ -389,7 +396,7 @@ class SqlResultSpec extends Specification with Mockito { "properly pass through the call to ResultSet" in { val (rs, row, _) = getMocks - val res: Array[Byte] = Array(1,2,3) + val res: Array[Byte] = Array(1, 2, 3) rs.getBytes("strictBytes") returns res row.strictBytes("strictBytes") mustEqual res row.strictBytesOption("strictBytes") must beSome(res) @@ -540,7 +547,7 @@ class SqlResultSpec extends Specification with Mockito { row.strictObject("strictObject") mustEqual res row.strictObjectOption("strictObject") must beSome(res) - val map = Map[String,Class[_]]() + val map = Map[String, Class[_]]() rs.getObject("strictObject", map.asJava) returns res row.strictObject("strictObject", map) mustEqual res row.strictObjectOption("strictObject", map) must beSome(res) @@ -678,7 +685,7 @@ class SqlResultSpec extends Specification with Mockito { "return Some(0) if the value in the database was really 0" in { val (rs, row, _) = getMocks - val res = 0 : java.lang.Integer + val res = 0: java.lang.Integer rs.getInt("int") returns res rs.wasNull returns false row.intOption("int") must beSome(res) @@ -865,7 +872,7 @@ class SqlResultSpec extends Specification with Mockito { "return the correct value" in { val (rs, row, 
_) = getMocks - val res = Array[Byte]('1','2','3') + val res = Array[Byte]('1', '2', '3') rs.getObject("byteArray") returns res row.byteArray("byteArray") mustEqual res row.byteArrayOption("byteArray") must beSome(res)