Operations
Query API
There are various query APIs. All of them (single
, first
, list
and foreach
) will execute java.sql.PreparedStatement#executeQuery()
.
Single / Optional Result for Query
single
returns matched single row as an Option
value. If unexpectedly multiple rows are returned, a runtime exception will be thrown.
import scalikejdbc._
val id = 123
// simple example
val name: Option[String] = DB readOnly { implicit session =>
sql"select name from emp where id = ${id}".map(rs => rs.string("name")).single.apply()
}
// defined mapper as a function
val nameOnly = (rs: WrappedResultSet) => rs.string("name")
val name: Option[String] = DB readOnly { implicit session =>
sql"select name from emp where id = ${id}".map(nameOnly).single.apply()
}
// define a class to map the result
case class Emp(id: String, name: String)
val emp: Option[Emp] = DB readOnly { implicit session =>
sql"select id, name from emp where id = ${id}"
.map(rs => Emp(rs.string("id"), rs.string("name"))).single.apply()
}
// QueryDSL
object Emp extends SQLSyntaxSupport[Emp] {
def apply(e: ResultName[Emp])(rs: WrappedResultSet): Emp = new Emp(id = rs.get(e.id), name = rs.get(e.name))
}
val e = Emp.syntax("e")
val emp: Option[Emp] = DB readOnly { implicit session =>
withSQL { select.from(Emp as e).where.eq(e.id, id) }.map(Emp(e.resultName)).single.apply()
}
You can learn about QueryDSL in detail here:
First Result from Multiple Results
first
returns the first row of matched rows as an Option
value.
val name: Option[String] = DB readOnly { implicit session =>
sql"select name from emp".map(rs => rs.string("name")).first.apply()
}
val e = Emp.syntax("e")
val name: Option[String] = DB readOnly { implicit session =>
withSQL { select(e.result.name).from(Emp as e) }.map(_.string(e.name)).first.apply()
}
List Results
list
returns matched multiple rows as scala.collection.immutable.List
.
val name: List[String] = DB readOnly { implicit session =>
sql"select name from emp".map(rs => rs.string("name")).list.apply()
}
val e = Emp.syntax("e")
val name: List[String] = DB readOnly { implicit session =>
withSQL { select(e.result.name).from(Emp as e) }.map(_.string(e.name)).list.apply()
}
Foreach Operation
foreach
allows you to make some side-effect in iterations. This API is useful for handling large ResultSet
.
DB readOnly { implicit session =>
sql"select name from emp".foreach { rs =>
out.write(rs.string("name"))
}
}
val e = Emp.syntax("e")
DB readOnly { implicit session =>
withSQL { select(e.name).from(Emp as e) }.foreach { rs =>
out.write(rs.string(e.name))
}
}
Setting JDBC fetchSize
For instance, the PostgreSQL JDBC driver does infinite(!) caching for result sets if fetchSize is set to 0 (the default) and this causes memory problems.
http://docs.oracle.com/javase/8/docs/api/java/sql/Statement.html#setFetchSize-int-
You can specify JDBC fetchSize as follows since version 2.0.5.
val e = Emp.syntax("e")
DB readOnly { implicit session =>
sql"select name from emp"
.fetchSize(1000)
.foreach { rs => out.write(rs.string("name")) }
}
Or it’s also fine to set fetchSize to scalikejdbc.DBSession
.
val (e, c) = (Emp.syntax("e"), Cmp.syntax("c"))
DB readOnly { implicit session =>
session.fetchSize(1000)
withSQL { select(e.name).from(Emp as e) }.foreach { rs =>
out.write(rs.string(e.name))
}
withSQL { select(c.name).from(Cmp as c) }.foreach { rs =>
out.write(rs.string(c.name))
}
}
Implementing Custom Extractor
In some cases, you might need to implement your own custom extractor. This can be useful for testing out queries.
This example shows how to preserve null
values in a result set.
def toMap(rs: WrappedResultSet): Map[String, Any] = {
(1 to rs.metaData.getColumnCount).foldLeft(Map[String, Any]()) { (result, i) =>
val label = rs.metaData.getColumnLabel(i)
Some(rs.any(label)).map { nullableValue => result + (label -> nullableValue) }.getOrElse(result)
}
}
sql"select * from emp".map(rs => toMap(rs)).single.apply()
Using ParameterBinder
ParameterBinder
enables to customize how parameters should be bound to PreparedStatement
object inside ScalikeJDBC.
The following example shows how to use PreparedStatement#setBinaryStream
when binding InputStream
value to PreparedStatement
.
sql"create table blob_example (id bigint, data blob)".execute.apply()
val bytes = scala.Array[Byte](1, 2, 3, 4, 5, 6, 7)
val in = new ByteArrayInputStream(bytes)
val bytesBinder = ParameterBinder(
value = in,
binder = (stmt: PreparedStatement, idx: Int) => stmt.setBinaryStream(idx, in, bytes.length)
)
sql"insert into blob_example (data) values (${bytesBinder})".update.apply()
Using TypeBinder
TypeBinder[A]
is a type class to extract type A
values from a ResultSet.
The following example shows how to bind long value to MemberId
value.
import scalikejdbc._
import java.sql.ResultSet
implicit val session = AutoSession
// prepare data
sql"create table member (id bigint not null primary key, name varchar(100))".execute.apply()
sql"insert into member values (1, 'Alice')".update.apply()
sql"insert into member values (2, 'Bob') ".update.apply()
sql"insert into member values (3, 'Chris')".update.apply()
case class MemberId(id: Long)
case class Member(id: MemberId, name: String)
// compilation error here
val ids: Seq[MemberId] = sql"select * from member".map(_.get[MemberId]("id")).list.apply()
// <console>:24: error: could not find implicit value for evidence parameter of type scalikejdbc.TypeBinder[MemberId]
// val ids: Seq[MemberId] = sql"select * from member".map(_.get[MemberId]("id")).list.apply()
// ^
Your MemberId
class is not supported by default. You can see what types are supported by default here.
So you need to define your own TypeBinder
for MemberId
type as follows.
// define TypeBinder[MemberId] for implicit value
implicit val memberIdTypeBinder: TypeBinder[MemberId] = new TypeBinder[MemberId] {
def apply(rs: ResultSet, label: String): MemberId = MemberId(rs.getLong(label))
def apply(rs: ResultSet, index: Int): MemberId = MemberId(rs.getLong(index))
}
val ids: Seq[MemberId] = sql"select id from member".map(_.get[MemberId]("id")).list.apply()
// ids: Seq[MemberId] = List(MemberId(1), MemberId(2), MemberId(3))
Of course, it’s also possible to simply deal with this case without TypeBinder
.
val ids: Seq[MemberId] = sql"select id from member".map(rs => MemberId(rs.get("id"))).list.apply()
Using Java SE 8 Date Time API (JSR-310)
ScalikeJDBC still supports Java SE 7, so we provide Java 8 Date Time API support as an optional module.
libraryDependencies += "org.scalikejdbc" %% "scalikejdbc-jsr310" % "2.5.2"
Usage is so simple. Just add import scalikejdbc.jsr310._
.
import scalikejdbc._, jsr310._
import java.time._
case class Group(
id: Long,
name: Option[String],
createdAt: ZonedDateTime)
object Group extends SQLSyntaxSupport[Group] {
def apply(g: SyntaxProvider[Group])(rs: WrappedResultSet): Group = apply(g.resultName)(rs)
def apply(g: ResultName[Group])(rs: WrappedResultSet): Group = new Group(
id = rs.get(g.id),
name = rs.get(g.name),
createdAt = rs.get(g.createdAt)
// (or) createdAt = rs.get[ZonedDateTime](g.createdAt)
// (or) createdAt = rs.zonedDateTime(g.createdAt)
)
}
Update API
update
executes java.sql.PreparedStatement#executeUpdate()
.
import scalikejdbc._
DB localTx { implicit session =>
sql"""insert into emp (id, name, created_at) values (${id}, ${name}, ${DateTime.now})"""
.update.apply()
val id = sql"insert into emp (name, created_at) values (${name}, current_timestamp)"
.updateAndReturnGeneratedKey.apply()
sql"update emp set name = ${newName} where id = ${id}".update.apply()
sql"delete from emp where id = ${id}".update.apply()
}
val column = Emp.column
DB localTx { implicit s =>
withSQL {
insert.into(Emp).namedValues(
column.id -> id,
column.name -> name,
column.createdAt -> DateTime.now)
}.update.apply()
val id: Long = withSQL {
insert.into(Emp).namedValues(column.name -> name, column.createdAt -> sqls.currentTimestamp)
}.updateAndReturnGeneratedKey.apply()
withSQL { update(Emp).set(column.name -> newName).where.eq(column.id, id) }.update.apply()
withSQL { delete.from(Emp).where.eq(column.id, id) }.update.apply()
}
Execute API
execute
executes java.sql.PreparedStatement#execute()
.
DB autoCommit { implicit session =>
sql"create table emp (id integer primary key, name varchar(30))".execute.apply()
}
// QueryDSL doesn't support DDL yet.
Batch API
batch
and batchByName
execute java.sql.PreparedStatement#executeBatch()
.
import scalikejdbc._
DB localTx { implicit session =>
val batchParams: Seq[Seq[Any]] = (2001 to 3000).map(i => Seq(i, "name" + i))
sql"insert into emp (id, name) values (?, ?)".batch(batchParams: _*).apply()
}
DB localTx { implicit session =>
sql"insert into emp (id, name) values ({id}, {name})"
.batchByName(Seq(Seq('id -> 1, 'name -> "Alice"), Seq('id -> 2, 'name -> "Bob")):_*)
.apply()
}
val column = Emp.column
DB localTx { implicit session =>
val batchParams: Seq[Seq[Any]] = (2001 to 3000).map(i => Seq(i, "name" + i))
withSQL {
insert.into(Emp).namedValues(column.id -> sqls.?, column.name -> sqls.?)
}.batch(batchParams: _*).apply()
}