val stoppedAt = column[List[Instant]]("stopped_at")
and I get the following exception when compiling: could not find implicit value for parameter tt: slick.ast.TypedType[List[java.time.Instant]]
[error] val stoppedAt = column[List[Instant]]("stopped_at")
import java.time.Instant
import com.github.tminglei.slickpg._
import slick.basic.Capability
import slick.jdbc.JdbcCapabilities
/** Custom Slick profile wiring in the slick-pg extensions this project uses
  * (arrays, date/time, JSON via Circe, ranges, hstore).
  */
private[taskmanager] trait PgDriver
    extends ExPostgresProfile
    with PgArraySupport
    with PgDateSupport
    with PgDate2Support
    with PgJsonSupport
    with PgRangeSupport
    with PgHStoreSupport
    with PgCirceJsonSupport {

  // Store JSON columns as `jsonb` (PostgreSQL 9.4+).
  def pgjson = "jsonb"

  // Add back `capabilities.insertOrUpdate` to enable native `upsert` support; for postgres 9.5+
  override protected def computeCapabilities: Set[Capability] =
    super.computeCapabilities + JdbcCapabilities.insertOrUpdate

  override val api: API = new API {}

  trait API
      extends super.API
      with ArrayImplicits
      with SimpleDateTimeImplicits
      with DateTimeImplicits
      with SimpleJsonImplicits
      with RangeImplicits
      with HStoreImplicits
      with CirceImplicits
      with CirceJsonPlainImplicits
      with SimpleArrayPlainImplicits
      with Date2DateTimeImplicitsDuration
      with SimpleJsonPlainImplicits
      with SimpleRangePlainImplicits
      with SimpleHStorePlainImplicits {

    // Mapper for `column[List[Instant]]` -> Postgres `timestamptz[]`.
    // Without this, `column[List[Instant]]` fails with
    // "could not find implicit value for parameter tt: TypedType[List[Instant]]".
    // The earlier commented-out attempt used the SQL type name "instant",
    // which does not exist in Postgres; an AdvancedArrayJdbcType with explicit
    // string codecs (per the slick-pg README) is the supported approach.
    implicit val instantListTypeMapper: DriverJdbcType[List[Instant]] =
      new AdvancedArrayJdbcType[Instant](
        "timestamptz",
        s => utils.SimpleArrayUtils.fromString[Instant](Instant.parse)(s).orNull,
        v => utils.SimpleArrayUtils.mkString[Instant](_.toString)(v)
      ).to(_.toList)

    // NOTE(review): no List[String] mapper added here on purpose —
    // ArrayImplicits already supplies one; a second would be ambiguous.
  }
}
private[taskmanager] object PgDriver extends PgDriver
"com.typesafe.slick" %% "slick" % "3.1.1",
"com.typesafe.slick" %% "slick-hikaricp" % "3.1.1",
"org.slf4j" % "slf4j-nop" % "1.6.4", // TODO change to appropriate logging library
"com.github.tminglei" %% "slick-pg" % "0.14.5"
Hi! I'm trying to use the type Point in my model, but I'm not able to make it work
package models
import java.sql.Timestamp
import util.MyPostgresDriver.api._
import play.api.libs.json._
import play.api.libs.functional.syntax._
object Company {

  /** JSON reader: `id` is optional; `createdAt`/`updatedAt` arrive as epoch millis. */
  implicit val messageReads: Reads[Company] = (
    (JsPath \ "id").readNullable[Int] and
      (JsPath \ "companyId").read[String] and
      (JsPath \ "name").read[String] and
      (JsPath \ "location").read[Point] and
      (JsPath \ "createdAt").read[Long].map { long => new Timestamp(long) } and
      (JsPath \ "updatedAt").read[Long].map { long => new Timestamp(long) }
  )(Company.apply _)

  /** JSON writer mirroring [[messageReads]].
    * `id` is `Option[Int]`, so it must be `writeNullable[Int]`:
    * `write[Int]` does not line up with `unlift(Company.unapply)` and fails to compile.
    */
  implicit val messageWrites: Writes[Company] = (
    (JsPath \ "id").writeNullable[Int] and
      (JsPath \ "companyId").write[String] and
      (JsPath \ "name").write[String] and
      (JsPath \ "location").write[Point] and
      (JsPath \ "createdAt").write[Long].contramap { (a: Timestamp) => a.getTime } and
      (JsPath \ "updatedAt").write[Long].contramap { (a: Timestamp) => a.getTime }
  )(unlift(Company.unapply _))
}
// Company row model.
// NOTE(review): `Point` is presumably the JTS geometry type used by
// PgPostGISSupport; it is NOT re-exported by `MyPostgresDriver.api._`, so the
// "Cannot resolve symbol Point" error means this file needs an explicit
// `import com.vividsolutions.jts.geom.Point` — confirm against the slick-pg version.
case class Company(
id: Option[Int],
companyId: String,
name: String,
location: Point,
createdAt: Timestamp,
updatedAt: Timestamp
)
"Cannot resolve symbol Point"
MyPostgresDriver:
package util
import com.github.tminglei.slickpg._
// Custom Slick driver stacking the slick-pg extension traits this app uses
// (arrays, java.time, Play JSON, inet, ltree, ranges, hstore, PostGIS, full-text search).
trait MyPostgresDriver extends ExPostgresDriver
with PgArraySupport
with PgDate2Support
with PgPlayJsonSupport
with PgNetSupport
with PgLTreeSupport
with PgRangeSupport
with PgHStoreSupport
with PgPostGISSupport
with PgSearchSupport {
// Store JSON columns as `jsonb` rather than `json`.
override val pgjson = "jsonb"
///
// The api object merges all implicit column mappers; importing
// `MyPostgresDriver.api._` brings them into scope.
// NOTE(review): PostGISImplicits supplies the *column mappers* for geometry
// types, but the geometry classes themselves (Point, Polygon, ...) still need
// their own import in model files — they are not part of `api._`.
override val api = new API with ArrayImplicits
with DateTimeImplicits
with PostGISImplicits
with PlayJsonImplicits
with NetImplicits
with LTreeImplicits
with RangeImplicits
with HStoreImplicits
with SearchImplicits
with SearchAssistants {}
}
// Singleton instance; import `MyPostgresDriver.api._` in table/model code.
object MyPostgresDriver extends MyPostgresDriver
Is there anything I'm doing wrong here?
Thanks for your help!
<>
and Geodummy.unapply
import MyPostgresDriver.api._
// Row model for table "geodummy".
// NOTE(review): `Polygon` is a PostGIS/JTS geometry type — like `Point`, it
// needs an explicit import in this file; `api._` only provides the mappers.
case class Geodummy(dummy_id: Int, geom: Polygon)
// Slick table mapping: the `<>` projection packs/unpacks rows via
// Geodummy.tupled / Geodummy.unapply.
class Geodummys(tag: Tag) extends Table[Geodummy](tag, "geodummy"){
def dummy_id = column[Int]("dummy_id")
def geom = column[Polygon]("geom")
def * = (dummy_id, geom) <> (Geodummy.tupled, Geodummy.unapply)
}
// Minimal profile customization exposing the stock API under a named object.
trait CustomPostgresDriver extends PostgresProfile {
// NOTE(review): `pgjson` is only consulted by slick-pg's Pg*JsonSupport traits;
// on plain PostgresProfile this def is inert — confirm whether a JSON support
// mixin was intended here.
def pgjson = "jsonb"
// `object CustomAPI` is initialized lazily, so referencing it from a val here is safe.
override val api = CustomAPI
object CustomAPI extends API
}
object CustomPostgresDriver extends CustomPostgresDriver
case class MyType(value: LocalDateTime)
// Single-column table whose whole row is an Option[MyType]; requires an
// implicit BaseColumnType[MyType] (MyTypeMapper) in scope for `column` to resolve.
class TestTableWithOptMyType(tag: Tag)
extends Table[Option[MyType]](tag, "TestTableOptMyType") {
// Nullable column: None <-> SQL NULL.
def optMyTypeColumn = column[Option[MyType]]("opt_my_type")
// Default projection is just the single optional column.
def * = optMyTypeColumn
}
// Same shape as TestTableWithOptMyType but for a bare Option[LocalDateTime];
// relies on the LocalDateTimeMapper implicit for the column type.
class TestTableWithOptDt(tag: Tag)
extends Table[Option[LocalDateTime]](tag, "TestTableOptDt") {
// Nullable column: None <-> SQL NULL.
def optDtTypeColumn = column[Option[LocalDateTime]]("opt_dt_type")
// Default projection is just the single optional column.
def * = optDtTypeColumn
}
// Column mapping LocalDateTime <-> java.sql.Timestamp, using eta-expanded
// method references instead of explicit lambdas.
implicit def LocalDateTimeMapper: BaseColumnType[LocalDateTime] =
MappedColumnType.base[LocalDateTime, Timestamp](Timestamp.valueOf, _.toLocalDateTime)
// Column mapping MyType <-> java.sql.Timestamp: unwrap the value on the way
// to the database, re-wrap on the way back.
implicit def MyTypeMapper: BaseColumnType[MyType] =
MappedColumnType.base[MyType, Timestamp](
myType => Timestamp.valueOf(myType.value),
ts => MyType(ts.toLocalDateTime)
)
Hi all! Multi part question for y'all.
1) In a function where I have a scalar array parameter p
of type Rep[List[X]]
, what's the "preferred" way of turning this into a Query[X, Rep[X], Seq]
? It isn't just p.unnest
, apparently, which has type Rep[X]
.
Looking at examples, it looks like Query(true).map(_ => p.unnest)
, which does have the right signature (albeit is a little weird).
2) Let's say I have 2 array parameters p1
and p2
of the same cardinality. I want "a zip join of the unnested arrays". Pairs (a, b)
where a
from p1
, and b
from p2
, where the indices for a
and b
match. How should I do this? Hint — it's not:
Query(true).map(_ => p1.unnest).zip(Query(true).map(_ => p2.unnest))
since this leads to a rather surprising sql behavior (at least on postgres) related to the ROW_NUMBER
function being used for the zip join.
WITH ORDINALITY
https://www.postgresql.org/docs/current/static/functions-srf.html
PgDate2Support
is available for slick 3.1.1 as an addon; you can use libraryDependencies += "com.github.tminglei" %% "slick-pg_date2" % "0.14.6"
to refer it. But in master for slick 3.2, I merged it into the main jar.
select ARRAY(select stringfield from foos)
yet.