// Custom Slick profile wiring the slick-pg extensions (arrays, Joda dates,
// enums, ranges, hstore, full-text search, PostGIS) into one `api` entry point.
// NOTE(review): the "st_dwithin(point, bytea, ...)" / "expression is of type
// bytea" errors reported below occur when the PostGIS column mappers are not in
// scope at the call site — always `import MyPostgresDriver.api._`, not the
// plain `profile.api._`.
import com.github.tminglei.slickpg._

trait MyPostgresDriver
    extends ExPostgresProfile
    with PgArraySupport
    with PgDateSupportJoda
    with PgEnumSupport
    with PgRangeSupport
    with PgHStoreSupport
    with PgSearchSupport
    with PgPostGISSupport {

  // Single import point for query code: `import MyPostgresDriver.api._`.
  override val api: MyAPI = new MyAPI {}

  trait MyAPI
      extends API
      with ArrayImplicits
      with DateTimeImplicits
      with RangeImplicits
      with HStoreImplicits
      with SearchImplicits
      with PostGISImplicits
      with PostGISAssistants // geometry helper functions (per the slick-pg README example)
      with SearchAssistants
}

object MyPostgresDriver extends MyPostgresDriver
Basically, none of the PG types here are found.
org.postgresql.util.PSQLException: ERROR: function st_dwithin(point, bytea, double precision) does not exist
[info] Hint: No function matches the given name and argument types. You might need to add explicit type casts.
[info] Position: 140
/** Finds all houses within `distance` (in the geometry's units) of `point`,
  * using PostGIS ST_DWithin via slick-pg's `dWithin` extension method.
  *
  * NOTE(review): the profile's `api` import (e.g. `MyPostgresDriver.api._`)
  * must be in scope here so the geometry column mapper is used; otherwise the
  * point parameter is sent to PostgreSQL as bytea and ST_DWithin fails.
  */
def byDistance(point: Point, distance: Double): Future[Seq[House]] =
  db run {
    houses
      .filter(r => r.location.dWithin(point.bind, distance.bind))
      .result // removed the trailing `.map(t => t)` — it was a no-op identity map
  }
due to: org.postgresql.util.PSQLException: ERROR: column "location" is of type point but expression is of type bytea
[info] Hint: You will need to rewrite or cast the expression.
[info] Position: 157
// slick-pg library internal (quoted for reference): builds a PostGIS
// ST_Distance SQL call comparing this geometry column (`n`, the column's AST
// node, defined outside this excerpt) with another geometry expression.
// NOTE(review): `o#to[Float, R]` lifts the Float result into the column's
// option-ness (Float vs Option[Float]) — standard slick OptionMapper plumbing.
def distance[P2, R](geom: Rep[P2])(implicit om: o#to[Float, R]) = {
om.column(GeomLibrary.Distance, n, geom.toNode)
}
How do I use `<->` with a hyphenated string in the `to_tsquery` function?
drop table if exists orders cascade;
-- Demo table: one JSON document per order.
CREATE TABLE orders (
ID serial NOT NULL PRIMARY KEY,
info json NOT NULL
);
-- Seed rows: each document holds an "interestedIn" int array and a "countries" text array.
INSERT INTO orders (info)
VALUES
('{"interestedIn":[11],"countries":["IT", "UK"]}'),
('{"interestedIn":[12],"countries":["US", "UK"]}'),
('{"interestedIn":[1,2,3],"countries":["UK"]}'),
('{"interestedIn":[14,15,16],"countries":["DE", "UK"]}');
-- The `?` (key/element existence) operator is jsonb-only, hence the ::jsonb cast
-- on the json column before descending into "countries".
select *
from orders
where info::jsonb -> 'countries' ? 'US'
;
Hi All, we're trying to get upserts working with slick. Trying to use slick-pg to enable this, but we can't find how to access the new methods. We created our own profile that extends ExPostgresProfile
, and it looks like it's configured OK in application.conf. We replaced import profile.api._
with import MySlickPostgresProfile.api._
However, our methods that use testtable.insertOrUpdate still seem to use the old version, and .insertOrUpdateAll
isn't found. Is there some weird implicit Scala thing we need to do?
Option[scala.collection.Seq]
which is a type-constructor and not a type, hence the compilation fails
Option[List[String]]
- which seems consistent with array columns declaration for slick-pg, as of https://github.com/tminglei/slick-pg#configurable-typemappers - then the generated code comes as expected, but the compiler hangs in some non-terminating loop when trying to compile the project
I am new to Slick and I am trying to convert a SQL query to Slick, and I need help with the same.
My table is:
// Row model: one (id, key, value) triple per row of "table".
final case class TableExp(
id: String,
key: String,
value: String
)
/** Slick table mapping for "table": projects (id, key, value) into [[TableExp]] rows.
  *
  * NOTE(review): renamed from `TableExp` — the original clashed with the
  * `TableExp` case class declared in the same scope and would not compile.
  * Also, `TableQuery[X]` (the macro form) cannot be used for a table class
  * whose constructor takes an extra implicit parameter list; build the query
  * explicitly with `TableQuery(tag => new TableExpTable(tag))` instead.
  */
final class TableExpTable(tag: Tag)(implicit val schema: String)
    extends Table[TableExp](tag, Some(schema), "table") {

  def id = column[String]("id")
  def key = column[String]("key")
  def value = column[String]("value")

  // Default projection: tuple <-> case class mapping.
  def * =
    (id, key, value).shaped <> (TableExp.tupled, TableExp.unapply)

  /** Distinct keys of rows whose id is either `id` or the fallback "id1". */
  def getData(id: String) =
    TableQuery(tag => new TableExpTable(tag))
      .filter(res => res.id === id || res.id === "id1")
      .map(_.key)
      .distinct
}
And the sql query is:
select distinct key, first_value(value) over (partition by key order by case when t.id=$id then 0 else 1 end) value from table t where t.id in ($id, 'id1')
So far I have tried this:
// Attempted Slick translation of the window-function query above
// (first_value(value) OVER (PARTITION BY key ORDER BY ...)).
// NOTE(review): this does not match the SQL — `groupBy` emits a GROUP BY
// clause, not a window; plain Slick has no window-function support. Use
// slick-pg's `PgWindowFuncSupport` (Over.partitionBy(...).sortBy(...) with
// firstValue) or drop down to `sql"..."` literal SQL for this query.
def getData(id: String) =
{
TableQuery[TableExp].filter(res => res.id === id || res.id === "id1").groupBy(_.key).map
{
case (a,b) => Over.partitionBy(b.map(_.key))
}
}
-- Table holding a jsonb array column, defaulting to an empty array.
CREATE TABLE mytable(myelements JSONB DEFAULT '[]'::JSONB);
-- Unnest the jsonb array: one output row per array element (implicit lateral join).
SELECT myelement
FROM mytable,
jsonb_array_elements(mytable.myelements) myelement;
Hi all, I am getting this very weird exception after adding the dependency
val slickPgCirceJson = "com.github.tminglei" %% "slick-pg_circe-json" % "0.19.3"
[error] java.lang.NoSuchMethodError: com.github.tminglei.slickpg.array.PgArrayJdbcTypes$SimpleArrayJdbcType.to(Lscala/Function1;Lscala/reflect/ClassTag;)Lslick/jdbc/JdbcTypesComponent$DriverJdbcType;
and this is how my database class looks like -
// Custom Slick profile aggregating slick-pg extensions (arrays, java.time
// dates, ranges, hstore, circe JSON, full-text search, PostGIS, inet/cidr,
// ltree) behind a single `api` object.
// NOTE(review): the NoSuchMethodError on SimpleArrayJdbcType.to quoted above is
// a binary-compatibility symptom — every "com.github.tminglei" artifact
// (slick-pg, slick-pg_circe-json, ...) must be on the SAME version, matched to
// the Slick version in use. Check the resolved versions (e.g. `sbt dependencyTree`).
trait DatabaseApi extends ExPostgresProfile
with PgArraySupport
with PgDate2Support
with PgRangeSupport
with PgHStoreSupport
with PgCirceJsonSupport
with PgSearchSupport
with PgPostGISExtensions
with PgNetSupport
with PgLTreeSupport {
// Query code should `import DatabaseApi.api._` to pick up all implicits below.
override val api = PostgresAPI
object PostgresAPI
extends API
with ArrayImplicits
with DateTimeImplicits
with JsonImplicits
with NetImplicits
with LTreeImplicits
with RangeImplicits
with HStoreImplicits
with SearchImplicits
with SearchAssistants
with CirceImplicits {
// SQL COALESCE over two nullable bigints, exposed as a typed Slick function.
val coalesce: (Rep[Option[Long]], Rep[Option[Long]]) => Rep[Option[Long]] =
SimpleFunction.binary[Option[Long], Option[Long], Option[Long]](
"coalesce")
// Maps PostgreSQL text[] columns to List[String] (slick-pg README pattern).
implicit val strListTypeMapper: DriverJdbcType[List[String]] =
new SimpleArrayJdbcType[String]("text")
.to(_.toList)
// Parses a PostgreSQL interval literal into slick-pg's Interval type.
def pgIntervalStr2Interval(intervalStr: String): Interval =
Interval.fromPgInterval(new PGInterval(intervalStr))
}
}
// Singleton profile instance; application code imports `DatabaseApi.api._`.
object DatabaseApi extends DatabaseApi {
def pgjson = "jsonb" // jsonb support is in postgres 9.4.0 onward; for 9.3.x use "json"
// Add back capabilities.insertOrUpdate to enable native upsert support; for postgres 9.5+
override protected def computeCapabilities: Set[Capability] =
super.computeCapabilities + JdbcCapabilities.insertOrUpdate
}
:wave: - I ran into an issue with \u0000
characters not being stripped out when using the default PgJsonSupport.
I noticed that the serde-specific artifacts (play-json, circe, etc.) already strip this character out via JsonUtils.clean, would it make sense to add that same stripping logic to PgJsonSupport
as well? I'd be happy to open a PR to make that change!