INFO org.locationtech.geomesa.kafka.data.KafkaDataStore: Kafka consumers disabled for this data store instance
Feature type created - register the layer 'tdrive-quickstart' in geoserver with bounds: MinX[116.22366] MinY[39.72925] MaxX[116.58804] MaxY[40.09298]
Press <enter> to continue
WARN org.locationtech.geomesa.kafka.data.KafkaCacheLoader$KafkaCacheLoaderImpl: Consumer [0] error receiving message from topic geomesa-ds-kafka-tdrive-quickstart:
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.locationtech.geomesa.kafka.KafkaConsumerVersions$$anonfun$fromDuration$1$1$$anonfun$applyOrElse$1.apply(KafkaConsumerVersions.scala:53)
at org.locationtech.geomesa.kafka.KafkaConsumerVersions$$anonfun$fromDuration$1$1$$anonfun$applyOrElse$1.apply(KafkaConsumerVersions.scala:53)
at org.locationtech.geomesa.kafka.KafkaConsumerVersions$.poll(KafkaConsumerVersions.scala:25)
at org.locationtech.geomesa.kafka.consumer.ThreadedConsumer$ConsumerRunnable.run(ThreadedConsumer.scala:53)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:750)
Caused by: java.lang.IllegalStateException: No entry found for connection 2147482646
at org.apache.kafka.clients.ClusterConnectionStates.nodeState(ClusterConnectionStates.java:339)
at org.apache.kafka.clients.ClusterConnectionStates.disconnected(ClusterConnectionStates.java:143)
at org.apache.kafka.clients.NetworkClient.initiateConnect(NetworkClient.java:921)
at org.apache.kafka.clients.NetworkClient.ready(NetworkClient.java:287)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.tryConnect(ConsumerNetworkClient.java:548)
at org.apache.kafka.clients.consumer.internals.AbstractCoordinator$FindCoordinatorResponseHandler.onSuccess(AbstractCoordinator.java:655)
at org.apache.kafka.clients.consumer.internals.AbstractCoordinator$FindCoordinatorResponseHandler.onSuccess(AbstractCoordinator.java:635)
at org.apache.kafka.clients.consumer.internals.RequestFuture$1.onSuccess(RequestFuture.java:204)
at org.apache.kafka.clients.consumer.internals.RequestFuture.fireSuccess(RequestFuture.java:167)
at org.apache.kafka.clients.consumer.internals.RequestFuture.complete(RequestFuture.java:127)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient$RequestFutureCompletionHandler.fireCompletion(ConsumerNetworkClient.java:575)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.firePendingCompletedRequests(ConsumerNetworkClient.java:389)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:297)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:236)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:215)
at org.apache.kafka.clients.consumer.internals.AbstractCoordinator.ensureCoordinatorReady(AbstractCoordinator.java:231)
at org.apache.kafka.clients.consumer.internals.ConsumerCoordinator.poll(ConsumerCoordinator.java:316)
at org.apache.kafka.clients.consumer.KafkaConsumer.updateAssignmentMetadataIfNeeded(KafkaConsumer.java:1214)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1179)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1164)
... 11 more
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.GeneratedMethodAccessor191.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.wicket.RequestListenerInterface.internalInvoke(RequestListenerInterface.java:258)
... 123 more
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.curator.utils.Compatibility
at org.apache.curator.framework.CuratorFrameworkFactory$Builder.<init>(CuratorFrameworkFactory.java:153)
at org.apache.curator.framework.CuratorFrameworkFactory$Builder.<init>(CuratorFrameworkFactory.java:134)
at org.apache.curator.framework.CuratorFrameworkFactory.builder(CuratorFrameworkFactory.java:82)
at org.locationtech.geomesa.utils.zk.CuratorHelper$.client(CuratorHelper.scala:23)
at org.locationtech.geomesa.utils.zk.ZookeeperMetadata.<init>(ZookeeperMetadata.scala:21)
at org.locationtech.geomesa.kafka.data.KafkaDataStoreFactory.createDataStore(KafkaDataStoreFactory.scala:48)
at org.locationtech.geomesa.kafka.data.KafkaDataStoreFactory.createDataStore(KafkaDataStoreFactory.scala:38)
at org.vfny.geoserver.util.DataStoreUtils.getDataAccess(DataStoreUtils.java:66)
at org.geoserver.catalog.ResourcePool.getDataStore(ResourcePool.java:626)
at org.geoserver.catalog.impl.DataStoreInfoImpl.getDataStore(DataStoreInfoImpl.java:34)
at org.geoserver.web.data.store.DataAccessNewPage.onSaveDataStore(DataAccessNewPage.java:90)
at org.geoserver.web.data.store.AbstractDataAccessPage$1.onSubmit(AbstractDataAccessPage.java:168)
at org.apache.wicket.ajax.markup.html.form.AjaxSubmitLink$1.onSubmit(AjaxSubmitLink.java:111)
at org.apache.wicket.ajax.form.AjaxFormSubmitBehavior$AjaxFormSubmitter.onSubmit(AjaxFormSubmitBehavior.java:218)
at org.apache.wicket.markup.html.form.Form.delegateSubmit(Form.java:1312)
at org.apache.wicket.markup.html.form.Form.process(Form.java:976)
at org.apache.wicket.markup.html.form.Form.onFormSubmitted(Form.java:797)
at org.apache.wicket.ajax.form.AjaxFormSubmitBehavior.onEvent(AjaxFormSubmitBehavior.java:174)
at org.apache.wicket.ajax.AjaxEventBehavior.respond(AjaxEventBehavior.java:155)
at org.apache.wicket.ajax.AbstractDefaultAjaxBehavior.onRequest(AbstractDefaultAjaxBehavior.java:601)
Hey everyone, I was digging into the GeoMesa API from GitHub and I couldn't figure out why the GeoMesa Cassandra IndexWriter throws the below error when I try to add a new feature to a Cassandra data store:
Error while write data;Error indexing feature '4846901:97399|0|27768|1|{"area":5.77998,"plusclasscode":32,"mainclasscode":"C","classcode":32,"treecover":85.6,"confidence":0.7,"imperviousness":null}|{"ndvi":null,"perimeter":1564.695}|1|0|2021-12-15T13:45:25.000Z|32|C|32|4846901|POLYGON ((29.7420806 39.9518753, 29.7421197 39.9517979, 29.7421861 39.9516804, 29.7422637 39.9515877, 29.7423589 39.9515469, 29.7424704 39.9515576, 29.742565 39.9515816, 29.7426113 39.9515639, 29.7425807 39.9514973, 29.7424879 39.9514228, 29.7423922 39.9513601, 29.7423275 39.9513054, 29.742306 39.9512512, 29.7422341 39.9511941, 29.7421565 39.9511382, 29.74213 39.9510882, 29.7421523 39.9510446, 29.7421587 39.9510017, 29.742118 39.9509557, 29.7420545 39.9508974, 29.7420047 39.9508266, 29.7420057 39.9507665, 29.7420664 39.9507515, 29.7421645 39.9507502, 29.7422586 39.9506855, 29.7422912 39.9505525, 29.7422694 39.9504317, 29.7422636 39.9503568, 29.7423221 39.9503126, 29.7424422 39.9502868, 29.7425841 39.9502919, 29.7426968 39.950328, 29.7428184 39.9503811, 29.7430274 39.9504524, 29.7433069 39.9505368, 29.743519 39.9505895, 29.7436159 39.9505393, 29.7436344 39.9503878, 29.7436056 39.9503465, 29.743554 39.950339, 29.7434936 39.9503392, 29.7434343 39.9502887, 29.7433805 39.950197, 29.7433264 39.9501459, 29.7432708 39.950165, 29.743205 39.9502166, 29.7431181 39.950257, 29.7430002 39.9502553, 29.7428568 39.9502124, 29.7426921 39.9501553, 29.7425191 39.9501114, 29.7423164 39.9501046, 29.7420709 39.9501327, 29.7418095 39.950174, 29.7416251 39.9502182, 29.7415504 39.9502621, 29.7415628 39.9503078, 29.7416078 39.9503521, 29.7416181 39.9503939, 29.7415595 39.9504344, 29.7414338 39.9504906, 29.7412652 39.9505782, 29.741106 39.9506871, 29.7409788 39.9507753, 29.7408899 39.9508396, 29.7408238 39.9509038, 29.7407656 39.9509786, 29.7407059 39.9510459, 29.7406459 39.9511008, 29.7405847 39.9511445, 29.7405253 39.9511701, 29.7404666 39.9511628, 29.7404095 39.9511437, 29.7403497 39.9511624, 29.7402886 39.9512267, 29.740226 39.9512974, 29.7401645 39.9513561, 29.7401033 39.9514037, 29.7400484 39.9514372, 29.7400025 39.9514351, 29.7399526 39.9513876, 29.7398433 39.9513082, 29.7396255 39.9512283, 29.7393638 39.951197, 29.7392367 39.9512253, 29.7392784 39.9512782, 29.7393785 39.9513161, 29.7394645 39.9513154, 29.7395316 39.9512999, 29.7395996 39.9513214, 29.7396624 39.9513873, 29.7396763 39.9514573, 29.7395792 39.9515163, 29.7393947 39.9515774, 29.7392348 39.9516545, 29.7391413 39.9517414, 29.7391123 39.9517918, 29.7399385 39.9518151, 29.7420806 39.9518753))|2022-03-07T12:28:39.000Z';: java.lang.RuntimeException: Error indexing feature
When I checked the geometry and attributes, nothing was different from the regular features that were added successfully, yet it still throws a runtime error. As far as I can tell it's not an illegal argument; it's a RuntimeException that seems to have nothing to do with the feature I created. What can cause that issue? Thank you.
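For context, here is a minimal sketch of the write path in question using the standard GeoTools API; the data store ds, the schema name "mytype", and the attribute handling are placeholders, not taken from this thread. Because the top-level "Error indexing feature" RuntimeException is a generic wrapper, printing the full cause chain usually surfaces the real Cassandra driver or serialization error:

import org.geotools.data.Transaction

// sketch: ds is an already-connected Cassandra DataStore and "mytype" the
// schema name; both are placeholders
val writer = ds.getFeatureWriterAppend("mytype", Transaction.AUTO_COMMIT)
try {
  val f = writer.next()
  // ... set the attributes and geometry on f here ...
  writer.write()
} catch {
  case e: RuntimeException =>
    // the wrapper message is generic, so walk the cause chain to find the
    // underlying error
    var t: Throwable = e
    while (t != null) {
      println(s"${t.getClass.getName}: ${t.getMessage}")
      t = t.getCause
    }
} finally {
  writer.close()
}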
(bbox (geom, 0,0,1,1) and date during T1 / T2) OR ( bbox (geom, 2,2,3,3) and date during T3 / T4)
This statement will use the cross product of bbox(0,0,1,1), bbox(2,2,3,3), date during T1/T2, and date during T3/T4 to generate the scan ranges. The back-end is HBase and the GeoMesa version is 3.0.0. I'm not sure if there are any improvements in the newer versions.
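For anyone reproducing this, the filter can be built programmatically as below; this is a sketch only, with concrete dates standing in for T1..T4 and an assumed date attribute named dtg:

import org.geotools.data.Query
import org.geotools.filter.text.ecql.ECQL

// the OR'd bbox/date filter from above; "mytype" and "dtg" are placeholders
val filter = ECQL.toFilter(
  "(bbox(geom,0,0,1,1) AND dtg DURING 2020-01-01T00:00:00Z/2020-01-08T00:00:00Z) OR " +
  "(bbox(geom,2,2,3,3) AND dtg DURING 2020-02-01T00:00:00Z/2020-02-08T00:00:00Z)")
val query = new Query("mytype", filter)

The scan ranges the planner actually generates can then be inspected without running the query, e.g. with the command-line explain command (geomesa-hbase explain).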
Hi guys. I had used GeoMesa successfully until today. I want to ingest a parquet file, and the one thing I am missing: is it possible to set the id-field as an md5 of the avroPath of some fields? I have only had success with the uuid() function as the id-field, but I want to use an md5 hash of a few fields. I have no idea how to access fields in id-field; usually I use avroPath($0,'/example'), which works well in all other cases. (A possible direction is sketched after the config below.)
Here is my schema + converter definition.
{
  "geomesa" : {
    "sfts" : {
      "example" : {
        "fields" : [
          { "name" : "color",  "type" : "String" },
          { "name" : "number", "type" : "Long" },
          { "name" : "height", "type" : "String" },
          { "name" : "weight", "type" : "Double" },
          { "name" : "geom",   "type" : "Point", "srid" : 4326 }
        ]
      }
    },
    "converters" : {
      "example" : {
        "type" : "parquet",
        "id-field" : "md5(stringToBytes(avroPath($0,'/color')))",
        "fields" : [
          { "name" : "color",  "transform" : "avroPath($0,'/color')" },
          { "name" : "number", "transform" : "avroPath($0,'/number')" },
          { "name" : "height", "transform" : "avroPath($0,'/physical/height')" },
          { "name" : "weight", "transform" : "avroPath($0,'/physical/weight')" },
          { "name" : "geom",   "transform" : "point(avroPath($0,'/lon'),avroPath($0,'/lat'))" }
        ],
        "options" : {
          "encoding" : "UTF-8",
          "parse-mode" : "incremental",
          "validators" : [ "index" ]
        }
      }
    }
  }
}
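Not verified against this schema, but one possible direction: the converter function library includes concatenate for joining two strings and toString for type conversion, so an id-field hashing a few fields might look like the line below (the concatenate/toString usage is an assumption based on the documented converter functions, not something tested here):

"id-field" : "md5(stringToBytes(concatenate(avroPath($0,'/color'), toString(avroPath($0,'/number')))))"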
Dear experts,
I am using GeoMesa in a Jupyter notebook, mainly for its SparkSQL functions like st_intersects etc. I managed to load the libs.
But now I want to load a GeoJSON file and use it to intersect with a large number of data points.
I've done something similar before, but back then I ingested the GeoJSON and stored it in geomesa-hbase, and in my Spark job I read it from there. On my current environment (machine learning on Kubernetes), it is not easy to read from a geomesa-hbase backend somewhere else.
So the question: is it possible to directly ingest a file like this?
I'm looking for the geospatial equivalent of doing spark.read.csv("somefile.csv") with a CSV file in Spark, for when you somehow cannot read it from HDFS.
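One possible direction, sketched under assumptions: GeoMesa ships a converter-based Spark RDD provider that reads flat files (CSV, JSON, etc.) through a converter definition, with no backing store required. The option keys below follow the converter provider documentation; the "polygons" converter/SFT names, the input path, and the sc SparkContext are placeholders:

import org.apache.hadoop.conf.Configuration
import org.geotools.data.Query
import org.locationtech.geomesa.spark.GeoMesaSpark

// sketch: read a GeoJSON file through a named converter definition that is
// available on the classpath; "polygons" and the file path are placeholders
val params = Map(
  "geomesa.converter"        -> "polygons",      // named converter definition
  "geomesa.converter.inputs" -> "somefile.json", // input file(s) to read
  "geomesa.sft"              -> "polygons")      // named SimpleFeatureType
val rdd = GeoMesaSpark(params).rdd(new Configuration(), sc, params, new Query("polygons"))

The result is an RDD of SimpleFeatures that can be mapped into a DataFrame for the st_intersects join; whether geomesa_pyspark exposes the same provider from Python would need checking against its docs.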
Py4JJavaError: An error occurred while calling o82.collectToPython.
: java.lang.ArrayIndexOutOfBoundsException: 28499
at com.thoughtworks.paranamer.BytecodeReadingParanamer$ClassReader.accept(BytecodeReadingParanamer.java:563)
at com.thoughtworks.paranamer.BytecodeReadingParanamer$ClassReader.access$200(BytecodeReadingParanamer.java:338)
at com.thoughtworks.paranamer.BytecodeReadingParanamer.lookupParameterNames(BytecodeReadingParanamer.java:103)
at com.thoughtworks.paranamer.CachingParanamer.lookupParameterNames(CachingParanamer.java:79)
at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.getCtorParams(BeanIntrospector.scala:45)
at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$1(BeanIntrospector.scala:59)
at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$1$adapted(BeanIntrospector.scala:59)
at scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:245)
at scala.collection.Iterator.foreach(Iterator.scala:941)
at scala.collection.Iterator.foreach$(Iterator.scala:941)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
at scala.collection.IterableLike.foreach(IterableLike.scala:74)
at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
at scala.collection.TraversableLike.flatMap(TraversableLike.scala:245)
at scala.collection.TraversableLike.flatMap$(TraversableLike.scala:242)
at scala.collection.AbstractTraversable.flatMap(Traversable.scala:108)
at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.findConstructorParam$1(BeanIntrospector.scala:59)
at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$19(BeanIntrospector.scala:181)
at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
at scala.collection.TraversableLike.map(TraversableLike.scala:238)
at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:198)
at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$14(BeanIntrospector.scala:175)
at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$14$adapted(BeanIntrospector.scala:174)
at scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:245)
at scala.collection.immutable.List.foreach(List.scala:392)
at scala.collection.TraversableLike.flatMap(TraversableLike.scala:245)
at scala.collection.TraversableLike.flatMap$(TraversableLike.scala:242)
at scala.collection.immutable.List.flatMap(List.scala:355)
at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.apply(BeanIntrospector.scala:174)
at com.fasterxml.jackson.module.scala.introspect.ScalaAnnotationIntrospector$._descriptorFor(ScalaAnnotationIntrospectorModule.scala:21)
at com.fasterxml.jackson.module.scala.introspect.ScalaAnnotationIntrospector$.fieldName(ScalaAnnotationIntrospectorModule.scala:29)
at com.fasterxml.jackson.module.scala.introspect.ScalaAnnotationIntrospector$.findImplicitPropertyName(ScalaAnnotationIntrospectorModule.scala:77)
at com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair.findImplicitPropertyName(AnnotationIntrospectorPair.java:490)
at com.fasterxml.jackson.databind.introspect.POJOPropertiesCollector._addFields(POJOPropertiesCollector.java:380)
at com.fasterxml.jackson.databind.introspect.POJOPropertiesCollector.collectAll(POJOPropertiesCollector.java:308)
at com.fasterxml.jackson.databind.introspect.POJOPropertiesCollector.getJsonValueAccessor(POJOPropertiesCollector.java:196)
at com.fasterxml.jackson.databind.introspect.BasicBeanDescription.findJsonValueAccessor(BasicBeanDescription.java:252)
at com.fasterxml.jackson.databind.ser.BasicSerializerFactory.findSerializerByAnnotations(BasicSerializerFactory.java:346)
at com.fasterxml.jackson.databind.ser.BeanSerializerFactory._createSerializer2(BeanSerializerFactory.java:216)
at com.fasterxml.jackson.databind.ser.BeanSerializerFactory.createSerializer(BeanSerializerFactory.java:165)
at com.fasterxml.jackson.databind.SerializerProvider._createUntypedSerializer(SerializerProvider.java:1388)
at com.fasterxml.jackson.databind.SerializerProvider._createAndCacheUntypedSerializer(SerializerProvider.java:1336)
at com.fasterxml.jackson.databind.SerializerProvider.findValueSerializer(SerializerProvider.java:510)
at com.fasterxml.jackson.databind.SerializerProvider.findTypedValueSerializer(SerializerProvider.java:713)
at com.fasterxml.jackson.databind.ser.DefaultSerializerProvider.serializeValue(DefaultSerializerProvider.java:308)
at com.fasterxml.jackson.databind.ObjectMapper._configAndWriteValue(ObjectMapper.java:4094)
at com.fasterxml.jackson.databind.ObjectMapper.writeValueAsString(ObjectMapper.java:3404)
at org.apache.spark.rdd.RDDOperationScope.toJson(RDDOperationScope.scala:52)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:145)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.SparkContext.withScope(SparkContext.scala:786)
at org.apache.spark.SparkContext.newAPIHadoopRDD(SparkContext.scala:1275)
at org.locationtech.geomesa.spark.accumulo.AccumuloSpatialRDDProvider.queryPlanToRDD$1(AccumuloSpatialRDDProvider.scala:51)
at org.locationtech.geomesa.spark.accumulo.AccumuloSpatialRDDProvider.rdd(AccumuloSpatialRDDProvider.scala:66)
at org.locationtech.geomesa.spark.GeoMesaRelation.buildScan(GeoMesaRelation.scala:117)
Hi, I have a little problem with GeoMesa and Flink 1.12.1. The job throws an exception like:
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.locationtech.geomesa.cassandra.data.CassandraDataStoreFactory
It happens on Flink 1.12.1 but works properly on Flink 1.7. Any ideas?
Thanks a lot
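A hedged guess rather than a confirmed fix: "Could not initialize class" means the static initializer of CassandraDataStoreFactory already failed once (the original error should appear earlier in the logs), and Flink's default child-first classloading is a common trigger for such initialization failures. One thing to try, as an assumption to test rather than a verified solution, is loading the GeoMesa classes parent-first in flink-conf.yaml:

# flink-conf.yaml: force GeoMesa classes onto the parent classloader
# (an assumption to test, not a confirmed fix for this thread)
classloader.parent-first-patterns.additional: org.locationtech.geomesa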