@@ -23,7 +23,6 @@ import com.google.common.collect.ImmutableSortedSet
 import com.typesafe.scalalogging.slf4j.Logging
 import org.apache.accumulo.core.client._
 import org.apache.accumulo.core.client.admin.TimeType
-import org.apache.accumulo.core.client.mock.MockConnector
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken
 import org.apache.accumulo.core.data.{Key, Value}
 import org.apache.accumulo.core.file.keyfunctor.{ColumnFamilyFunctor, RowFunctor}
@@ -32,6 +31,7 @@ import org.apache.hadoop.io.Text
 import org.geotools.data._
 import org.geotools.data.simple.SimpleFeatureSource
 import org.geotools.factory.Hints
+import org.geotools.feature.NameImpl
 import org.geotools.geometry.jts.ReferencedEnvelope
 import org.geotools.process.vector.TransformProcess
 import org.locationtech.geomesa.core
@@ -44,7 +44,7 @@ import org.locationtech.geomesa.core.security.AuthorizationsProvider
 import org.locationtech.geomesa.data.TableSplitter
 import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
 import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.{FeatureSpec, NonGeomAttributeSpec}
-import org.opengis.feature.`type`.AttributeDescriptor
+import org.opengis.feature.`type`.{AttributeDescriptor, Name}
 import org.opengis.feature.simple.SimpleFeatureType
 import org.opengis.filter.Filter
 import org.opengis.referencing.crs.CoordinateReferenceSystem
@@ -184,7 +184,7 @@ class AccumuloDataStore(val connector: Connector,
    * @param attributes
    */
   def updateIndexedAttributes(featureName: String, attributes: String): Unit = {
-    val FeatureSpec(existing, _) = SimpleFeatureTypes.parse(getAttributes(featureName))
+    val FeatureSpec(existing, _) = SimpleFeatureTypes.parse(getAttributes(featureName).getOrElse(""))
     val FeatureSpec(updated, _) = SimpleFeatureTypes.parse(attributes)
     // check that the only changes are to non-geometry index flags
     val ok = existing.length == updated.length &&
@@ -605,12 +605,18 @@ class AccumuloDataStore(val connector: Connector,
 
   // NB: By default, AbstractDataStore is "isWriteable". This means that createFeatureSource returns
   // a featureStore
-  override def getFeatureSource(featureName: String): SimpleFeatureSource = {
-    validateMetadata(featureName)
-    if (!cachingConfig) new AccumuloFeatureStore(this, featureName)
-    else new AccumuloFeatureStore(this, featureName) with CachingFeatureSource
+  override def getFeatureSource(typeName: Name): SimpleFeatureSource = {
+    validateMetadata(typeName.getLocalPart)
+    if (cachingConfig) {
+      new AccumuloFeatureStore(this, typeName) with CachingFeatureSource
+    } else {
+      new AccumuloFeatureStore(this, typeName)
+    }
   }
 
+  override def getFeatureSource(typeName: String): SimpleFeatureSource =
+    getFeatureSource(new NameImpl(typeName))
+
   /**
    * Reads the index schema format out of the metadata
    *
@@ -627,7 +633,7 @@ class AccumuloDataStore(val connector: Connector,
    * @return
    */
   private def getAttributes(featureName: String) =
-    metadata.read(featureName, ATTRIBUTES_KEY).getOrElse(EMPTY_STRING)
+    metadata.read(featureName, ATTRIBUTES_KEY)
 
   /**
    * Reads the feature encoding from the metadata. Defaults to TEXT if there is no metadata.
@@ -693,24 +699,24 @@ class AccumuloDataStore(val connector: Connector,
    * @return the corresponding feature type (schema) for this feature name,
    *         or NULL if this feature name does not appear to exist
    */
-  override def getSchema(featureName: String): SimpleFeatureType =
-    getAttributes(featureName) match {
-      case attributes if attributes.isEmpty =>
-        null
-      case attributes =>
-        val sft = SimpleFeatureTypes.createType(featureName, attributes)
-        val dtgField = metadata.read(featureName, DTGFIELD_KEY)
-          .getOrElse(core.DEFAULT_DTG_PROPERTY_NAME)
-        val indexSchema = metadata.read(featureName, SCHEMA_KEY).orNull
-        // If no data is written, we default to 'false' in order to support old tables.
-        val sharingBoolean = metadata.read(featureName, SHARED_TABLES_KEY).getOrElse("false")
-
-        sft.getUserData.put(core.index.SF_PROPERTY_START_TIME, dtgField)
-        sft.getUserData.put(core.index.SF_PROPERTY_END_TIME, dtgField)
-        sft.getUserData.put(core.index.SFT_INDEX_SCHEMA, indexSchema)
-        core.index.setTableSharing(sft, new java.lang.Boolean(sharingBoolean))
-        sft
-    }
+  override def getSchema(featureName: String): SimpleFeatureType = getSchema(new NameImpl(featureName))
+
+  override def getSchema(name: Name): SimpleFeatureType = {
+    val featureName = name.getLocalPart
+    getAttributes(featureName).map { attributes =>
+      val sft = SimpleFeatureTypes.createType(name.getURI, attributes)
+      val dtgField = metadata.read(featureName, DTGFIELD_KEY).getOrElse(core.DEFAULT_DTG_PROPERTY_NAME)
+      val indexSchema = metadata.read(featureName, SCHEMA_KEY).orNull
+      // If no data is written, we default to 'false' in order to support old tables.
+      val sharingBoolean = metadata.read(featureName, SHARED_TABLES_KEY).getOrElse("false")
+
+      sft.getUserData.put(core.index.SF_PROPERTY_START_TIME, dtgField)
+      sft.getUserData.put(core.index.SF_PROPERTY_END_TIME, dtgField)
+      sft.getUserData.put(core.index.SFT_INDEX_SCHEMA, indexSchema)
+      core.index.setTableSharing(sft, new java.lang.Boolean(sharingBoolean))
+      sft
+    }.orNull
+  }
 
   // Implementation of Abstract method
   def getFeatureReader(featureName: String): AccumuloFeatureReader = getFeatureReader(featureName, Query.ALL)
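
For illustration only, a minimal sketch of how the Name-based overloads introduced above might be exercised from client code. The `lookup` helper, the "mySft" type name, and the `org.locationtech.geomesa.core.data.AccumuloDataStore` import path are assumptions made for this sketch rather than part of the change, and the data store itself is assumed to have been obtained elsewhere (for example via DataStoreFinder).

    import org.geotools.data.simple.SimpleFeatureSource
    import org.geotools.feature.NameImpl
    import org.locationtech.geomesa.core.data.AccumuloDataStore
    import org.opengis.feature.`type`.Name
    import org.opengis.feature.simple.SimpleFeatureType

    // Hypothetical helper: `ds` is assumed to be an already-configured AccumuloDataStore
    // and "mySft" an existing feature type created earlier with createSchema.
    def lookup(ds: AccumuloDataStore): SimpleFeatureSource = {
      val typeName: Name = new NameImpl("mySft")

      // Name-based overload added above; returns null when the type does not exist.
      val sft: SimpleFeatureType = ds.getSchema(typeName)

      // The String overload now simply delegates via new NameImpl(featureName).
      val sameSft: SimpleFeatureType = ds.getSchema("mySft")

      // getFeatureSource follows the same pattern; CachingFeatureSource is mixed in
      // only when caching is enabled in the store's configuration.
      ds.getFeatureSource(typeName)
    }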