io.prediction.data.storage.hbase

HBPEvents

class HBPEvents extends PEvents

Linear Supertypes
PEvents, Serializable, Serializable, AnyRef, Any

Instance Constructors

  1. new HBPEvents(client: HBClient, config: StorageClientConfig, namespace: String)
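    In practice an HBPEvents instance is usually not constructed directly; engines obtain the configured PEvents implementation through the storage layer. A minimal sketch, assuming Storage.getPEvents() in io.prediction.data.storage is available in this version and that the HBase backend is configured:

      import io.prediction.data.storage.{PEvents, Storage}

      // Resolves the configured event store backend (HBase here) and
      // returns its PEvents implementation.
      val pEvents: PEvents = Storage.getPEvents()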

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  7. def checkTableExists(appId: Int, channelId: Option[Int]): Unit

  8. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  9. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  10. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  11. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  12. def find(appId: Int, channelId: Option[Int] = None, startTime: Option[DateTime] = None, untilTime: Option[DateTime] = None, entityType: Option[String] = None, entityId: Option[String] = None, eventNames: Option[Seq[String]] = None, targetEntityType: Option[Option[String]] = None, targetEntityId: Option[Option[String]] = None)(sc: SparkContext): RDD[Event]

    :: DeveloperApi :: Read from database and return the events. The deprecation here is intended for engine developers only.

    appId: return events of this app ID
    channelId: return events of this channel ID (default channel if it's None)
    startTime: return events with eventTime >= startTime
    untilTime: return events with eventTime < untilTime
    entityType: return events of this entityType
    entityId: return events of this entityId
    eventNames: return events with any of these event names
    targetEntityType: return events of this targetEntityType:
      • None means no restriction on targetEntityType
      • Some(None) means no targetEntityType for this event
      • Some(Some(x)) means targetEntityType should match x
    targetEntityId: return events of this targetEntityId:
      • None means no restriction on targetEntityId
      • Some(None) means no targetEntityId for this event
      • Some(Some(x)) means targetEntityId should match x
    sc: Spark context
    returns: RDD[Event]

    Definition Classes
    HBPEvents → PEvents
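
    A minimal usage sketch for find (the pEvents instance, app ID, and time window below are hypothetical values for illustration): read the "rate" and "buy" events of user entities targeting item entities.

      import io.prediction.data.storage.{Event, PEvents}
      import org.apache.spark.SparkContext
      import org.apache.spark.rdd.RDD
      import org.joda.time.DateTime

      def ratedAndBought(pEvents: PEvents, sc: SparkContext): RDD[Event] =
        pEvents.find(
          appId = 1,                                        // hypothetical app ID
          startTime = Some(new DateTime(2015, 1, 1, 0, 0)), // eventTime >= startTime
          untilTime = Some(new DateTime(2015, 2, 1, 0, 0)), // eventTime < untilTime
          entityType = Some("user"),
          eventNames = Some(Seq("rate", "buy")),
          targetEntityType = Some(Some("item"))             // must target an "item"
        )(sc)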
  13. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  14. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  15. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  16. lazy val logger: Logger

    Attributes
    protected
    Definition Classes
    PEvents
  17. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  18. final def notify(): Unit

    Definition Classes
    AnyRef
  19. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  20. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  21. def toString(): String

    Definition Classes
    AnyRef → Any
  22. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  23. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  24. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  25. def write(events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit

    :: DeveloperApi :: Write events to database.

    events: RDD of Event
    appId: the app ID
    channelId: channel ID (default channel if it's None)
    sc: Spark context

    Definition Classes
    HBPEvents → PEvents
  26. def write(events: RDD[Event], appId: Int)(sc: SparkContext): Unit

    :: DeveloperApi :: Write events to database.

    events: RDD of Event
    appId: the app ID
    sc: Spark context

    Definition Classes
    PEvents
    Annotations
    @DeveloperApi()
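
    A minimal sketch covering both write overloads (the pEvents instance, app ID, and channel ID are hypothetical values for illustration):

      import io.prediction.data.storage.{Event, PEvents}
      import org.apache.spark.SparkContext
      import org.apache.spark.rdd.RDD

      def save(pEvents: PEvents, events: RDD[Event], sc: SparkContext): Unit = {
        // Write to the default channel.
        pEvents.write(events, appId = 1)(sc)
        // Write to a specific channel.
        pEvents.write(events, appId = 1, channelId = Some(2))(sc)
      }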

Deprecated Value Members

  1. def aggregateProperties(appId: Int, channelId: Option[Int] = None, entityType: String, startTime: Option[DateTime] = None, untilTime: Option[DateTime] = None, required: Option[Seq[String]] = None)(sc: SparkContext): RDD[(String, PropertyMap)]

    Aggregate properties of entities based on these special events: $set, $unset, and $delete. The deprecation here is intended for engine developers only.

    appId: use events of this app ID
    channelId: use events of this channel ID (default channel if it's None)
    entityType: aggregate properties of the entities of this entityType
    startTime: use events with eventTime >= startTime
    untilTime: use events with eventTime < untilTime
    required: only keep entities with these required properties defined
    sc: Spark context
    returns: RDD[(String, PropertyMap)] of entityId and PropertyMap pairs

    Definition Classes
    PEvents
    Annotations
    @deprecated
    Deprecated

    (Since version 0.9.2) Use PEventStore.aggregateProperties() instead.
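    As the deprecation note suggests, new code should go through PEventStore. A minimal sketch, assuming PEventStore.aggregateProperties in io.prediction.data.store keys on an app name rather than an app ID (the app name and property names are hypothetical):

      import io.prediction.data.store.PEventStore
      import io.prediction.data.storage.PropertyMap
      import org.apache.spark.SparkContext
      import org.apache.spark.rdd.RDD

      // Aggregate $set/$unset/$delete events into current user properties,
      // keeping only users that have both "age" and "gender" defined.
      def userProperties(sc: SparkContext): RDD[(String, PropertyMap)] =
        PEventStore.aggregateProperties(
          appName = "MyApp",
          entityType = "user",
          required = Some(Seq("age", "gender"))
        )(sc)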

  2. def extractEntityMap[A](appId: Int, entityType: String, startTime: Option[DateTime] = None, untilTime: Option[DateTime] = None, required: Option[Seq[String]] = None)(sc: SparkContext)(extract: (DataMap) ⇒ A)(implicit arg0: ClassTag[A]): EntityMap[A]

    :: Experimental :: Extract an EntityMap[A] from events of the given entityType. NOTE: the result is a local EntityMap[A], not an RDD.

    Definition Classes
    PEvents
    Annotations
    @deprecated @Experimental()
    Deprecated

    (Since version 0.9.2) Use PEventStore.aggregateProperties() instead.

  3. def getByAppIdAndTimeAndEntity(appId: Int, startTime: Option[DateTime], untilTime: Option[DateTime], entityType: Option[String], entityId: Option[String])(sc: SparkContext): RDD[Event]

    Definition Classes
    PEvents
    Annotations
    @deprecated
    Deprecated

    (Since version 0.9.2) Use PEventStore.find() instead.
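    As the deprecation note suggests, the equivalent query goes through PEventStore.find. A minimal sketch, assuming PEventStore.find in io.prediction.data.store keys on an app name (the values are hypothetical):

      import io.prediction.data.store.PEventStore
      import io.prediction.data.storage.Event
      import org.apache.spark.SparkContext
      import org.apache.spark.rdd.RDD
      import org.joda.time.DateTime

      // Read "user" events with eventTime >= startTime.
      def recentUserEvents(sc: SparkContext): RDD[Event] =
        PEventStore.find(
          appName = "MyApp",
          startTime = Some(new DateTime(2015, 1, 1, 0, 0)),
          entityType = Some("user")
        )(sc)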
