object SharedBackendConf
Linear Supertypes: AnyRef, Any
Value Members
- final def !=(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- final def ##(): Int
  Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- val BACKEND_MODE_EXTERNAL: String
- val BACKEND_MODE_INTERNAL: String
- val PROP_ALLOW_INSECURE_XGBOOST: (String, Boolean)
  If the property is set to true, insecure communication among H2O nodes is allowed for the XGBoost algorithm.
- val PROP_AUTO_SSL_FLOW: (String, Boolean)
  Automatically generate a key store for H2O Flow SSL.
- val PROP_BACKEND_CLUSTER_MODE: (String, String)
  This option can be set either to "internal" or "external". When set to "external", the H2O context is created by connecting to an existing H2O cluster; otherwise it creates an H2O cluster living inside Spark, which means that each Spark executor runs one H2O instance. The internal backend is not recommended for big clusters or clusters where Spark executors are not stable (see the sketch below).
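A minimal configuration sketch for switching between the two backends when building the SparkSession. The key "spark.ext.h2o.backend.cluster.mode" is assumed to be the String part of PROP_BACKEND_CLUSTER_MODE; verify it against your Sparkling Water version.

    import org.apache.spark.sql.SparkSession

    // Hedged sketch: the key below is assumed to be the value held by
    // PROP_BACKEND_CLUSTER_MODE. "external" connects to an existing H2O cluster,
    // while "internal" (the default) starts one H2O instance per Spark executor.
    val spark = SparkSession.builder()
      .appName("sparkling-water-backend-mode")
      .master("local[*]")
      .config("spark.ext.h2o.backend.cluster.mode", "external")
      .getOrCreate()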
- val PROP_BACKEND_HEARTBEAT_INTERVAL: (String, Int)
  Interval used to ping and check the H2O backend status.
- val PROP_CLIENT_EXTRA_PROPERTIES: (String, None.type)
  Extra properties passed to the H2O client during startup.
- val PROP_CLIENT_FLOW_BASEURL_OVERRIDE: (String, None.type)
  Allows overriding the base URL address of the Flow UI, including the scheme, which is shown to the user.
- val PROP_CLIENT_ICED_DIR: (String, None.type)
  Location of the iced directory for the driver instance.
- val PROP_CLIENT_IP: (String, None.type)
  IP of the H2O client node.
- val PROP_CLIENT_LOG_DIR: (String, None.type)
  Location of the log directory for the driver instance.
- val PROP_CLIENT_LOG_LEVEL: (String, String)
  H2O log level for the client running in the Spark driver.
- val PROP_CLIENT_NETWORK_MASK: (String, None.type)
  Subnet selector for the H2O client - if the mask is specified, the Spark network setup is not taken into account.
- val PROP_CLIENT_PORT_BASE: (String, Int)
  Port on which the H2O client publishes its API. If already occupied, the next odd port is tried, and so on.
- val PROP_CLIENT_VERBOSE: (String, Boolean)
  Print detailed messages to the client stdout.
- val PROP_CLIENT_WEB_PORT: (String, Int)
  Exact client port to access the web UI. The value -1 means an automatic search for a free port starting at spark.ext.h2o.port.base (see the sketch below).
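A short sketch of pinning the client web port, under the assumption that the String part of PROP_CLIENT_WEB_PORT is "spark.ext.h2o.client.web.port"; only spark.ext.h2o.port.base is named in the description above.

    import org.apache.spark.SparkConf

    // Hedged sketch: the key "spark.ext.h2o.client.web.port" is an assumption.
    // Leaving it at -1 (the documented behaviour) would search for a free port
    // starting at spark.ext.h2o.port.base instead of pinning one.
    val conf = new SparkConf()
      .set("spark.ext.h2o.client.web.port", "54321") // pin an exact port for the web UI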
- val PROP_CLOUD_NAME: (String, None.type)
  Configuration property - name of the H2O cloud.
- val PROP_CLOUD_TIMEOUT: (String, Int)
  Configuration property - timeout for the cloud to come up.
- val PROP_CLUSTER_TOPOLOGY_LISTENER_ENABLED: (String, Boolean)
  Enable/Disable the listener which kills H2O when there is a change in the underlying cluster's topology.
- val PROP_CONTEXT_PATH: (String, None.type)
  H2O's URL context path.
- val PROP_EXTERNAL_CLIENT_RETRY_TIMEOUT: (String, Int)
  Timeout in milliseconds specifying how often the H2O backend checks whether the Sparkling Water client (either the H2O client or REST) is connected.
- val PROP_FAIL_ON_UNSUPPORTED_SPARK_PARAM: (String, Boolean)
  Enable/Disable exit on unsupported Spark parameters.
- val PROP_FLOW_DIR: (String, None.type)
  Path to the Flow directory.
- val PROP_FLOW_EXTRA_HTTP_HEADERS: (String, None.type)
  Extra HTTP headers for the Flow UI.
- val PROP_FLOW_SCALA_CELL_ASYNC: (String, Boolean)
  Decide whether Scala cells run synchronously or asynchronously.
- val PROP_FLOW_SCALA_CELL_MAX_PARALLEL: (String, Int)
  Maximum number of parallel Scala cell jobs.
- val PROP_HASH_LOGIN: (String, Boolean)
  Enable hash login.
- val PROP_HIVE_HOST: (String, None.type)
  The full address of HiveServer2, for example hostname:10000.
- val PROP_HIVE_JDBC_URL_PATTERN: (String, None.type)
  Can be used to further customize the way the driver constructs the Hive JDBC URL.
- val PROP_HIVE_PRINCIPAL: (String, None.type)
  HiveServer2 Kerberos principal, for example hive/hostname@DOMAIN.COM.
- val PROP_HIVE_TOKEN: (String, None.type)
  Authorization token to Hive.
- val PROP_INTERNAL_PORT_OFFSET: (String, Int)
  Offset between the API (= web) port and the internal communication port; api_port + port_offset = h2o_port.
- val PROP_INTERNAL_SECURE_CONNECTIONS: (String, Boolean)
  Secure internal connections with automatically generated credentials.
- val PROP_JKS: (String, None.type)
  Path to the Java KeyStore file.
- val PROP_JKS_ALIAS: (String, None.type)
  Alias for the certificate in the keystore used to secure Flow.
- val PROP_JKS_PASS: (String, None.type)
  Password for the Java KeyStore file.
- val PROP_KERBERIZED_HIVE_ENABLED: (String, Boolean)
  If enabled, H2O instances will create JDBC connections to a Kerberized Hive so that all clients can read data from HiveServer2. Don't forget to put a jar with the Hive driver on the Spark classpath if the internal backend is used (see the sketch below).
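A sketch of enabling Kerberized Hive access and shipping the driver jar, assuming the String part of PROP_KERBERIZED_HIVE_ENABLED is "spark.ext.h2o.kerberized.hive.enabled" and using a placeholder path for the Hive JDBC driver.

    import org.apache.spark.sql.SparkSession

    // Hedged sketch: the key name is assumed from PROP_KERBERIZED_HIVE_ENABLED and
    // the jar path is a placeholder. spark.jars ships the Hive JDBC driver to the
    // executors, which is needed when the internal backend is used.
    val spark = SparkSession.builder()
      .config("spark.ext.h2o.kerberized.hive.enabled", "true")
      .config("spark.jars", "/path/to/hive-jdbc-driver.jar")
      .getOrCreate()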
- val PROP_KERBEROS_LOGIN: (String, Boolean)
  Enable Kerberos login.
- val PROP_LDAP_LOGIN: (String, Boolean)
  Enable LDAP login.
- val PROP_LOGIN_CONF: (String, None.type)
  Login configuration file.
- val PROP_MOJO_DESTROY_TIMEOUT: (String, Int)
  If a scoring MOJO instance is not used within a Spark executor JVM for a given timeout in milliseconds, it is evicted from the executor's cache.
- val PROP_NODE_EXTRA_PROPERTIES: (String, None.type)
  Extra properties passed to H2O nodes during startup.
- val PROP_NODE_LOG_DIR: (String, None.type)
  Location of the log directory for remote nodes.
- val PROP_NODE_LOG_LEVEL: (String, String)
  H2O internal log level for launched remote nodes.
- val PROP_NODE_NETWORK_MASK: (String, None.type)
  Subnet selector for H2O nodes running inside executors - if the mask is specified, the Spark network setup is not taken into account.
- val PROP_NODE_PORT_BASE: (String, Int)
  Configuration property - base port used for individual H2O node configuration.
- val PROP_NODE_STACK_TRACE_COLLECTOR_INTERVAL: (String, Int)
  Set how often, in seconds, stack traces are taken on each H2O node. The value -1 means that stack traces are not collected.
- val PROP_NTHREADS: (String, Int)
  Limit for the number of threads used by H2O; the default -1 means unlimited.
- val PROP_PASSWORD: (String, None.type)
  Password for the client authentication.
- val PROP_REPL_ENABLED: (String, Boolean)
  Enable/Disable the Sparkling Water REPL.
- val PROP_SCALA_INT_DEFAULT_NUM: (String, Int)
  Number of executors started at the start of H2O services, by default 1.
- val PROP_SPARK_VERSION_CHECK_ENABLED: (String, Boolean)
  Enable/Disable the check for the Spark version.
- val PROP_SSL_CONF: (String, None.type)
  Path to the Java KeyStore file used for the internal SSL communication.
- val PROP_USER_NAME: (String, None.type)
  User name for the cluster and the client authentication.
- val PROP_VERIFY_SSL_CERTIFICATES: (String, Boolean)
  Whether certificates should be verified before use in H2O or not.
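All PROP_* values above share the same (configuration key, default value) tuple shape. The sketch below shows how such a pair can be read back from a SparkConf; the concrete key and default used for illustration are assumptions, not values taken from this page.

    import org.apache.spark.SparkConf

    // Hedged sketch of the (key, default) pattern: the tuple below is assumed for
    // illustration; look up the real key and default in the Sparkling Water sources.
    val PROP_NTHREADS: (String, Int) = ("spark.ext.h2o.nthreads", -1)

    val conf = new SparkConf()
    // SparkConf.getInt falls back to the supplied default when the key is not set,
    // which is how these (key, default) pairs are typically consumed.
    val nthreads: Int = conf.getInt(PROP_NTHREADS._1, PROP_NTHREADS._2)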
- final def asInstanceOf[T0]: T0
  Definition Classes: Any
- def clone(): AnyRef
  Attributes: protected[lang]
  Definition Classes: AnyRef
  Annotations: @throws( ... ) @native()
- final def eq(arg0: AnyRef): Boolean
  Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- def finalize(): Unit
  Attributes: protected[lang]
  Definition Classes: AnyRef
  Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  Definition Classes: AnyRef → Any
  Annotations: @native()
- def hashCode(): Int
  Definition Classes: AnyRef → Any
  Annotations: @native()
- final def isInstanceOf[T0]: Boolean
  Definition Classes: Any
- final def ne(arg0: AnyRef): Boolean
  Definition Classes: AnyRef
- final def notify(): Unit
  Definition Classes: AnyRef
  Annotations: @native()
- final def notifyAll(): Unit
  Definition Classes: AnyRef
  Annotations: @native()
- final def synchronized[T0](arg0: ⇒ T0): T0
  Definition Classes: AnyRef
- def toString(): String
  Definition Classes: AnyRef → Any
- final def wait(): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... ) @native()