You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flume.apache.org by rg...@apache.org on 2022/06/05 05:33:25 UTC

[flume-site] branch asf-staging updated: Add apidocs and manuals

This is an automated email from the ASF dual-hosted git repository.

rgoers pushed a commit to branch asf-staging
in repository https://gitbox.apache.org/repos/asf/flume-site.git


The following commit(s) were added to refs/heads/asf-staging by this push:
     new 4569179  Add apidocs and manuals
4569179 is described below

commit 4569179fc13dabe15abbc7d175022d45f451c73e
Author: Ralph Goers <rg...@apache.org>
AuthorDate: Sat Jun 4 22:33:15 2022 -0700

    Add apidocs and manuals
---
 .../content/1.10.0/FlumeDeveloperGuide.html        |  1074 ++
 .../releases/content/1.10.0/FlumeUserGuide.html    |  8943 +++++++++++++
 .../content/1.10.0/apidocs/allclasses-frame.html   |   609 +
 .../content/1.10.0/apidocs/allclasses-noframe.html |   609 +
 .../flume/handlers/thrift/EventStatus.html         |   398 +
 .../cloudera/flume/handlers/thrift/Priority.html   |   434 +
 .../handlers/thrift/ThriftFlumeEvent._Fields.html  |   485 +
 .../flume/handlers/thrift/ThriftFlumeEvent.html    |  1163 ++
 ...ThriftFlumeEventServer.AsyncClient.Factory.html |   288 +
 ...ftFlumeEventServer.AsyncClient.append_call.html |   359 +
 ...iftFlumeEventServer.AsyncClient.close_call.html |   357 +
 .../thrift/ThriftFlumeEventServer.AsyncClient.html |   369 +
 .../thrift/ThriftFlumeEventServer.AsyncIface.html  |   256 +
 ...riftFlumeEventServer.AsyncProcessor.append.html |   355 +
 ...hriftFlumeEventServer.AsyncProcessor.close.html |   355 +
 .../ThriftFlumeEventServer.AsyncProcessor.html     |   318 +
 .../ThriftFlumeEventServer.Client.Factory.html     |   305 +
 .../thrift/ThriftFlumeEventServer.Client.html      |   423 +
 .../thrift/ThriftFlumeEventServer.Iface.html       |   254 +
 .../ThriftFlumeEventServer.Processor.append.html   |   351 +
 .../ThriftFlumeEventServer.Processor.close.html    |   351 +
 .../thrift/ThriftFlumeEventServer.Processor.html   |   311 +
 ...ThriftFlumeEventServer.append_args._Fields.html |   421 +
 .../thrift/ThriftFlumeEventServer.append_args.html |   684 +
 .../ThriftFlumeEventServer.close_args._Fields.html |   387 +
 .../thrift/ThriftFlumeEventServer.close_args.html  |   591 +
 ...hriftFlumeEventServer.close_result._Fields.html |   387 +
 .../ThriftFlumeEventServer.close_result.html       |   591 +
 .../handlers/thrift/ThriftFlumeEventServer.html    |   292 +
 .../handlers/thrift/class-use/EventStatus.html     |   181 +
 .../flume/handlers/thrift/class-use/Priority.html  |   227 +
 .../thrift/class-use/ThriftFlumeEvent._Fields.html |   235 +
 .../thrift/class-use/ThriftFlumeEvent.html         |   277 +
 ...ThriftFlumeEventServer.AsyncClient.Factory.html |   126 +
 ...ftFlumeEventServer.AsyncClient.append_call.html |   126 +
 ...iftFlumeEventServer.AsyncClient.close_call.html |   126 +
 .../ThriftFlumeEventServer.AsyncClient.html        |   166 +
 .../ThriftFlumeEventServer.AsyncIface.html         |   187 +
 ...riftFlumeEventServer.AsyncProcessor.append.html |   126 +
 ...hriftFlumeEventServer.AsyncProcessor.close.html |   126 +
 .../ThriftFlumeEventServer.AsyncProcessor.html     |   126 +
 .../ThriftFlumeEventServer.Client.Factory.html     |   126 +
 .../class-use/ThriftFlumeEventServer.Client.html   |   171 +
 .../class-use/ThriftFlumeEventServer.Iface.html    |   187 +
 .../ThriftFlumeEventServer.Processor.append.html   |   126 +
 .../ThriftFlumeEventServer.Processor.close.html    |   126 +
 .../ThriftFlumeEventServer.Processor.html          |   126 +
 ...ThriftFlumeEventServer.append_args._Fields.html |   235 +
 .../ThriftFlumeEventServer.append_args.html        |   219 +
 .../ThriftFlumeEventServer.close_args._Fields.html |   235 +
 .../ThriftFlumeEventServer.close_args.html         |   215 +
 ...hriftFlumeEventServer.close_result._Fields.html |   235 +
 .../ThriftFlumeEventServer.close_result.html       |   201 +
 .../thrift/class-use/ThriftFlumeEventServer.html   |   126 +
 .../flume/handlers/thrift/package-frame.html       |    51 +
 .../flume/handlers/thrift/package-summary.html     |   270 +
 .../flume/handlers/thrift/package-tree.html        |   205 +
 .../flume/handlers/thrift/package-use.html         |   206 +
 .../content/1.10.0/apidocs/constant-values.html    |  5576 ++++++++
 .../content/1.10.0/apidocs/deprecated-list.html    |   374 +
 .../releases/content/1.10.0/apidocs/help-doc.html  |   231 +
 .../releases/content/1.10.0/apidocs/index-all.html | 13075 +++++++++++++++++++
 content/releases/content/1.10.0/apidocs/index.html |    76 +
 .../1.10.0/apidocs/org/apache/flume/Channel.html   |   343 +
 .../apidocs/org/apache/flume/ChannelException.html |   313 +
 .../apidocs/org/apache/flume/ChannelFactory.html   |   252 +
 .../org/apache/flume/ChannelFullException.html     |   313 +
 .../apidocs/org/apache/flume/ChannelSelector.html  |   323 +
 .../1.10.0/apidocs/org/apache/flume/Clock.html     |   228 +
 .../1.10.0/apidocs/org/apache/flume/Constants.html |   250 +
 .../1.10.0/apidocs/org/apache/flume/Context.html   |   746 ++
 .../apidocs/org/apache/flume/CounterGroup.html     |   412 +
 .../1.10.0/apidocs/org/apache/flume/Dummy.html     |   240 +
 .../1.10.0/apidocs/org/apache/flume/Event.html     |   288 +
 .../org/apache/flume/EventDeliveryException.html   |   304 +
 .../org/apache/flume/EventDrivenSource.html        |   216 +
 .../apidocs/org/apache/flume/FlumeException.html   |   300 +
 .../apidocs/org/apache/flume/NamedComponent.html   |   248 +
 .../org/apache/flume/PollableSource.Status.html    |   347 +
 .../apidocs/org/apache/flume/PollableSource.html   |   327 +
 .../apidocs/org/apache/flume/Sink.Status.html      |   347 +
 .../1.10.0/apidocs/org/apache/flume/Sink.html      |   343 +
 .../apidocs/org/apache/flume/SinkFactory.html      |   252 +
 .../apidocs/org/apache/flume/SinkProcessor.html    |   302 +
 .../org/apache/flume/SinkRunner.PollingRunner.html |   289 +
 .../apidocs/org/apache/flume/SinkRunner.html       |   437 +
 .../1.10.0/apidocs/org/apache/flume/Source.html    |   294 +
 .../apidocs/org/apache/flume/SourceFactory.html    |   252 +
 .../apidocs/org/apache/flume/SourceRunner.html     |   332 +
 .../apidocs/org/apache/flume/SystemClock.html      |   283 +
 .../apache/flume/Transaction.TransactionState.html |   371 +
 .../apidocs/org/apache/flume/Transaction.html      |   342 +
 .../org/apache/flume/VersionAnnotation.html        |   356 +
 .../apache/flume/agent/embedded/EmbeddedAgent.html |   389 +
 .../agent/embedded/EmbeddedAgentConfiguration.html |   576 +
 .../flume/agent/embedded/EmbeddedSource.html       |   369 +
 .../agent/embedded/class-use/EmbeddedAgent.html    |   126 +
 .../class-use/EmbeddedAgentConfiguration.html      |   126 +
 .../agent/embedded/class-use/EmbeddedSource.html   |   126 +
 .../apache/flume/agent/embedded/package-frame.html |    23 +
 .../flume/agent/embedded/package-summary.html      |   173 +
 .../apache/flume/agent/embedded/package-tree.html  |   145 +
 .../apache/flume/agent/embedded/package-use.html   |   126 +
 .../org/apache/flume/annotations/Disposable.html   |   167 +
 .../InterfaceAudience.LimitedPrivate.html          |   216 +
 .../annotations/InterfaceAudience.Private.html     |   167 +
 .../annotations/InterfaceAudience.Public.html      |   167 +
 .../flume/annotations/InterfaceAudience.html       |   253 +
 .../annotations/InterfaceStability.Evolving.html   |   167 +
 .../annotations/InterfaceStability.Stable.html     |   168 +
 .../annotations/InterfaceStability.Unstable.html   |   168 +
 .../flume/annotations/InterfaceStability.html      |   286 +
 .../org/apache/flume/annotations/Recyclable.html   |   167 +
 .../flume/annotations/class-use/Disposable.html    |   195 +
 .../InterfaceAudience.LimitedPrivate.html          |   168 +
 .../class-use/InterfaceAudience.Private.html       |   587 +
 .../class-use/InterfaceAudience.Public.html        |   521 +
 .../annotations/class-use/InterfaceAudience.html   |   126 +
 .../class-use/InterfaceStability.Evolving.html     |   469 +
 .../class-use/InterfaceStability.Stable.html       |   495 +
 .../class-use/InterfaceStability.Unstable.html     |   348 +
 .../annotations/class-use/InterfaceStability.html  |   126 +
 .../flume/annotations/class-use/Recyclable.html    |   178 +
 .../apache/flume/annotations/package-frame.html    |    33 +
 .../apache/flume/annotations/package-summary.html  |   210 +
 .../org/apache/flume/annotations/package-tree.html |   151 +
 .../org/apache/flume/annotations/package-use.html  |   761 ++
 .../org/apache/flume/api/AbstractRpcClient.html    |   530 +
 .../org/apache/flume/api/FailoverRpcClient.html    |   497 +
 .../apidocs/org/apache/flume/api/HostInfo.html     |   337 +
 .../api/LoadBalancingRpcClient.HostSelector.html   |   253 +
 .../apache/flume/api/LoadBalancingRpcClient.html   |   486 +
 .../org/apache/flume/api/NettyAvroRpcClient.html   |   508 +
 .../apidocs/org/apache/flume/api/RpcClient.html    |   362 +
 .../flume/api/RpcClientConfigurationConstants.html |   851 ++
 .../flume/api/RpcClientFactory.ClientType.html     |   396 +
 .../org/apache/flume/api/RpcClientFactory.html     |   534 +
 .../api/SSLContextAwareAbstractRpcClient.html      |   445 +
 .../apache/flume/api/SecureRpcClientFactory.html   |   286 +
 ...cureThriftRpcClient.UgiSaslClientTransport.html |   591 +
 .../apache/flume/api/SecureThriftRpcClient.html    |   395 +
 .../org/apache/flume/api/ThriftRpcClient.html      |   564 +
 .../flume/api/class-use/AbstractRpcClient.html     |   193 +
 .../flume/api/class-use/FailoverRpcClient.html     |   126 +
 .../org/apache/flume/api/class-use/HostInfo.html   |   196 +
 .../LoadBalancingRpcClient.HostSelector.html       |   126 +
 .../api/class-use/LoadBalancingRpcClient.html      |   126 +
 .../flume/api/class-use/NettyAvroRpcClient.html    |   126 +
 .../org/apache/flume/api/class-use/RpcClient.html  |   315 +
 .../class-use/RpcClientConfigurationConstants.html |   126 +
 .../api/class-use/RpcClientFactory.ClientType.html |   175 +
 .../flume/api/class-use/RpcClientFactory.html      |   126 +
 .../SSLContextAwareAbstractRpcClient.html          |   176 +
 .../api/class-use/SecureRpcClientFactory.html      |   126 +
 ...cureThriftRpcClient.UgiSaslClientTransport.html |   126 +
 .../flume/api/class-use/SecureThriftRpcClient.html |   126 +
 .../flume/api/class-use/ThriftRpcClient.html       |   166 +
 .../org/apache/flume/api/package-frame.html        |    41 +
 .../org/apache/flume/api/package-summary.html      |   242 +
 .../apidocs/org/apache/flume/api/package-tree.html |   188 +
 .../apidocs/org/apache/flume/api/package-use.html  |   200 +
 .../apache/flume/auth/FlumeAuthenticationUtil.html |   310 +
 .../org/apache/flume/auth/FlumeAuthenticator.html  |   288 +
 .../org/apache/flume/auth/KerberosUser.html        |   340 +
 .../org/apache/flume/auth/PrivilegedExecutor.html  |   272 +
 .../org/apache/flume/auth/SecurityException.html   |   296 +
 .../auth/class-use/FlumeAuthenticationUtil.html    |   126 +
 .../flume/auth/class-use/FlumeAuthenticator.html   |   170 +
 .../apache/flume/auth/class-use/KerberosUser.html  |   126 +
 .../flume/auth/class-use/PrivilegedExecutor.html   |   212 +
 .../flume/auth/class-use/SecurityException.html    |   170 +
 .../org/apache/flume/auth/package-frame.html       |    31 +
 .../org/apache/flume/auth/package-summary.html     |   195 +
 .../org/apache/flume/auth/package-tree.html        |   161 +
 .../apidocs/org/apache/flume/auth/package-use.html |   195 +
 .../org/apache/flume/channel/AbstractChannel.html  |   440 +
 .../flume/channel/AbstractChannelSelector.html     |   397 +
 .../flume/channel/BasicChannelSemantics.html       |   423 +
 .../channel/BasicTransactionSemantics.State.html   |   386 +
 .../flume/channel/BasicTransactionSemantics.html   |   604 +
 .../org/apache/flume/channel/ChannelProcessor.html |   393 +
 .../flume/channel/ChannelSelectorFactory.html      |   290 +
 .../org/apache/flume/channel/ChannelUtils.html     |   434 +
 .../flume/channel/DefaultChannelFactory.html       |   307 +
 .../channel/LoadBalancingChannelSelector.html      |   361 +
 .../org/apache/flume/channel/MemoryChannel.html    |   438 +
 .../flume/channel/MultiplexingChannelSelector.html |   458 +
 .../PseudoTxnMemoryChannel.NoOpTransaction.html    |   399 +
 .../flume/channel/PseudoTxnMemoryChannel.html      |   508 +
 .../flume/channel/ReplicatingChannelSelector.html  |   395 +
 .../SpillableMemoryChannel.DrainOrderQueue.html    |   430 +
 .../flume/channel/SpillableMemoryChannel.html      |   853 ++
 .../flume/channel/class-use/AbstractChannel.html   |   268 +
 .../channel/class-use/AbstractChannelSelector.html |   178 +
 .../channel/class-use/BasicChannelSemantics.html   |   226 +
 .../class-use/BasicTransactionSemantics.State.html |   179 +
 .../class-use/BasicTransactionSemantics.html       |   222 +
 .../flume/channel/class-use/ChannelProcessor.html  |   226 +
 .../channel/class-use/ChannelSelectorFactory.html  |   126 +
 .../flume/channel/class-use/ChannelUtils.html      |   126 +
 .../channel/class-use/DefaultChannelFactory.html   |   126 +
 .../class-use/LoadBalancingChannelSelector.html    |   126 +
 .../flume/channel/class-use/MemoryChannel.html     |   126 +
 .../class-use/MultiplexingChannelSelector.html     |   126 +
 .../PseudoTxnMemoryChannel.NoOpTransaction.html    |   126 +
 .../channel/class-use/PseudoTxnMemoryChannel.html  |   126 +
 .../class-use/ReplicatingChannelSelector.html      |   126 +
 .../SpillableMemoryChannel.DrainOrderQueue.html    |   166 +
 .../channel/class-use/SpillableMemoryChannel.html  |   126 +
 .../flume/channel/file/BadCheckpointException.html |   290 +
 .../flume/channel/file/CheckpointRebuilder.html    |   307 +
 .../flume/channel/file/CorruptEventException.html  |   290 +
 .../org/apache/flume/channel/file/EventUtils.html  |   282 +
 .../org/apache/flume/channel/file/FileChannel.html |   554 +
 .../channel/file/FileChannelConfiguration.html     |   805 ++
 .../apidocs/org/apache/flume/channel/file/Log.html |   392 +
 .../file/LogFile.OperationRecordUpdater.html       |   303 +
 .../channel/file/LogFile.SequentialReader.html     |   376 +
 .../org/apache/flume/channel/file/LogFile.html     |   448 +
 .../channel/file/LogFileRetryableIOException.html  |   295 +
 .../channel/file/LogFileV3.SequentialReader.html   |   301 +
 .../org/apache/flume/channel/file/LogFileV3.html   |   340 +
 .../org/apache/flume/channel/file/LogRecord.html   |   325 +
 .../org/apache/flume/channel/file/LogUtils.html    |   239 +
 .../flume/channel/file/NoopRecordException.html    |   290 +
 .../apache/flume/channel/file/Serialization.html   |   400 +
 .../flume/channel/file/TransactionEventRecord.html |   313 +
 .../flume/channel/file/TransactionIDOracle.html    |   252 +
 .../flume/channel/file/WriteOrderOracle.html       |   252 +
 .../file/class-use/BadCheckpointException.html     |   126 +
 .../file/class-use/CheckpointRebuilder.html        |   126 +
 .../file/class-use/CorruptEventException.html      |   174 +
 .../flume/channel/file/class-use/EventUtils.html   |   126 +
 .../flume/channel/file/class-use/FileChannel.html  |   169 +
 .../file/class-use/FileChannelConfiguration.html   |   126 +
 .../apache/flume/channel/file/class-use/Log.html   |   126 +
 .../class-use/LogFile.OperationRecordUpdater.html  |   126 +
 .../file/class-use/LogFile.SequentialReader.html   |   166 +
 .../flume/channel/file/class-use/LogFile.html      |   168 +
 .../class-use/LogFileRetryableIOException.html     |   126 +
 .../file/class-use/LogFileV3.SequentialReader.html |   126 +
 .../flume/channel/file/class-use/LogFileV3.html    |   126 +
 .../flume/channel/file/class-use/LogRecord.html    |   179 +
 .../flume/channel/file/class-use/LogUtils.html     |   126 +
 .../file/class-use/NoopRecordException.html        |   126 +
 .../channel/file/class-use/Serialization.html      |   126 +
 .../file/class-use/TransactionEventRecord.html     |   194 +
 .../file/class-use/TransactionIDOracle.html        |   126 +
 .../channel/file/class-use/WriteOrderOracle.html   |   126 +
 .../AESCTRNoPaddingProvider.DecryptorBuilder.html  |   308 +
 .../AESCTRNoPaddingProvider.EncryptorBuilder.html  |   308 +
 .../file/encryption/AESCTRNoPaddingProvider.html   |   329 +
 .../CipherProvider.Decryptor.Builder.html          |   357 +
 .../file/encryption/CipherProvider.Decryptor.html  |   311 +
 .../CipherProvider.Encryptor.Builder.html          |   331 +
 .../file/encryption/CipherProvider.Encryptor.html  |   324 +
 .../channel/file/encryption/CipherProvider.html    |   313 +
 .../file/encryption/CipherProviderFactory.html     |   292 +
 .../file/encryption/CipherProviderType.html        |   356 +
 .../encryption/DecryptionFailureException.html     |   290 +
 .../file/encryption/EncryptionConfiguration.html   |   389 +
 .../encryption/JCEFileKeyProvider.Builder.html     |   286 +
 .../file/encryption/JCEFileKeyProvider.html        |   309 +
 .../file/encryption/KeyProvider.Builder.html       |   231 +
 .../flume/channel/file/encryption/KeyProvider.html |   299 +
 .../file/encryption/KeyProviderFactory.html        |   275 +
 .../channel/file/encryption/KeyProviderType.html   |   356 +
 .../AESCTRNoPaddingProvider.DecryptorBuilder.html  |   126 +
 .../AESCTRNoPaddingProvider.EncryptorBuilder.html  |   126 +
 .../class-use/AESCTRNoPaddingProvider.html         |   126 +
 .../CipherProvider.Decryptor.Builder.html          |   191 +
 .../class-use/CipherProvider.Decryptor.html        |   183 +
 .../CipherProvider.Encryptor.Builder.html          |   187 +
 .../class-use/CipherProvider.Encryptor.html        |   182 +
 .../file/encryption/class-use/CipherProvider.html  |   179 +
 .../class-use/CipherProviderFactory.html           |   126 +
 .../encryption/class-use/CipherProviderType.html   |   175 +
 .../class-use/DecryptionFailureException.html      |   126 +
 .../class-use/EncryptionConfiguration.html         |   126 +
 .../class-use/JCEFileKeyProvider.Builder.html      |   126 +
 .../encryption/class-use/JCEFileKeyProvider.html   |   126 +
 .../encryption/class-use/KeyProvider.Builder.html  |   179 +
 .../file/encryption/class-use/KeyProvider.html     |   223 +
 .../encryption/class-use/KeyProviderFactory.html   |   126 +
 .../file/encryption/class-use/KeyProviderType.html |   175 +
 .../channel/file/encryption/package-frame.html     |    47 +
 .../channel/file/encryption/package-summary.html   |   252 +
 .../channel/file/encryption/package-tree.html      |   198 +
 .../flume/channel/file/encryption/package-use.html |   206 +
 .../file/instrumentation/FileChannelCounter.html   |   553 +
 .../instrumentation/FileChannelCounterMBean.html   |   364 +
 .../class-use/FileChannelCounter.html              |   126 +
 .../class-use/FileChannelCounterMBean.html         |   166 +
 .../file/instrumentation/package-frame.html        |    25 +
 .../file/instrumentation/package-summary.html      |   159 +
 .../channel/file/instrumentation/package-tree.html |   155 +
 .../channel/file/instrumentation/package-use.html  |   159 +
 .../apache/flume/channel/file/package-frame.html   |    43 +
 .../apache/flume/channel/file/package-summary.html |   247 +
 .../apache/flume/channel/file/package-tree.html    |   192 +
 .../org/apache/flume/channel/file/package-use.html |   196 +
 .../proto/ProtosFactory.ActiveLog.Builder.html     |   650 +
 .../file/proto/ProtosFactory.ActiveLog.html        |   928 ++
 .../proto/ProtosFactory.ActiveLogOrBuilder.html    |   301 +
 .../proto/ProtosFactory.Checkpoint.Builder.html    |  1110 ++
 .../file/proto/ProtosFactory.Checkpoint.html       |  1159 ++
 .../proto/ProtosFactory.CheckpointOrBuilder.html   |   445 +
 .../file/proto/ProtosFactory.Commit.Builder.html   |   578 +
 .../channel/file/proto/ProtosFactory.Commit.html   |   871 ++
 .../file/proto/ProtosFactory.CommitOrBuilder.html  |   269 +
 .../proto/ProtosFactory.FlumeEvent.Builder.html    |   894 ++
 .../file/proto/ProtosFactory.FlumeEvent.html       |   988 ++
 .../ProtosFactory.FlumeEventHeader.Builder.html    |   722 +
 .../file/proto/ProtosFactory.FlumeEventHeader.html |   968 ++
 .../ProtosFactory.FlumeEventHeaderOrBuilder.html   |   333 +
 .../proto/ProtosFactory.FlumeEventOrBuilder.html   |   349 +
 .../ProtosFactory.LogFileEncryption.Builder.html   |   794 ++
 .../proto/ProtosFactory.LogFileEncryption.html     |  1025 ++
 .../ProtosFactory.LogFileEncryptionOrBuilder.html  |   365 +
 .../ProtosFactory.LogFileMetaData.Builder.html     |  1078 ++
 .../file/proto/ProtosFactory.LogFileMetaData.html  |  1233 ++
 .../ProtosFactory.LogFileMetaDataOrBuilder.html    |   477 +
 .../file/proto/ProtosFactory.Put.Builder.html      |   718 +
 .../channel/file/proto/ProtosFactory.Put.html      |   948 ++
 .../file/proto/ProtosFactory.PutOrBuilder.html     |   317 +
 .../file/proto/ProtosFactory.Rollback.Builder.html |   506 +
 .../channel/file/proto/ProtosFactory.Rollback.html |   814 ++
 .../proto/ProtosFactory.RollbackOrBuilder.html     |   210 +
 .../file/proto/ProtosFactory.Take.Builder.html     |   650 +
 .../channel/file/proto/ProtosFactory.Take.html     |   928 ++
 .../file/proto/ProtosFactory.TakeOrBuilder.html    |   301 +
 ...otosFactory.TransactionEventFooter.Builder.html |   506 +
 .../ProtosFactory.TransactionEventFooter.html      |   814 ++
 ...tosFactory.TransactionEventFooterOrBuilder.html |   210 +
 ...otosFactory.TransactionEventHeader.Builder.html |   722 +
 .../ProtosFactory.TransactionEventHeader.html      |   985 ++
 ...tosFactory.TransactionEventHeaderOrBuilder.html |   333 +
 .../flume/channel/file/proto/ProtosFactory.html    |   387 +
 .../class-use/ProtosFactory.ActiveLog.Builder.html |   289 +
 .../proto/class-use/ProtosFactory.ActiveLog.html   |   341 +
 .../ProtosFactory.ActiveLogOrBuilder.html          |   228 +
 .../ProtosFactory.Checkpoint.Builder.html          |   309 +
 .../proto/class-use/ProtosFactory.Checkpoint.html  |   270 +
 .../ProtosFactory.CheckpointOrBuilder.html         |   174 +
 .../class-use/ProtosFactory.Commit.Builder.html    |   215 +
 .../file/proto/class-use/ProtosFactory.Commit.html |   270 +
 .../class-use/ProtosFactory.CommitOrBuilder.html   |   174 +
 .../ProtosFactory.FlumeEvent.Builder.html          |   294 +
 .../proto/class-use/ProtosFactory.FlumeEvent.html  |   300 +
 .../ProtosFactory.FlumeEventHeader.Builder.html    |   301 +
 .../class-use/ProtosFactory.FlumeEventHeader.html  |   341 +
 .../ProtosFactory.FlumeEventHeaderOrBuilder.html   |   228 +
 .../ProtosFactory.FlumeEventOrBuilder.html         |   201 +
 .../ProtosFactory.LogFileEncryption.Builder.html   |   272 +
 .../class-use/ProtosFactory.LogFileEncryption.html |   300 +
 .../ProtosFactory.LogFileEncryptionOrBuilder.html  |   201 +
 .../ProtosFactory.LogFileMetaData.Builder.html     |   299 +
 .../class-use/ProtosFactory.LogFileMetaData.html   |   270 +
 .../ProtosFactory.LogFileMetaDataOrBuilder.html    |   174 +
 .../proto/class-use/ProtosFactory.Put.Builder.html |   239 +
 .../file/proto/class-use/ProtosFactory.Put.html    |   270 +
 .../class-use/ProtosFactory.PutOrBuilder.html      |   174 +
 .../class-use/ProtosFactory.Rollback.Builder.html  |   203 +
 .../proto/class-use/ProtosFactory.Rollback.html    |   270 +
 .../class-use/ProtosFactory.RollbackOrBuilder.html |   174 +
 .../class-use/ProtosFactory.Take.Builder.html      |   227 +
 .../file/proto/class-use/ProtosFactory.Take.html   |   270 +
 .../class-use/ProtosFactory.TakeOrBuilder.html     |   174 +
 ...otosFactory.TransactionEventFooter.Builder.html |   203 +
 .../ProtosFactory.TransactionEventFooter.html      |   270 +
 ...tosFactory.TransactionEventFooterOrBuilder.html |   174 +
 ...otosFactory.TransactionEventHeader.Builder.html |   239 +
 .../ProtosFactory.TransactionEventHeader.html      |   270 +
 ...tosFactory.TransactionEventHeaderOrBuilder.html |   174 +
 .../file/proto/class-use/ProtosFactory.html        |   126 +
 .../flume/channel/file/proto/package-frame.html    |    60 +
 .../flume/channel/file/proto/package-summary.html  |   347 +
 .../flume/channel/file/proto/package-tree.html     |   210 +
 .../flume/channel/file/proto/package-use.html      |   312 +
 .../flume/channel/jdbc/ConfigurationConstants.html |   854 ++
 .../apache/flume/channel/jdbc/DatabaseType.html    |   450 +
 .../org/apache/flume/channel/jdbc/JdbcChannel.html |   434 +
 .../flume/channel/jdbc/JdbcChannelException.html   |   300 +
 .../flume/channel/jdbc/JdbcChannelProvider.html    |   316 +
 .../channel/jdbc/JdbcChannelProviderFactory.html   |   254 +
 .../flume/channel/jdbc/TransactionIsolation.html   |   423 +
 .../jdbc/class-use/ConfigurationConstants.html     |   126 +
 .../flume/channel/jdbc/class-use/DatabaseType.html |   202 +
 .../flume/channel/jdbc/class-use/JdbcChannel.html  |   126 +
 .../jdbc/class-use/JdbcChannelException.html       |   126 +
 .../jdbc/class-use/JdbcChannelProvider.html        |   189 +
 .../jdbc/class-use/JdbcChannelProviderFactory.html |   126 +
 .../jdbc/class-use/TransactionIsolation.html       |   179 +
 .../channel/jdbc/impl/DerbySchemaHandler.html      |  1086 ++
 .../channel/jdbc/impl/JdbcChannelProviderImpl.html |   401 +
 .../channel/jdbc/impl/JdbcTransactionFactory.html  |   293 +
 .../channel/jdbc/impl/JdbcTransactionImpl.html     |   424 +
 .../channel/jdbc/impl/MySQLSchemaHandler.html      |   419 +
 .../jdbc/impl/PersistableEvent.Builder.html        |   378 +
 .../jdbc/impl/PersistableEvent.HeaderEntry.html    |   344 +
 .../impl/PersistableEvent.SpillableString.html     |   318 +
 .../flume/channel/jdbc/impl/PersistableEvent.html  |   460 +
 .../flume/channel/jdbc/impl/SchemaHandler.html     |   349 +
 .../channel/jdbc/impl/SchemaHandlerFactory.html    |   242 +
 .../jdbc/impl/class-use/DerbySchemaHandler.html    |   126 +
 .../impl/class-use/JdbcChannelProviderImpl.html    |   170 +
 .../impl/class-use/JdbcTransactionFactory.html     |   166 +
 .../jdbc/impl/class-use/JdbcTransactionImpl.html   |   170 +
 .../jdbc/impl/class-use/MySQLSchemaHandler.html    |   126 +
 .../impl/class-use/PersistableEvent.Builder.html   |   194 +
 .../class-use/PersistableEvent.HeaderEntry.html    |   166 +
 .../PersistableEvent.SpillableString.html          |   170 +
 .../jdbc/impl/class-use/PersistableEvent.html      |   209 +
 .../channel/jdbc/impl/class-use/SchemaHandler.html |   187 +
 .../jdbc/impl/class-use/SchemaHandlerFactory.html  |   126 +
 .../flume/channel/jdbc/impl/package-frame.html     |    34 +
 .../flume/channel/jdbc/impl/package-summary.html   |   203 +
 .../flume/channel/jdbc/impl/package-tree.html      |   156 +
 .../flume/channel/jdbc/impl/package-use.html       |   183 +
 .../apache/flume/channel/jdbc/package-frame.html   |    36 +
 .../apache/flume/channel/jdbc/package-summary.html |   208 +
 .../apache/flume/channel/jdbc/package-tree.html    |   179 +
 .../org/apache/flume/channel/jdbc/package-use.html |   191 +
 .../apache/flume/channel/kafka/KafkaChannel.html   |   436 +
 .../channel/kafka/KafkaChannelConfiguration.html   |   843 ++
 .../channel/kafka/class-use/KafkaChannel.html      |   126 +
 .../kafka/class-use/KafkaChannelConfiguration.html |   126 +
 .../apache/flume/channel/kafka/package-frame.html  |    22 +
 .../flume/channel/kafka/package-summary.html       |   148 +
 .../apache/flume/channel/kafka/package-tree.html   |   148 +
 .../apache/flume/channel/kafka/package-use.html    |   126 +
 .../org/apache/flume/channel/package-frame.html    |    40 +
 .../org/apache/flume/channel/package-summary.html  |   258 +
 .../org/apache/flume/channel/package-tree.html     |   179 +
 .../org/apache/flume/channel/package-use.html      |   320 +
 .../org/apache/flume/class-use/Channel.html        |   592 +
 .../apache/flume/class-use/ChannelException.html   |   338 +
 .../org/apache/flume/class-use/ChannelFactory.html |   166 +
 .../flume/class-use/ChannelFullException.html      |   126 +
 .../apache/flume/class-use/ChannelSelector.html    |   216 +
 .../apidocs/org/apache/flume/class-use/Clock.html  |   203 +
 .../org/apache/flume/class-use/Constants.html      |   126 +
 .../org/apache/flume/class-use/Context.html        |  1425 ++
 .../org/apache/flume/class-use/CounterGroup.html   |   188 +
 .../apidocs/org/apache/flume/class-use/Dummy.html  |   126 +
 .../apidocs/org/apache/flume/class-use/Event.html  |  1627 +++
 .../flume/class-use/EventDeliveryException.html    |   599 +
 .../apache/flume/class-use/EventDrivenSource.html  |   372 +
 .../org/apache/flume/class-use/FlumeException.html |   793 ++
 .../org/apache/flume/class-use/NamedComponent.html |   965 ++
 .../flume/class-use/PollableSource.Status.html     |   282 +
 .../org/apache/flume/class-use/PollableSource.html |   254 +
 .../org/apache/flume/class-use/Sink.Status.html    |   420 +
 .../apidocs/org/apache/flume/class-use/Sink.html   |   587 +
 .../org/apache/flume/class-use/SinkFactory.html    |   166 +
 .../org/apache/flume/class-use/SinkProcessor.html  |   266 +
 .../flume/class-use/SinkRunner.PollingRunner.html  |   126 +
 .../org/apache/flume/class-use/SinkRunner.html     |   189 +
 .../apidocs/org/apache/flume/class-use/Source.html |   596 +
 .../org/apache/flume/class-use/SourceFactory.html  |   166 +
 .../org/apache/flume/class-use/SourceRunner.html   |   247 +
 .../org/apache/flume/class-use/SystemClock.html    |   126 +
 .../class-use/Transaction.TransactionState.html    |   175 +
 .../org/apache/flume/class-use/Transaction.html    |   274 +
 .../apache/flume/class-use/VersionAnnotation.html  |   126 +
 .../apache/flume/client/avro/AvroCLIClient.html    |   275 +
 .../org/apache/flume/client/avro/EventReader.html  |   300 +
 .../flume/client/avro/ReliableEventReader.html     |   255 +
 .../ReliableSpoolingFileEventReader.Builder.html   |   517 +
 ...ableSpoolingFileEventReader.TrackingPolicy.html |   349 +
 .../avro/ReliableSpoolingFileEventReader.html      |   421 +
 .../client/avro/SimpleTextLineEventReader.html     |   351 +
 .../flume/client/avro/class-use/AvroCLIClient.html |   126 +
 .../flume/client/avro/class-use/EventReader.html   |   214 +
 .../client/avro/class-use/ReliableEventReader.html |   192 +
 .../ReliableSpoolingFileEventReader.Builder.html   |   234 +
 ...ableSpoolingFileEventReader.TrackingPolicy.html |   175 +
 .../class-use/ReliableSpoolingFileEventReader.html |   187 +
 .../avro/class-use/SimpleTextLineEventReader.html  |   126 +
 .../apache/flume/client/avro/package-frame.html    |    33 +
 .../apache/flume/client/avro/package-summary.html  |   204 +
 .../org/apache/flume/client/avro/package-tree.html |   170 +
 .../org/apache/flume/client/avro/package-use.html  |   232 +
 .../log4jappender/LoadBalancingLog4jAppender.html  |   437 +
 .../flume/clients/log4jappender/Log4jAppender.html |   591 +
 .../clients/log4jappender/Log4jAvroHeaders.html    |   458 +
 .../class-use/LoadBalancingLog4jAppender.html      |   126 +
 .../log4jappender/class-use/Log4jAppender.html     |   169 +
 .../log4jappender/class-use/Log4jAvroHeaders.html  |   179 +
 .../flume/clients/log4jappender/package-frame.html |    26 +
 .../clients/log4jappender/package-summary.html     |   169 +
 .../flume/clients/log4jappender/package-tree.html  |   159 +
 .../flume/clients/log4jappender/package-use.html   |   165 +
 .../flume/conf/BasicConfigurationConstants.html    |   483 +
 .../org/apache/flume/conf/BatchSizeSupported.html  |   232 +
 .../conf/ComponentConfiguration.ComponentType.html |   432 +
 .../apache/flume/conf/ComponentConfiguration.html  |   509 +
 .../flume/conf/ComponentConfigurationFactory.html  |   282 +
 .../apache/flume/conf/ComponentWithClassName.html  |   227 +
 .../org/apache/flume/conf/ConfigFilterFactory.html |   298 +
 .../org/apache/flume/conf/Configurable.html        |   254 +
 .../apache/flume/conf/ConfigurableComponent.html   |   231 +
 .../org/apache/flume/conf/Configurables.html       |   333 +
 .../apache/flume/conf/ConfigurationException.html  |   300 +
 .../FlumeConfiguration.AgentConfiguration.html     |   438 +
 ...umeConfiguration.ComponentNameAndConfigKey.html |   256 +
 .../org/apache/flume/conf/FlumeConfiguration.html  |   396 +
 .../FlumeConfigurationError.ErrorOrWarning.html    |   347 +
 .../apache/flume/conf/FlumeConfigurationError.html |   346 +
 .../flume/conf/FlumeConfigurationErrorType.html    |   488 +
 .../org/apache/flume/conf/LogPrivacyUtil.html      |   372 +
 .../flume/conf/TransactionCapacitySupported.html   |   232 +
 ...nnelConfiguration.ChannelConfigurationType.html |   423 +
 .../flume/conf/channel/ChannelConfiguration.html   |   292 +
 ...iguration.ChannelSelectorConfigurationType.html |   390 +
 .../conf/channel/ChannelSelectorConfiguration.html |   369 +
 .../flume/conf/channel/ChannelSelectorType.html    |   415 +
 .../org/apache/flume/conf/channel/ChannelType.html |   442 +
 ...nnelConfiguration.ChannelConfigurationType.html |   175 +
 .../channel/class-use/ChannelConfiguration.html    |   166 +
 ...iguration.ChannelSelectorConfigurationType.html |   175 +
 .../class-use/ChannelSelectorConfiguration.html    |   224 +
 .../channel/class-use/ChannelSelectorType.html     |   175 +
 .../flume/conf/channel/class-use/ChannelType.html  |   175 +
 .../apache/flume/conf/channel/package-frame.html   |    29 +
 .../apache/flume/conf/channel/package-summary.html |   179 +
 .../apache/flume/conf/channel/package-tree.html    |   159 +
 .../org/apache/flume/conf/channel/package-use.html |   216 +
 .../class-use/BasicConfigurationConstants.html     |   126 +
 .../flume/conf/class-use/BatchSizeSupported.html   |   488 +
 .../ComponentConfiguration.ComponentType.html      |   190 +
 .../conf/class-use/ComponentConfiguration.html     |   428 +
 .../class-use/ComponentConfigurationFactory.html   |   126 +
 .../conf/class-use/ComponentWithClassName.html     |   248 +
 .../flume/conf/class-use/ConfigFilterFactory.html  |   126 +
 .../apache/flume/conf/class-use/Configurable.html  |  1397 ++
 .../conf/class-use/ConfigurableComponent.html      |   333 +
 .../apache/flume/conf/class-use/Configurables.html |   126 +
 .../conf/class-use/ConfigurationException.html     |   331 +
 .../FlumeConfiguration.AgentConfiguration.html     |   166 +
 ...umeConfiguration.ComponentNameAndConfigKey.html |   126 +
 .../flume/conf/class-use/FlumeConfiguration.html   |   182 +
 .../FlumeConfigurationError.ErrorOrWarning.html    |   195 +
 .../conf/class-use/FlumeConfigurationError.html    |   183 +
 .../class-use/FlumeConfigurationErrorType.html     |   195 +
 .../flume/conf/class-use/LogPrivacyUtil.html       |   126 +
 .../class-use/TransactionCapacitySupported.html    |   204 +
 ...onfiguration.ConfigFilterConfigurationType.html |   402 +
 .../configfilter/ConfigFilterConfiguration.html    |   294 +
 .../flume/conf/configfilter/ConfigFilterType.html  |   385 +
 ...onfiguration.ConfigFilterConfigurationType.html |   175 +
 .../class-use/ConfigFilterConfiguration.html       |   166 +
 .../configfilter/class-use/ConfigFilterType.html   |   175 +
 .../flume/conf/configfilter/package-frame.html     |    26 +
 .../flume/conf/configfilter/package-summary.html   |   163 +
 .../flume/conf/configfilter/package-tree.html      |   156 +
 .../flume/conf/configfilter/package-use.html       |   165 +
 .../org/apache/flume/conf/package-frame.html       |    48 +
 .../org/apache/flume/conf/package-summary.html     |   270 +
 .../org/apache/flume/conf/package-tree.html        |   187 +
 .../apidocs/org/apache/flume/conf/package-use.html |  1102 ++
 .../SinkConfiguration.SinkConfigurationType.html   |   627 +
 .../apache/flume/conf/sink/SinkConfiguration.html  |   404 +
 .../flume/conf/sink/SinkGroupConfiguration.html    |   400 +
 ...nfiguration.SinkProcessorConfigurationType.html |   407 +
 .../conf/sink/SinkProcessorConfiguration.html      |   389 +
 .../apache/flume/conf/sink/SinkProcessorType.html  |   426 +
 .../org/apache/flume/conf/sink/SinkType.html       |   628 +
 .../SinkConfiguration.SinkConfigurationType.html   |   175 +
 .../conf/sink/class-use/SinkConfiguration.html     |   166 +
 .../sink/class-use/SinkGroupConfiguration.html     |   126 +
 ...nfiguration.SinkProcessorConfigurationType.html |   175 +
 .../sink/class-use/SinkProcessorConfiguration.html |   183 +
 .../conf/sink/class-use/SinkProcessorType.html     |   175 +
 .../apache/flume/conf/sink/class-use/SinkType.html |   175 +
 .../org/apache/flume/conf/sink/package-frame.html  |    30 +
 .../apache/flume/conf/sink/package-summary.html    |   181 +
 .../org/apache/flume/conf/sink/package-tree.html   |   160 +
 .../org/apache/flume/conf/sink/package-use.html    |   176 +
 ...ourceConfiguration.SourceConfigurationType.html |   606 +
 .../flume/conf/source/SourceConfiguration.html     |   417 +
 .../org/apache/flume/conf/source/SourceType.html   |   617 +
 ...ourceConfiguration.SourceConfigurationType.html |   175 +
 .../conf/source/class-use/SourceConfiguration.html |   166 +
 .../flume/conf/source/class-use/SourceType.html    |   175 +
 .../apache/flume/conf/source/package-frame.html    |    26 +
 .../apache/flume/conf/source/package-summary.html  |   165 +
 .../org/apache/flume/conf/source/package-tree.html |   156 +
 .../org/apache/flume/conf/source/package-use.html  |   167 +
 .../flume/configfilter/AbstractConfigFilter.html   |   321 +
 .../apache/flume/configfilter/ConfigFilter.html    |   297 +
 .../EnvironmentVariableConfigFilter.html           |   320 +
 .../configfilter/ExternalProcessConfigFilter.html  |   320 +
 .../HadoopCredentialStoreConfigFilter.html         |   320 +
 .../class-use/AbstractConfigFilter.html            |   174 +
 .../flume/configfilter/class-use/ConfigFilter.html |   216 +
 .../class-use/EnvironmentVariableConfigFilter.html |   126 +
 .../class-use/ExternalProcessConfigFilter.html     |   126 +
 .../HadoopCredentialStoreConfigFilter.html         |   126 +
 .../apache/flume/configfilter/package-frame.html   |    28 +
 .../apache/flume/configfilter/package-summary.html |   175 +
 .../apache/flume/configfilter/package-tree.html    |   149 +
 .../org/apache/flume/configfilter/package-use.html |   187 +
 .../org/apache/flume/event/EventBuilder.html       |   330 +
 .../org/apache/flume/event/EventHelper.html        |   288 +
 .../apidocs/org/apache/flume/event/JSONEvent.html  |   366 +
 .../org/apache/flume/event/SimpleEvent.html        |   370 +
 .../apache/flume/event/class-use/EventBuilder.html |   126 +
 .../apache/flume/event/class-use/EventHelper.html  |   126 +
 .../apache/flume/event/class-use/JSONEvent.html    |   126 +
 .../apache/flume/event/class-use/SimpleEvent.html  |   126 +
 .../org/apache/flume/event/package-frame.html      |    24 +
 .../org/apache/flume/event/package-summary.html    |   156 +
 .../org/apache/flume/event/package-tree.html       |   142 +
 .../org/apache/flume/event/package-use.html        |   126 +
 .../apache/flume/formatter/output/BucketPath.html  |   718 +
 .../output/DefaultPathManager.Builder.html         |   286 +
 .../flume/formatter/output/DefaultPathManager.html |   468 +
 .../flume/formatter/output/EventFormatter.html     |   232 +
 .../formatter/output/PathManager.Builder.html      |   233 +
 .../apache/flume/formatter/output/PathManager.html |   345 +
 .../flume/formatter/output/PathManagerFactory.html |   276 +
 .../flume/formatter/output/PathManagerType.html    |   370 +
 .../output/RollTimePathManager.Builder.html        |   286 +
 .../formatter/output/RollTimePathManager.html      |   336 +
 .../output/TextDelimitedOutputFormatter.html       |   290 +
 .../formatter/output/class-use/BucketPath.html     |   126 +
 .../class-use/DefaultPathManager.Builder.html      |   126 +
 .../output/class-use/DefaultPathManager.html       |   166 +
 .../formatter/output/class-use/EventFormatter.html |   168 +
 .../output/class-use/PathManager.Builder.html      |   183 +
 .../formatter/output/class-use/PathManager.html    |   196 +
 .../output/class-use/PathManagerFactory.html       |   126 +
 .../output/class-use/PathManagerType.html          |   175 +
 .../class-use/RollTimePathManager.Builder.html     |   126 +
 .../output/class-use/RollTimePathManager.html      |   126 +
 .../class-use/TextDelimitedOutputFormatter.html    |   126 +
 .../flume/formatter/output/package-frame.html      |    37 +
 .../flume/formatter/output/package-summary.html    |   213 +
 .../flume/formatter/output/package-tree.html       |   166 +
 .../apache/flume/formatter/output/package-use.html |   178 +
 .../flume/instrumentation/ChannelCounter.html      |   514 +
 .../flume/instrumentation/ChannelCounterMBean.html |   353 +
 .../instrumentation/ChannelProcessorCounter.html   |   268 +
 .../GangliaServer.GangliaCollector.html            |   292 +
 .../flume/instrumentation/GangliaServer.html       |   759 ++
 .../flume/instrumentation/MonitorService.html      |   256 +
 .../MonitoredCounterGroup.Type.html                |   423 +
 .../instrumentation/MonitoredCounterGroup.html     |   511 +
 .../flume/instrumentation/MonitoringType.html      |   369 +
 .../apache/flume/instrumentation/SinkCounter.html  |   656 +
 .../flume/instrumentation/SinkCounterMBean.html    |   388 +
 .../instrumentation/SinkProcessorCounter.html      |   268 +
 .../flume/instrumentation/SourceCounter.html       |   656 +
 .../flume/instrumentation/SourceCounterMBean.html  |   388 +
 .../instrumentation/class-use/ChannelCounter.html  |   210 +
 .../class-use/ChannelCounterMBean.html             |   223 +
 .../class-use/ChannelProcessorCounter.html         |   126 +
 .../class-use/GangliaServer.GangliaCollector.html  |   166 +
 .../instrumentation/class-use/GangliaServer.html   |   126 +
 .../instrumentation/class-use/MonitorService.html  |   207 +
 .../class-use/MonitoredCounterGroup.Type.html      |   188 +
 .../class-use/MonitoredCounterGroup.html           |   234 +
 .../instrumentation/class-use/MonitoringType.html  |   175 +
 .../instrumentation/class-use/SinkCounter.html     |   188 +
 .../class-use/SinkCounterMBean.html                |   188 +
 .../class-use/SinkProcessorCounter.html            |   126 +
 .../instrumentation/class-use/SourceCounter.html   |   244 +
 .../class-use/SourceCounterMBean.html              |   188 +
 .../instrumentation/http/HTTPMetricsServer.html    |   387 +
 .../http/class-use/HTTPMetricsServer.html          |   126 +
 .../flume/instrumentation/http/package-frame.html  |    21 +
 .../instrumentation/http/package-summary.html      |   147 +
 .../flume/instrumentation/http/package-tree.html   |   139 +
 .../flume/instrumentation/http/package-use.html    |   126 +
 .../instrumentation/kafka/KafkaChannelCounter.html |   438 +
 .../kafka/KafkaChannelCounterMBean.html            |   396 +
 .../instrumentation/kafka/KafkaSinkCounter.html    |   378 +
 .../kafka/KafkaSinkCounterMBean.html               |   383 +
 .../instrumentation/kafka/KafkaSourceCounter.html  |   408 +
 .../kafka/KafkaSourceCounterMBean.html             |   383 +
 .../kafka/class-use/KafkaChannelCounter.html       |   126 +
 .../kafka/class-use/KafkaChannelCounterMBean.html  |   166 +
 .../kafka/class-use/KafkaSinkCounter.html          |   126 +
 .../kafka/class-use/KafkaSinkCounterMBean.html     |   166 +
 .../kafka/class-use/KafkaSourceCounter.html        |   126 +
 .../kafka/class-use/KafkaSourceCounterMBean.html   |   166 +
 .../flume/instrumentation/kafka/package-frame.html |    29 +
 .../instrumentation/kafka/package-summary.html     |   175 +
 .../flume/instrumentation/kafka/package-tree.html  |   163 +
 .../flume/instrumentation/kafka/package-use.html   |   165 +
 .../flume/instrumentation/package-frame.html       |    39 +
 .../flume/instrumentation/package-summary.html     |   234 +
 .../apache/flume/instrumentation/package-tree.html |   173 +
 .../apache/flume/instrumentation/package-use.html  |   391 +
 .../flume/instrumentation/util/JMXPollUtil.html    |   273 +
 .../util/class-use/JMXPollUtil.html                |   126 +
 .../flume/instrumentation/util/package-frame.html  |    21 +
 .../instrumentation/util/package-summary.html      |   144 +
 .../flume/instrumentation/util/package-tree.html   |   139 +
 .../flume/instrumentation/util/package-use.html    |   126 +
 .../flume/interceptor/HostInterceptor.Builder.html |   318 +
 .../interceptor/HostInterceptor.Constants.html     |   344 +
 .../apache/flume/interceptor/HostInterceptor.html  |   374 +
 .../flume/interceptor/Interceptor.Builder.html     |   244 +
 .../org/apache/flume/interceptor/Interceptor.html  |   317 +
 .../interceptor/InterceptorBuilderFactory.html     |   287 +
 .../apache/flume/interceptor/InterceptorChain.html |   391 +
 .../apache/flume/interceptor/InterceptorType.html  |   416 +
 .../RegexExtractorInterceptor.Builder.html         |   317 +
 .../interceptor/RegexExtractorInterceptor.html     |   408 +
 .../RegexExtractorInterceptorMillisSerializer.html |   336 +
 ...xExtractorInterceptorPassThroughSerializer.html |   335 +
 .../RegexExtractorInterceptorSerializer.html       |   254 +
 .../RegexFilteringInterceptor.Builder.html         |   318 +
 .../RegexFilteringInterceptor.Constants.html       |   334 +
 .../interceptor/RegexFilteringInterceptor.html     |   382 +
 .../RemoveHeaderInterceptor.Builder.html           |   322 +
 .../flume/interceptor/RemoveHeaderInterceptor.html |   371 +
 .../SearchAndReplaceInterceptor.Builder.html       |   317 +
 .../interceptor/SearchAndReplaceInterceptor.html   |   360 +
 .../interceptor/StaticInterceptor.Builder.html     |   318 +
 .../interceptor/StaticInterceptor.Constants.html   |   368 +
 .../flume/interceptor/StaticInterceptor.html       |   370 +
 .../interceptor/TimestampInterceptor.Builder.html  |   318 +
 .../TimestampInterceptor.Constants.html            |   334 +
 .../flume/interceptor/TimestampInterceptor.html    |   352 +
 .../class-use/HostInterceptor.Builder.html         |   126 +
 .../class-use/HostInterceptor.Constants.html       |   126 +
 .../interceptor/class-use/HostInterceptor.html     |   126 +
 .../interceptor/class-use/Interceptor.Builder.html |   258 +
 .../flume/interceptor/class-use/Interceptor.html   |   303 +
 .../class-use/InterceptorBuilderFactory.html       |   126 +
 .../interceptor/class-use/InterceptorChain.html    |   126 +
 .../interceptor/class-use/InterceptorType.html     |   175 +
 .../RegexExtractorInterceptor.Builder.html         |   126 +
 .../class-use/RegexExtractorInterceptor.html       |   126 +
 .../RegexExtractorInterceptorMillisSerializer.html |   126 +
 ...xExtractorInterceptorPassThroughSerializer.html |   126 +
 .../RegexExtractorInterceptorSerializer.html       |   175 +
 .../RegexFilteringInterceptor.Builder.html         |   126 +
 .../RegexFilteringInterceptor.Constants.html       |   126 +
 .../class-use/RegexFilteringInterceptor.html       |   126 +
 .../class-use/RemoveHeaderInterceptor.Builder.html |   126 +
 .../class-use/RemoveHeaderInterceptor.html         |   126 +
 .../SearchAndReplaceInterceptor.Builder.html       |   126 +
 .../class-use/SearchAndReplaceInterceptor.html     |   126 +
 .../class-use/StaticInterceptor.Builder.html       |   126 +
 .../class-use/StaticInterceptor.Constants.html     |   126 +
 .../interceptor/class-use/StaticInterceptor.html   |   126 +
 .../class-use/TimestampInterceptor.Builder.html    |   126 +
 .../class-use/TimestampInterceptor.Constants.html  |   126 +
 .../class-use/TimestampInterceptor.html            |   126 +
 .../apache/flume/interceptor/package-frame.html    |    52 +
 .../apache/flume/interceptor/package-summary.html  |   313 +
 .../org/apache/flume/interceptor/package-tree.html |   187 +
 .../org/apache/flume/interceptor/package-use.html  |   197 +
 .../org/apache/flume/lifecycle/LifecycleAware.html |   324 +
 .../flume/lifecycle/LifecycleController.html       |   362 +
 .../apache/flume/lifecycle/LifecycleException.html |   302 +
 .../org/apache/flume/lifecycle/LifecycleState.html |   416 +
 .../LifecycleSupervisor.MonitorRunnable.html       |   348 +
 .../lifecycle/LifecycleSupervisor.Status.html      |   395 +
 ...visor.SupervisorPolicy.AlwaysRestartPolicy.html |   263 +
 ...Supervisor.SupervisorPolicy.OnceOnlyPolicy.html |   263 +
 .../LifecycleSupervisor.SupervisorPolicy.html      |   270 +
 .../flume/lifecycle/LifecycleSupervisor.html       |   443 +
 .../flume/lifecycle/class-use/LifecycleAware.html  |  1192 ++
 .../lifecycle/class-use/LifecycleController.html   |   126 +
 .../lifecycle/class-use/LifecycleException.html    |   126 +
 .../flume/lifecycle/class-use/LifecycleState.html  |   404 +
 .../LifecycleSupervisor.MonitorRunnable.html       |   126 +
 .../class-use/LifecycleSupervisor.Status.html      |   126 +
 ...visor.SupervisorPolicy.AlwaysRestartPolicy.html |   126 +
 ...Supervisor.SupervisorPolicy.OnceOnlyPolicy.html |   126 +
 .../LifecycleSupervisor.SupervisorPolicy.html      |   185 +
 .../lifecycle/class-use/LifecycleSupervisor.html   |   126 +
 .../org/apache/flume/lifecycle/package-frame.html  |    39 +
 .../apache/flume/lifecycle/package-summary.html    |   217 +
 .../org/apache/flume/lifecycle/package-tree.html   |   173 +
 .../org/apache/flume/lifecycle/package-use.html    |   765 ++
 .../org/apache/flume/netty/filter/PatternRule.html |   334 +
 .../flume/netty/filter/class-use/PatternRule.html  |   126 +
 .../apache/flume/netty/filter/package-frame.html   |    21 +
 .../apache/flume/netty/filter/package-summary.html |   146 +
 .../apache/flume/netty/filter/package-tree.html    |   139 +
 .../org/apache/flume/netty/filter/package-use.html |   126 +
 .../flume/node/AbstractConfigurationProvider.html  |   325 +
 .../AbstractZooKeeperConfigurationProvider.html    |   392 +
 .../apidocs/org/apache/flume/node/Application.html |   381 +
 .../flume/node/ClasspathConfigurationSource.html   |   373 +
 .../node/ClasspathConfigurationSourceFactory.html  |   311 +
 .../apache/flume/node/ConfigurationProvider.html   |   227 +
 .../org/apache/flume/node/ConfigurationSource.html |   386 +
 .../flume/node/ConfigurationSourceFactory.html     |   262 +
 .../flume/node/EnvVarResolverProperties.html       |   368 +
 .../apache/flume/node/FileConfigurationSource.html |   389 +
 .../flume/node/FileConfigurationSourceFactory.html |   311 +
 .../apache/flume/node/HttpConfigurationSource.html |   393 +
 .../flume/node/HttpConfigurationSourceFactory.html |   311 +
 .../org/apache/flume/node/Initializable.html       |   232 +
 .../flume/node/MaterializedConfiguration.html      |   301 +
 ...PollingPropertiesFileConfigurationProvider.html |   280 +
 .../PollingZooKeeperConfigurationProvider.html     |   407 +
 .../node/PropertiesFileConfigurationProvider.html  |   414 +
 .../node/SimpleMaterializedConfiguration.html      |   390 +
 .../node/StaticZooKeeperConfigurationProvider.html |   324 +
 .../flume/node/UriConfigurationProvider.html       |   537 +
 .../class-use/AbstractConfigurationProvider.html   |   200 +
 .../AbstractZooKeeperConfigurationProvider.html    |   170 +
 .../apache/flume/node/class-use/Application.html   |   126 +
 .../class-use/ClasspathConfigurationSource.html    |   126 +
 .../ClasspathConfigurationSourceFactory.html       |   126 +
 .../node/class-use/ConfigurationProvider.html      |   204 +
 .../flume/node/class-use/ConfigurationSource.html  |   241 +
 .../node/class-use/ConfigurationSourceFactory.html |   180 +
 .../node/class-use/EnvVarResolverProperties.html   |   126 +
 .../node/class-use/FileConfigurationSource.html    |   126 +
 .../class-use/FileConfigurationSourceFactory.html  |   126 +
 .../node/class-use/HttpConfigurationSource.html    |   126 +
 .../class-use/HttpConfigurationSourceFactory.html  |   126 +
 .../apache/flume/node/class-use/Initializable.html |   126 +
 .../node/class-use/MaterializedConfiguration.html  |   202 +
 ...PollingPropertiesFileConfigurationProvider.html |   126 +
 .../PollingZooKeeperConfigurationProvider.html     |   126 +
 .../PropertiesFileConfigurationProvider.html       |   126 +
 .../class-use/SimpleMaterializedConfiguration.html |   126 +
 .../StaticZooKeeperConfigurationProvider.html      |   126 +
 .../node/class-use/UriConfigurationProvider.html   |   178 +
 .../flume/node/net/AuthorizationProvider.html      |   228 +
 .../flume/node/net/BasicAuthorizationProvider.html |   285 +
 .../apache/flume/node/net/LaxHostnameVerifier.html |   290 +
 .../flume/node/net/UrlConnectionFactory.html       |   380 +
 .../node/net/class-use/AuthorizationProvider.html  |   250 +
 .../net/class-use/BasicAuthorizationProvider.html  |   126 +
 .../node/net/class-use/LaxHostnameVerifier.html    |   126 +
 .../node/net/class-use/UrlConnectionFactory.html   |   126 +
 .../org/apache/flume/node/net/package-frame.html   |    27 +
 .../org/apache/flume/node/net/package-summary.html |   175 +
 .../org/apache/flume/node/net/package-tree.html    |   145 +
 .../org/apache/flume/node/net/package-use.html     |   182 +
 .../org/apache/flume/node/package-frame.html       |    44 +
 .../org/apache/flume/node/package-summary.html     |   263 +
 .../org/apache/flume/node/package-tree.html        |   183 +
 .../apidocs/org/apache/flume/node/package-use.html |   190 +
 .../apidocs/org/apache/flume/package-frame.html    |    62 +
 .../apidocs/org/apache/flume/package-summary.html  |   372 +
 .../apidocs/org/apache/flume/package-tree.html     |   224 +
 .../apidocs/org/apache/flume/package-use.html      |  1894 +++
 .../serialization/AbstractAvroEventSerializer.html |   543 +
 .../AvroEventDeserializer.AvroSchemaType.html      |   347 +
 .../AvroEventDeserializer.Builder.html             |   288 +
 .../flume/serialization/AvroEventDeserializer.html |   485 +
 .../AvroEventSerializerConfigurationConstants.html |   370 +
 .../BodyTextEventSerializer.Builder.html           |   288 +
 .../serialization/BodyTextEventSerializer.html     |   412 +
 .../flume/serialization/DecodeErrorPolicy.html     |   357 +
 .../serialization/DurablePositionTracker.html      |   350 +
 .../serialization/EventDeserializer.Builder.html   |   235 +
 .../flume/serialization/EventDeserializer.html     |   388 +
 .../serialization/EventDeserializerFactory.html    |   279 +
 .../flume/serialization/EventDeserializerType.html |   370 +
 .../flume/serialization/EventSerDe.Builder.html    |   233 +
 .../org/apache/flume/serialization/EventSerDe.html |   250 +
 .../serialization/EventSerializer.Builder.html     |   235 +
 .../flume/serialization/EventSerializer.html       |   467 +
 .../serialization/EventSerializerFactory.html      |   279 +
 .../flume/serialization/EventSerializerType.html   |   382 +
 .../FlumeEventAvroEventSerializer.Builder.html     |   288 +
 .../FlumeEventAvroEventSerializer.html             |   343 +
 .../HeaderAndBodyTextEventSerializer.Builder.html  |   288 +
 .../HeaderAndBodyTextEventSerializer.html          |   412 +
 .../flume/serialization/LengthMeasurable.html      |   235 +
 .../serialization/LineDeserializer.Builder.html    |   288 +
 .../flume/serialization/LineDeserializer.html      |   496 +
 .../flume/serialization/PositionTracker.html       |   288 +
 .../apache/flume/serialization/RemoteMarkable.html |   259 +
 .../org/apache/flume/serialization/Resettable.html |   269 +
 .../serialization/ResettableFileInputStream.html   |   732 ++
 .../flume/serialization/ResettableInputStream.html |   490 +
 .../org/apache/flume/serialization/Seekable.html   |   250 +
 .../class-use/AbstractAvroEventSerializer.html     |   166 +
 .../AvroEventDeserializer.AvroSchemaType.html      |   175 +
 .../class-use/AvroEventDeserializer.Builder.html   |   126 +
 .../class-use/AvroEventDeserializer.html           |   126 +
 .../AvroEventSerializerConfigurationConstants.html |   126 +
 .../class-use/BodyTextEventSerializer.Builder.html |   126 +
 .../class-use/BodyTextEventSerializer.html         |   126 +
 .../serialization/class-use/DecodeErrorPolicy.html |   212 +
 .../class-use/DurablePositionTracker.html          |   169 +
 .../class-use/EventDeserializer.Builder.html       |   207 +
 .../serialization/class-use/EventDeserializer.html |   246 +
 .../class-use/EventDeserializerFactory.html        |   126 +
 .../class-use/EventDeserializerType.html           |   175 +
 .../class-use/EventSerDe.Builder.html              |   126 +
 .../flume/serialization/class-use/EventSerDe.html  |   168 +
 .../class-use/EventSerializer.Builder.html         |   209 +
 .../serialization/class-use/EventSerializer.html   |   276 +
 .../class-use/EventSerializerFactory.html          |   126 +
 .../class-use/EventSerializerType.html             |   175 +
 .../FlumeEventAvroEventSerializer.Builder.html     |   126 +
 .../class-use/FlumeEventAvroEventSerializer.html   |   126 +
 .../HeaderAndBodyTextEventSerializer.Builder.html  |   126 +
 .../HeaderAndBodyTextEventSerializer.html          |   126 +
 .../serialization/class-use/LengthMeasurable.html  |   168 +
 .../class-use/LineDeserializer.Builder.html        |   126 +
 .../serialization/class-use/LineDeserializer.html  |   126 +
 .../serialization/class-use/PositionTracker.html   |   187 +
 .../serialization/class-use/RemoteMarkable.html    |   168 +
 .../flume/serialization/class-use/Resettable.html  |   237 +
 .../class-use/ResettableFileInputStream.html       |   126 +
 .../class-use/ResettableInputStream.html           |   239 +
 .../flume/serialization/class-use/Seekable.html    |   176 +
 .../apache/flume/serialization/package-frame.html  |    58 +
 .../flume/serialization/package-summary.html       |   337 +
 .../apache/flume/serialization/package-tree.html   |   213 +
 .../apache/flume/serialization/package-use.html    |   325 +
 .../apache/flume/shared/kafka/KafkaSSLUtil.html    |   250 +
 .../flume/shared/kafka/class-use/KafkaSSLUtil.html |   126 +
 .../apache/flume/shared/kafka/package-frame.html   |    21 +
 .../apache/flume/shared/kafka/package-summary.html |   144 +
 .../apache/flume/shared/kafka/package-tree.html    |   139 +
 .../org/apache/flume/shared/kafka/package-use.html |   126 +
 .../shared/kafka/test/KafkaPartitionTestUtil.html  |   414 +
 .../flume/shared/kafka/test/PartitionOption.html   |   355 +
 .../shared/kafka/test/PartitionTestScenario.html   |   367 +
 .../test/class-use/KafkaPartitionTestUtil.html     |   126 +
 .../kafka/test/class-use/PartitionOption.html      |   175 +
 .../test/class-use/PartitionTestScenario.html      |   205 +
 .../flume/shared/kafka/test/package-frame.html     |    26 +
 .../flume/shared/kafka/test/package-summary.html   |   163 +
 .../flume/shared/kafka/test/package-tree.html      |   152 +
 .../flume/shared/kafka/test/package-use.html       |   162 +
 .../org/apache/flume/sink/AbstractRpcSink.html     |   569 +
 .../flume/sink/AbstractSingleSinkProcessor.html    |   439 +
 .../org/apache/flume/sink/AbstractSink.html        |   466 +
 .../apache/flume/sink/AbstractSinkProcessor.html   |   403 +
 .../apache/flume/sink/AbstractSinkSelector.html    |   466 +
 .../apidocs/org/apache/flume/sink/AvroSink.html    |   401 +
 .../org/apache/flume/sink/DefaultSinkFactory.html  |   307 +
 .../apache/flume/sink/DefaultSinkProcessor.html    |   378 +
 .../apache/flume/sink/FailoverSinkProcessor.html   |   389 +
 .../LoadBalancingSinkProcessor.SinkSelector.html   |   296 +
 .../flume/sink/LoadBalancingSinkProcessor.html     |   604 +
 .../apidocs/org/apache/flume/sink/LoggerSink.html  |   431 +
 .../apidocs/org/apache/flume/sink/NullSink.html    |   494 +
 .../org/apache/flume/sink/RollingFileSink.html     |   487 +
 .../apidocs/org/apache/flume/sink/SinkGroup.html   |   331 +
 .../apache/flume/sink/SinkProcessorFactory.html    |   311 +
 .../apidocs/org/apache/flume/sink/ThriftSink.html  |   402 +
 .../flume/sink/class-use/AbstractRpcSink.html      |   178 +
 .../class-use/AbstractSingleSinkProcessor.html     |   169 +
 .../apache/flume/sink/class-use/AbstractSink.html  |   406 +
 .../sink/class-use/AbstractSinkProcessor.html      |   175 +
 .../flume/sink/class-use/AbstractSinkSelector.html |   126 +
 .../org/apache/flume/sink/class-use/AvroSink.html  |   126 +
 .../flume/sink/class-use/DefaultSinkFactory.html   |   126 +
 .../flume/sink/class-use/DefaultSinkProcessor.html |   126 +
 .../sink/class-use/FailoverSinkProcessor.html      |   126 +
 .../LoadBalancingSinkProcessor.SinkSelector.html   |   166 +
 .../sink/class-use/LoadBalancingSinkProcessor.html |   126 +
 .../apache/flume/sink/class-use/LoggerSink.html    |   126 +
 .../org/apache/flume/sink/class-use/NullSink.html  |   126 +
 .../flume/sink/class-use/RollingFileSink.html      |   126 +
 .../org/apache/flume/sink/class-use/SinkGroup.html |   126 +
 .../flume/sink/class-use/SinkProcessorFactory.html |   126 +
 .../apache/flume/sink/class-use/ThriftSink.html    |   126 +
 .../apache/flume/sink/hbase/AsyncHBaseSink.html    |   482 +
 .../sink/hbase/AsyncHbaseEventSerializer.html      |   343 +
 .../org/apache/flume/sink/hbase/BatchAware.html    |   226 +
 .../org/apache/flume/sink/hbase/HBaseSink.html     |   491 +
 .../hbase/HBaseSinkConfigurationConstants.html     |   633 +
 .../flume/sink/hbase/HbaseEventSerializer.html     |   312 +
 .../sink/hbase/RegexHbaseEventSerializer.html      |   733 ++
 .../hbase/SimpleAsyncHbaseEventSerializer.html     |   450 +
 .../hbase/SimpleHbaseEventSerializer.KeyType.html  |   371 +
 .../sink/hbase/SimpleHbaseEventSerializer.html     |   437 +
 .../flume/sink/hbase/SimpleRowKeyGenerator.html    |   334 +
 .../flume/sink/hbase/class-use/AsyncHBaseSink.html |   126 +
 .../hbase/class-use/AsyncHbaseEventSerializer.html |   170 +
 .../flume/sink/hbase/class-use/BatchAware.html     |   126 +
 .../flume/sink/hbase/class-use/HBaseSink.html      |   126 +
 .../class-use/HBaseSinkConfigurationConstants.html |   126 +
 .../sink/hbase/class-use/HbaseEventSerializer.html |   176 +
 .../hbase/class-use/RegexHbaseEventSerializer.html |   126 +
 .../class-use/SimpleAsyncHbaseEventSerializer.html |   126 +
 .../SimpleHbaseEventSerializer.KeyType.html        |   175 +
 .../class-use/SimpleHbaseEventSerializer.html      |   126 +
 .../hbase/class-use/SimpleRowKeyGenerator.html     |   126 +
 .../org/apache/flume/sink/hbase/package-frame.html |    37 +
 .../apache/flume/sink/hbase/package-summary.html   |   233 +
 .../org/apache/flume/sink/hbase/package-tree.html  |   177 +
 .../org/apache/flume/sink/hbase/package-use.html   |   171 +
 .../org/apache/flume/sink/hbase2/BatchAware.html   |   226 +
 .../flume/sink/hbase2/HBase2EventSerializer.html   |   312 +
 .../org/apache/flume/sink/hbase2/HBase2Sink.html   |   491 +
 .../hbase2/HBase2SinkConfigurationConstants.html   |   582 +
 .../sink/hbase2/RegexHBase2EventSerializer.html    |   733 ++
 .../SimpleHBase2EventSerializer.KeyType.html       |   371 +
 .../sink/hbase2/SimpleHBase2EventSerializer.html   |   437 +
 .../flume/sink/hbase2/SimpleRowKeyGenerator.html   |   334 +
 .../flume/sink/hbase2/class-use/BatchAware.html    |   126 +
 .../hbase2/class-use/HBase2EventSerializer.html    |   176 +
 .../flume/sink/hbase2/class-use/HBase2Sink.html    |   126 +
 .../HBase2SinkConfigurationConstants.html          |   126 +
 .../class-use/RegexHBase2EventSerializer.html      |   126 +
 .../SimpleHBase2EventSerializer.KeyType.html       |   175 +
 .../class-use/SimpleHBase2EventSerializer.html     |   126 +
 .../hbase2/class-use/SimpleRowKeyGenerator.html    |   126 +
 .../apache/flume/sink/hbase2/package-frame.html    |    34 +
 .../apache/flume/sink/hbase2/package-summary.html  |   212 +
 .../org/apache/flume/sink/hbase2/package-tree.html |   173 +
 .../org/apache/flume/sink/hbase2/package-use.html  |   165 +
 .../apache/flume/sink/hdfs/AbstractHDFSWriter.html |   433 +
 .../sink/hdfs/AvroEventSerializer.Builder.html     |   288 +
 .../flume/sink/hdfs/AvroEventSerializer.html       |   511 +
 .../flume/sink/hdfs/BucketClosedException.html     |   274 +
 .../flume/sink/hdfs/HDFSCompressedDataStream.html  |   403 +
 .../org/apache/flume/sink/hdfs/HDFSDataStream.html |   445 +
 .../sink/hdfs/HDFSEventSink.WriterCallback.html    |   227 +
 .../org/apache/flume/sink/hdfs/HDFSEventSink.html  |   515 +
 .../apache/flume/sink/hdfs/HDFSSequenceFile.html   |   429 +
 .../sink/hdfs/HDFSTextSerializer.Builder.html      |   286 +
 .../apache/flume/sink/hdfs/HDFSTextSerializer.html |   350 +
 .../sink/hdfs/HDFSWritableSerializer.Builder.html  |   286 +
 .../flume/sink/hdfs/HDFSWritableSerializer.html    |   350 +
 .../org/apache/flume/sink/hdfs/HDFSWriter.html     |   335 +
 .../apache/flume/sink/hdfs/HDFSWriterFactory.html  |   278 +
 .../org/apache/flume/sink/hdfs/KerberosUser.html   |   340 +
 .../sink/hdfs/SequenceFileSerializer.Builder.html  |   233 +
 .../sink/hdfs/SequenceFileSerializer.Record.html   |   293 +
 .../flume/sink/hdfs/SequenceFileSerializer.html    |   290 +
 .../sink/hdfs/SequenceFileSerializerFactory.html   |   239 +
 .../sink/hdfs/SequenceFileSerializerType.html      |   368 +
 .../sink/hdfs/class-use/AbstractHDFSWriter.html    |   174 +
 .../class-use/AvroEventSerializer.Builder.html     |   126 +
 .../sink/hdfs/class-use/AvroEventSerializer.html   |   126 +
 .../sink/hdfs/class-use/BucketClosedException.html |   126 +
 .../hdfs/class-use/HDFSCompressedDataStream.html   |   126 +
 .../flume/sink/hdfs/class-use/HDFSDataStream.html  |   126 +
 .../class-use/HDFSEventSink.WriterCallback.html    |   126 +
 .../flume/sink/hdfs/class-use/HDFSEventSink.html   |   126 +
 .../sink/hdfs/class-use/HDFSSequenceFile.html      |   126 +
 .../hdfs/class-use/HDFSTextSerializer.Builder.html |   126 +
 .../sink/hdfs/class-use/HDFSTextSerializer.html    |   126 +
 .../class-use/HDFSWritableSerializer.Builder.html  |   126 +
 .../hdfs/class-use/HDFSWritableSerializer.html     |   126 +
 .../flume/sink/hdfs/class-use/HDFSWriter.html      |   191 +
 .../sink/hdfs/class-use/HDFSWriterFactory.html     |   164 +
 .../flume/sink/hdfs/class-use/KerberosUser.html    |   126 +
 .../class-use/SequenceFileSerializer.Builder.html  |   183 +
 .../class-use/SequenceFileSerializer.Record.html   |   176 +
 .../hdfs/class-use/SequenceFileSerializer.html     |   191 +
 .../class-use/SequenceFileSerializerFactory.html   |   126 +
 .../hdfs/class-use/SequenceFileSerializerType.html |   175 +
 .../org/apache/flume/sink/hdfs/package-frame.html  |    50 +
 .../apache/flume/sink/hdfs/package-summary.html    |   267 +
 .../org/apache/flume/sink/hdfs/package-tree.html   |   200 +
 .../org/apache/flume/sink/hdfs/package-use.html    |   182 +
 .../apidocs/org/apache/flume/sink/hive/Config.html |   585 +
 .../sink/hive/HiveDelimitedTextSerializer.html     |   482 +
 .../flume/sink/hive/HiveEventSerializer.html       |   296 +
 .../apache/flume/sink/hive/HiveJsonSerializer.html |   414 +
 .../org/apache/flume/sink/hive/HiveSink.html       |   463 +
 .../apache/flume/sink/hive/class-use/Config.html   |   126 +
 .../class-use/HiveDelimitedTextSerializer.html     |   126 +
 .../sink/hive/class-use/HiveEventSerializer.html   |   176 +
 .../sink/hive/class-use/HiveJsonSerializer.html    |   126 +
 .../apache/flume/sink/hive/class-use/HiveSink.html |   126 +
 .../org/apache/flume/sink/hive/package-frame.html  |    28 +
 .../apache/flume/sink/hive/package-summary.html    |   177 +
 .../org/apache/flume/sink/hive/package-tree.html   |   154 +
 .../org/apache/flume/sink/hive/package-use.html    |   159 +
 .../org/apache/flume/sink/http/HttpSink.html       |   420 +
 .../apache/flume/sink/http/class-use/HttpSink.html |   126 +
 .../org/apache/flume/sink/http/package-frame.html  |    21 +
 .../apache/flume/sink/http/package-summary.html    |   157 +
 .../org/apache/flume/sink/http/package-tree.html   |   143 +
 .../org/apache/flume/sink/http/package-use.html    |   126 +
 .../sink/irc/IRCSink.IRCConnectionListener.html    |   642 +
 .../apidocs/org/apache/flume/sink/irc/IRCSink.html |   425 +
 .../class-use/IRCSink.IRCConnectionListener.html   |   126 +
 .../apache/flume/sink/irc/class-use/IRCSink.html   |   126 +
 .../org/apache/flume/sink/irc/package-frame.html   |    22 +
 .../org/apache/flume/sink/irc/package-summary.html |   148 +
 .../org/apache/flume/sink/irc/package-tree.html    |   144 +
 .../org/apache/flume/sink/irc/package-use.html     |   126 +
 .../org/apache/flume/sink/kafka/KafkaSink.html     |   492 +
 .../flume/sink/kafka/KafkaSinkConstants.html       |   670 +
 .../flume/sink/kafka/class-use/KafkaSink.html      |   126 +
 .../sink/kafka/class-use/KafkaSinkConstants.html   |   126 +
 .../org/apache/flume/sink/kafka/package-frame.html |    22 +
 .../apache/flume/sink/kafka/package-summary.html   |   150 +
 .../org/apache/flume/sink/kafka/package-tree.html  |   144 +
 .../org/apache/flume/sink/kafka/package-use.html   |   126 +
 .../org/apache/flume/sink/package-frame.html       |    40 +
 .../org/apache/flume/sink/package-summary.html     |   254 +
 .../org/apache/flume/sink/package-tree.html        |   179 +
 .../apidocs/org/apache/flume/sink/package-use.html |   336 +
 .../solr/morphline/BlobDeserializer.Builder.html   |   289 +
 .../sink/solr/morphline/BlobDeserializer.html      |   506 +
 .../flume/sink/solr/morphline/BlobHandler.html     |   398 +
 .../sink/solr/morphline/MorphlineHandler.html      |   323 +
 .../sink/solr/morphline/MorphlineHandlerImpl.html  |   489 +
 .../morphline/MorphlineInterceptor.Builder.html    |   318 +
 .../sink/solr/morphline/MorphlineInterceptor.html  |   388 +
 .../flume/sink/solr/morphline/MorphlineSink.html   |   550 +
 .../sink/solr/morphline/MorphlineSolrSink.html     |   371 +
 .../solr/morphline/UUIDInterceptor.Builder.html    |   318 +
 .../flume/sink/solr/morphline/UUIDInterceptor.html |   499 +
 .../class-use/BlobDeserializer.Builder.html        |   126 +
 .../solr/morphline/class-use/BlobDeserializer.html |   167 +
 .../sink/solr/morphline/class-use/BlobHandler.html |   126 +
 .../solr/morphline/class-use/MorphlineHandler.html |   186 +
 .../morphline/class-use/MorphlineHandlerImpl.html  |   126 +
 .../class-use/MorphlineInterceptor.Builder.html    |   126 +
 .../morphline/class-use/MorphlineInterceptor.html  |   166 +
 .../solr/morphline/class-use/MorphlineSink.html    |   169 +
 .../morphline/class-use/MorphlineSolrSink.html     |   126 +
 .../class-use/UUIDInterceptor.Builder.html         |   126 +
 .../solr/morphline/class-use/UUIDInterceptor.html  |   166 +
 .../flume/sink/solr/morphline/package-frame.html   |    34 +
 .../flume/sink/solr/morphline/package-summary.html |   223 +
 .../flume/sink/solr/morphline/package-tree.html    |   163 +
 .../flume/sink/solr/morphline/package-use.html     |   184 +
 .../flume/source/AbstractEventDrivenSource.html    |   282 +
 .../flume/source/AbstractPollableSource.html       |   450 +
 .../org/apache/flume/source/AbstractSource.html    |   444 +
 .../org/apache/flume/source/AvroSource.html        |   532 +
 .../apache/flume/source/BasicSourceSemantics.html  |   579 +
 .../apache/flume/source/DefaultSourceFactory.html  |   307 +
 .../flume/source/EventDrivenSourceRunner.html      |   363 +
 .../org/apache/flume/source/ExecSource.html        |   505 +
 .../source/ExecSourceConfigurationConstants.html   |   506 +
 .../flume/source/MultiportSyslogTCPSource.html     |   475 +
 .../org/apache/flume/source/NetcatSource.html      |   439 +
 .../source/NetcatSourceConfigurationConstants.html |   396 +
 .../source/NetcatUdpSource.NetcatHandler.html      |   348 +
 .../org/apache/flume/source/NetcatUdpSource.html   |   421 +
 .../flume/source/PollableSourceConstants.html      |   330 +
 .../source/PollableSourceRunner.PollingRunner.html |   286 +
 .../apache/flume/source/PollableSourceRunner.html  |   393 +
 .../flume/source/SequenceGeneratorSource.html      |   438 +
 ...poolDirectorySource.SpoolDirectoryRunnable.html |   288 +
 .../apache/flume/source/SpoolDirectorySource.html  |   533 +
 ...ySourceConfigurationConstants.ConsumeOrder.html |   360 +
 ...SpoolDirectorySourceConfigurationConstants.html |  1033 ++
 .../source/SslContextAwareAbstractSource.html      |   462 +
 .../org/apache/flume/source/StressSource.html      |   461 +
 .../org/apache/flume/source/SyslogParser.html      |   337 +
 .../source/SyslogSourceConfigurationConstants.html |   700 +
 .../source/SyslogTcpSource.SyslogTcpHandler.html   |   314 +
 .../org/apache/flume/source/SyslogTcpSource.html   |   435 +
 .../source/SyslogUDPSource.SyslogUdpHandler.html   |   314 +
 .../org/apache/flume/source/SyslogUDPSource.html   |   465 +
 .../flume/source/SyslogUtils.SyslogStatus.html     |   372 +
 .../org/apache/flume/source/SyslogUtils.html       |   822 ++
 .../org/apache/flume/source/ThriftSource.html      |   553 +
 .../flume/source/avroLegacy/AvroLegacySource.html  |   581 +
 .../avroLegacy/class-use/AvroLegacySource.html     |   126 +
 .../flume/source/avroLegacy/package-frame.html     |    21 +
 .../flume/source/avroLegacy/package-summary.html   |   148 +
 .../flume/source/avroLegacy/package-tree.html      |   143 +
 .../flume/source/avroLegacy/package-use.html       |   126 +
 .../class-use/AbstractEventDrivenSource.html       |   126 +
 .../source/class-use/AbstractPollableSource.html   |   219 +
 .../flume/source/class-use/AbstractSource.html     |   391 +
 .../apache/flume/source/class-use/AvroSource.html  |   126 +
 .../source/class-use/BasicSourceSemantics.html     |   239 +
 .../source/class-use/DefaultSourceFactory.html     |   126 +
 .../source/class-use/EventDrivenSourceRunner.html  |   126 +
 .../apache/flume/source/class-use/ExecSource.html  |   126 +
 .../ExecSourceConfigurationConstants.html          |   126 +
 .../source/class-use/MultiportSyslogTCPSource.html |   126 +
 .../flume/source/class-use/NetcatSource.html       |   126 +
 .../NetcatSourceConfigurationConstants.html        |   126 +
 .../class-use/NetcatUdpSource.NetcatHandler.html   |   126 +
 .../flume/source/class-use/NetcatUdpSource.html    |   126 +
 .../source/class-use/PollableSourceConstants.html  |   126 +
 .../PollableSourceRunner.PollingRunner.html        |   126 +
 .../source/class-use/PollableSourceRunner.html     |   126 +
 .../source/class-use/SequenceGeneratorSource.html  |   126 +
 ...poolDirectorySource.SpoolDirectoryRunnable.html |   126 +
 .../source/class-use/SpoolDirectorySource.html     |   126 +
 ...ySourceConfigurationConstants.ConsumeOrder.html |   210 +
 ...SpoolDirectorySourceConfigurationConstants.html |   126 +
 .../class-use/SslContextAwareAbstractSource.html   |   210 +
 .../flume/source/class-use/StressSource.html       |   126 +
 .../flume/source/class-use/SyslogParser.html       |   126 +
 .../SyslogSourceConfigurationConstants.html        |   126 +
 .../SyslogTcpSource.SyslogTcpHandler.html          |   126 +
 .../flume/source/class-use/SyslogTcpSource.html    |   126 +
 .../SyslogUDPSource.SyslogUdpHandler.html          |   126 +
 .../flume/source/class-use/SyslogUDPSource.html    |   126 +
 .../source/class-use/SyslogUtils.SyslogStatus.html |   175 +
 .../apache/flume/source/class-use/SyslogUtils.html |   126 +
 .../flume/source/class-use/ThriftSource.html       |   126 +
 .../org/apache/flume/source/http/BLOBHandler.html  |   404 +
 .../flume/source/http/HTTPBadRequestException.html |   302 +
 .../org/apache/flume/source/http/HTTPSource.html   |   439 +
 .../http/HTTPSourceConfigurationConstants.html     |   448 +
 .../flume/source/http/HTTPSourceHandler.html       |   258 +
 .../org/apache/flume/source/http/JSONHandler.html  |   363 +
 .../flume/source/http/class-use/BLOBHandler.html   |   126 +
 .../http/class-use/HTTPBadRequestException.html    |   169 +
 .../flume/source/http/class-use/HTTPSource.html    |   126 +
 .../HTTPSourceConfigurationConstants.html          |   126 +
 .../source/http/class-use/HTTPSourceHandler.html   |   200 +
 .../flume/source/http/class-use/JSONHandler.html   |   126 +
 .../apache/flume/source/http/package-frame.html    |    32 +
 .../apache/flume/source/http/package-summary.html  |   195 +
 .../org/apache/flume/source/http/package-tree.html |   175 +
 .../org/apache/flume/source/http/package-use.html  |   184 +
 .../jms/DefaultJMSMessageConverter.Builder.html    |   286 +
 .../source/jms/DefaultJMSMessageConverter.html     |   283 +
 .../flume/source/jms/InitialContextFactory.html    |   278 +
 .../flume/source/jms/JMSDestinationLocator.html    |   343 +
 .../flume/source/jms/JMSDestinationType.html       |   343 +
 .../source/jms/JMSMessageConverter.Builder.html    |   233 +
 .../flume/source/jms/JMSMessageConverter.html      |   260 +
 .../org/apache/flume/source/jms/JMSSource.html     |   434 +
 .../flume/source/jms/JMSSourceConfiguration.html   |   755 ++
 .../DefaultJMSMessageConverter.Builder.html        |   126 +
 .../jms/class-use/DefaultJMSMessageConverter.html  |   126 +
 .../jms/class-use/InitialContextFactory.html       |   164 +
 .../jms/class-use/JMSDestinationLocator.html       |   175 +
 .../source/jms/class-use/JMSDestinationType.html   |   175 +
 .../jms/class-use/JMSMessageConverter.Builder.html |   166 +
 .../source/jms/class-use/JMSMessageConverter.html  |   186 +
 .../flume/source/jms/class-use/JMSSource.html      |   126 +
 .../jms/class-use/JMSSourceConfiguration.html      |   126 +
 .../org/apache/flume/source/jms/package-frame.html |    35 +
 .../apache/flume/source/jms/package-summary.html   |   206 +
 .../org/apache/flume/source/jms/package-tree.html  |   169 +
 .../org/apache/flume/source/jms/package-use.html   |   176 +
 .../flume/source/kafka/KafkaSource.Subscriber.html |   294 +
 .../org/apache/flume/source/kafka/KafkaSource.html |   485 +
 .../flume/source/kafka/KafkaSourceConstants.html   |   738 ++
 .../kafka/class-use/KafkaSource.Subscriber.html    |   126 +
 .../flume/source/kafka/class-use/KafkaSource.html  |   126 +
 .../kafka/class-use/KafkaSourceConstants.html      |   126 +
 .../apache/flume/source/kafka/package-frame.html   |    22 +
 .../apache/flume/source/kafka/package-summary.html |   150 +
 .../apache/flume/source/kafka/package-tree.html    |   149 +
 .../org/apache/flume/source/kafka/package-use.html |   126 +
 .../org/apache/flume/source/package-frame.html     |    52 +
 .../org/apache/flume/source/package-summary.html   |   313 +
 .../org/apache/flume/source/package-tree.html      |   210 +
 .../org/apache/flume/source/package-use.html       |   431 +
 .../flume/source/scribe/LogEntry._Fields.html      |   433 +
 .../org/apache/flume/source/scribe/LogEntry.html   |   765 ++
 .../org/apache/flume/source/scribe/ResultCode.html |   384 +
 .../source/scribe/Scribe.AsyncClient.Factory.html  |   288 +
 .../source/scribe/Scribe.AsyncClient.Log_call.html |   359 +
 .../flume/source/scribe/Scribe.AsyncClient.html    |   345 +
 .../flume/source/scribe/Scribe.AsyncIface.html     |   238 +
 .../source/scribe/Scribe.AsyncProcessor.Log.html   |   355 +
 .../flume/source/scribe/Scribe.AsyncProcessor.html |   299 +
 .../flume/source/scribe/Scribe.Client.Factory.html |   305 +
 .../apache/flume/source/scribe/Scribe.Client.html  |   385 +
 .../apache/flume/source/scribe/Scribe.Iface.html   |   236 +
 .../source/scribe/Scribe.Log_args._Fields.html     |   421 +
 .../flume/source/scribe/Scribe.Log_args.html       |   723 +
 .../source/scribe/Scribe.Log_result._Fields.html   |   425 +
 .../flume/source/scribe/Scribe.Log_result.html     |   696 +
 .../flume/source/scribe/Scribe.Processor.Log.html  |   334 +
 .../flume/source/scribe/Scribe.Processor.html      |   307 +
 .../org/apache/flume/source/scribe/Scribe.html     |   288 +
 .../apache/flume/source/scribe/ScribeSource.html   |   435 +
 .../source/scribe/class-use/LogEntry._Fields.html  |   235 +
 .../flume/source/scribe/class-use/LogEntry.html    |   291 +
 .../flume/source/scribe/class-use/ResultCode.html  |   267 +
 .../class-use/Scribe.AsyncClient.Factory.html      |   126 +
 .../class-use/Scribe.AsyncClient.Log_call.html     |   126 +
 .../scribe/class-use/Scribe.AsyncClient.html       |   166 +
 .../source/scribe/class-use/Scribe.AsyncIface.html |   183 +
 .../class-use/Scribe.AsyncProcessor.Log.html       |   126 +
 .../scribe/class-use/Scribe.AsyncProcessor.html    |   126 +
 .../scribe/class-use/Scribe.Client.Factory.html    |   126 +
 .../source/scribe/class-use/Scribe.Client.html     |   171 +
 .../source/scribe/class-use/Scribe.Iface.html      |   183 +
 .../scribe/class-use/Scribe.Log_args._Fields.html  |   235 +
 .../source/scribe/class-use/Scribe.Log_args.html   |   219 +
 .../class-use/Scribe.Log_result._Fields.html       |   235 +
 .../source/scribe/class-use/Scribe.Log_result.html |   205 +
 .../scribe/class-use/Scribe.Processor.Log.html     |   126 +
 .../source/scribe/class-use/Scribe.Processor.html  |   126 +
 .../flume/source/scribe/class-use/Scribe.html      |   126 +
 .../source/scribe/class-use/ScribeSource.html      |   126 +
 .../apache/flume/source/scribe/package-frame.html  |    46 +
 .../flume/source/scribe/package-summary.html       |   251 +
 .../apache/flume/source/scribe/package-tree.html   |   204 +
 .../apache/flume/source/scribe/package-use.html    |   195 +
 .../flume/source/shaded/guava/RateLimiter.html     |   620 +
 .../flume/source/shaded/guava/Stopwatch.html       |   487 +
 .../source/shaded/guava/Uninterruptibles.html      |   494 +
 .../source/shaded/guava/class-use/RateLimiter.html |   181 +
 .../source/shaded/guava/class-use/Stopwatch.html   |   209 +
 .../shaded/guava/class-use/Uninterruptibles.html   |   126 +
 .../flume/source/shaded/guava/package-frame.html   |    23 +
 .../flume/source/shaded/guava/package-summary.html |   158 +
 .../flume/source/shaded/guava/package-tree.html    |   141 +
 .../flume/source/shaded/guava/package-use.html     |   166 +
 .../ReliableTaildirEventReader.Builder.html        |   387 +
 .../source/taildir/ReliableTaildirEventReader.html |   483 +
 .../org/apache/flume/source/taildir/TailFile.html  |   520 +
 .../flume/source/taildir/TaildirMatcher.html       |   315 +
 .../apache/flume/source/taildir/TaildirSource.html |   522 +
 .../TaildirSourceConfigurationConstants.html       |   704 +
 .../ReliableTaildirEventReader.Builder.html        |   194 +
 .../class-use/ReliableTaildirEventReader.html      |   166 +
 .../flume/source/taildir/class-use/TailFile.html   |   184 +
 .../source/taildir/class-use/TaildirMatcher.html   |   126 +
 .../source/taildir/class-use/TaildirSource.html    |   126 +
 .../TaildirSourceConfigurationConstants.html       |   126 +
 .../apache/flume/source/taildir/package-frame.html |    26 +
 .../flume/source/taildir/package-summary.html      |   168 +
 .../apache/flume/source/taildir/package-tree.html  |   148 +
 .../apache/flume/source/taildir/package-use.html   |   167 +
 .../ThriftLegacySource.ThriftHandler.html          |   286 +
 .../source/thriftLegacy/ThriftLegacySource.html    |   408 +
 .../ThriftLegacySource.ThriftHandler.html          |   126 +
 .../thriftLegacy/class-use/ThriftLegacySource.html |   126 +
 .../flume/source/thriftLegacy/package-frame.html   |    22 +
 .../flume/source/thriftLegacy/package-summary.html |   148 +
 .../flume/source/thriftLegacy/package-tree.html    |   144 +
 .../flume/source/thriftLegacy/package-use.html     |   126 +
 .../apache/flume/source/twitter/TwitterSource.html |   522 +
 .../source/twitter/class-use/TwitterSource.html    |   126 +
 .../apache/flume/source/twitter/package-frame.html |    21 +
 .../flume/source/twitter/package-summary.html      |   148 +
 .../apache/flume/source/twitter/package-tree.html  |   143 +
 .../apache/flume/source/twitter/package-use.html   |   126 +
 .../apidocs/org/apache/flume/thrift/Status.html    |   408 +
 .../flume/thrift/ThriftFlumeEvent._Fields.html     |   433 +
 .../org/apache/flume/thrift/ThriftFlumeEvent.html  |   819 ++
 .../ThriftSourceProtocol.AsyncClient.Factory.html  |   288 +
 ...ourceProtocol.AsyncClient.appendBatch_call.html |   359 +
 ...riftSourceProtocol.AsyncClient.append_call.html |   359 +
 .../thrift/ThriftSourceProtocol.AsyncClient.html   |   371 +
 .../thrift/ThriftSourceProtocol.AsyncIface.html    |   258 +
 ...ThriftSourceProtocol.AsyncProcessor.append.html |   355 +
 ...tSourceProtocol.AsyncProcessor.appendBatch.html |   355 +
 .../ThriftSourceProtocol.AsyncProcessor.html       |   303 +
 .../ThriftSourceProtocol.Client.Factory.html       |   305 +
 .../flume/thrift/ThriftSourceProtocol.Client.html  |   441 +
 .../flume/thrift/ThriftSourceProtocol.Iface.html   |   254 +
 .../ThriftSourceProtocol.Processor.append.html     |   334 +
 ...ThriftSourceProtocol.Processor.appendBatch.html |   334 +
 .../thrift/ThriftSourceProtocol.Processor.html     |   311 +
 ...iftSourceProtocol.appendBatch_args._Fields.html |   421 +
 .../ThriftSourceProtocol.appendBatch_args.html     |   723 +
 ...tSourceProtocol.appendBatch_result._Fields.html |   425 +
 .../ThriftSourceProtocol.appendBatch_result.html   |   696 +
 .../ThriftSourceProtocol.append_args._Fields.html  |   421 +
 .../thrift/ThriftSourceProtocol.append_args.html   |   684 +
 ...ThriftSourceProtocol.append_result._Fields.html |   425 +
 .../thrift/ThriftSourceProtocol.append_result.html |   696 +
 .../apache/flume/thrift/ThriftSourceProtocol.html  |   296 +
 .../org/apache/flume/thrift/class-use/Status.html  |   309 +
 .../thrift/class-use/ThriftFlumeEvent._Fields.html |   235 +
 .../flume/thrift/class-use/ThriftFlumeEvent.html   |   348 +
 .../ThriftSourceProtocol.AsyncClient.Factory.html  |   126 +
 ...ourceProtocol.AsyncClient.appendBatch_call.html |   126 +
 ...riftSourceProtocol.AsyncClient.append_call.html |   126 +
 .../ThriftSourceProtocol.AsyncClient.html          |   166 +
 .../class-use/ThriftSourceProtocol.AsyncIface.html |   187 +
 ...ThriftSourceProtocol.AsyncProcessor.append.html |   126 +
 ...tSourceProtocol.AsyncProcessor.appendBatch.html |   126 +
 .../ThriftSourceProtocol.AsyncProcessor.html       |   126 +
 .../ThriftSourceProtocol.Client.Factory.html       |   126 +
 .../class-use/ThriftSourceProtocol.Client.html     |   171 +
 .../class-use/ThriftSourceProtocol.Iface.html      |   187 +
 .../ThriftSourceProtocol.Processor.append.html     |   126 +
 ...ThriftSourceProtocol.Processor.appendBatch.html |   126 +
 .../class-use/ThriftSourceProtocol.Processor.html  |   126 +
 ...iftSourceProtocol.appendBatch_args._Fields.html |   235 +
 .../ThriftSourceProtocol.appendBatch_args.html     |   219 +
 ...tSourceProtocol.appendBatch_result._Fields.html |   235 +
 .../ThriftSourceProtocol.appendBatch_result.html   |   205 +
 .../ThriftSourceProtocol.append_args._Fields.html  |   235 +
 .../ThriftSourceProtocol.append_args.html          |   219 +
 ...ThriftSourceProtocol.append_result._Fields.html |   235 +
 .../ThriftSourceProtocol.append_result.html        |   205 +
 .../thrift/class-use/ThriftSourceProtocol.html     |   126 +
 .../org/apache/flume/thrift/package-frame.html     |    52 +
 .../org/apache/flume/thrift/package-summary.html   |   276 +
 .../org/apache/flume/thrift/package-tree.html      |   206 +
 .../org/apache/flume/thrift/package-use.html       |   211 +
 .../org/apache/flume/tools/DirectMemoryUtils.html  |   304 +
 .../apache/flume/tools/EventValidator.Builder.html |   239 +
 .../org/apache/flume/tools/EventValidator.html     |   289 +
 .../flume/tools/FileChannelIntegrityTool.html      |   323 +
 .../apache/flume/tools/FlumeBeanConfigurator.html  |   361 +
 .../apidocs/org/apache/flume/tools/FlumeTool.html  |   232 +
 .../org/apache/flume/tools/FlumeToolType.html      |   357 +
 .../org/apache/flume/tools/FlumeToolsMain.html     |   269 +
 .../org/apache/flume/tools/GetJavaProperty.html    |   274 +
 .../flume/tools/HTTPServerConstraintUtil.html      |   247 +
 .../org/apache/flume/tools/PlatformDetect.html     |   281 +
 .../apache/flume/tools/TimestampRoundDownUtil.html |   444 +
 .../org/apache/flume/tools/VersionInfo.html        |   425 +
 .../flume/tools/class-use/DirectMemoryUtils.html   |   126 +
 .../tools/class-use/EventValidator.Builder.html    |   126 +
 .../flume/tools/class-use/EventValidator.html      |   179 +
 .../tools/class-use/FileChannelIntegrityTool.html  |   126 +
 .../tools/class-use/FlumeBeanConfigurator.html     |   126 +
 .../apache/flume/tools/class-use/FlumeTool.html    |   183 +
 .../flume/tools/class-use/FlumeToolType.html       |   175 +
 .../flume/tools/class-use/FlumeToolsMain.html      |   126 +
 .../flume/tools/class-use/GetJavaProperty.html     |   126 +
 .../tools/class-use/HTTPServerConstraintUtil.html  |   126 +
 .../flume/tools/class-use/PlatformDetect.html      |   126 +
 .../tools/class-use/TimestampRoundDownUtil.html    |   126 +
 .../apache/flume/tools/class-use/VersionInfo.html  |   126 +
 .../org/apache/flume/tools/package-frame.html      |    39 +
 .../org/apache/flume/tools/package-summary.html    |   226 +
 .../org/apache/flume/tools/package-tree.html       |   169 +
 .../org/apache/flume/tools/package-use.html        |   168 +
 .../org/apache/flume/util/OrderSelector.html       |   415 +
 .../org/apache/flume/util/RandomOrderSelector.html |   293 +
 .../apache/flume/util/RoundRobinOrderSelector.html |   293 +
 .../apidocs/org/apache/flume/util/SSLUtil.html     |   403 +
 .../apache/flume/util/SpecificOrderIterator.html   |   329 +
 .../apache/flume/util/class-use/OrderSelector.html |   174 +
 .../flume/util/class-use/RandomOrderSelector.html  |   126 +
 .../util/class-use/RoundRobinOrderSelector.html    |   126 +
 .../org/apache/flume/util/class-use/SSLUtil.html   |   126 +
 .../util/class-use/SpecificOrderIterator.html      |   126 +
 .../org/apache/flume/util/package-frame.html       |    25 +
 .../org/apache/flume/util/package-summary.html     |   170 +
 .../org/apache/flume/util/package-tree.html        |   146 +
 .../apidocs/org/apache/flume/util/package-use.html |   162 +
 .../content/1.10.0/apidocs/overview-frame.html     |    79 +
 .../content/1.10.0/apidocs/overview-summary.html   |   374 +
 .../content/1.10.0/apidocs/overview-tree.html      |  1145 ++
 .../releases/content/1.10.0/apidocs/package-list   |    58 +
 content/releases/content/1.10.0/apidocs/script.js  |    30 +
 .../content/1.10.0/apidocs/serialized-form.html    |  1590 +++
 .../releases/content/1.10.0/apidocs/stylesheet.css |   574 +
 1440 files changed, 421064 insertions(+)

diff --git a/content/releases/content/1.10.0/FlumeDeveloperGuide.html b/content/releases/content/1.10.0/FlumeDeveloperGuide.html
new file mode 100644
index 0000000..e3d6b28
--- /dev/null
+++ b/content/releases/content/1.10.0/FlumeDeveloperGuide.html
@@ -0,0 +1,1074 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>Flume 1.10.0 Developer Guide &mdash; Apache Flume</title>
+    
+    <link rel="stylesheet" href="_static/flume.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    '',
+        VERSION:     '',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <link rel="top" title="Apache Flume" href="index.html" />
+    <link rel="up" title="Documentation" href="documentation.html" />
+    <link rel="next" title="Releases" href="releases/index.html" />
+    <link rel="prev" title="Flume 1.10.0 User Guide" href="FlumeUserGuide.html" /> 
+  </head>
+  <body>
+<div class="header">
+  <table width="100%" border="0">
+    <tr>
+      <td width="10%">
+        <div class="logo">
+          <a href="index.html">
+            <img class="logo" src="_static/flume-logo.png" alt="Logo"/>
+          </a>
+        </div>
+      </td>
+      <td width="2%">
+          <span class="trademark">&trade;</span>
+      </td>
+      <td width="68%" align="center" class="pageTitle">Apache Flume<sup><span class="trademark">&trade;</span></sup>
+      </td>
+      <td width="20%">
+          <a href="http://www.apache.org">
+            <img src="_static/feather-small.png" alt="Apache Software Foundation" height="70"/>
+          </a>
+      </td>
+    </tr>
+  </table>
+</div>
+  
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <div class="section" id="flume-1-10-0-developer-guide">
+<h1>Flume 1.10.0 Developer Guide<a class="headerlink" href="#flume-1-10-0-developer-guide" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="introduction">
+<h2>Introduction<a class="headerlink" href="#introduction" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="overview">
+<h3>Overview<a class="headerlink" href="#overview" title="Permalink to this headline">¶</a></h3>
+<p>Apache Flume is a distributed, reliable, and available system for
+efficiently collecting, aggregating and moving large amounts of log
+data from many different sources to a centralized data store.</p>
+<p>Apache Flume is a top-level project at the Apache Software Foundation.
+There are currently two release code lines available, versions 0.9.x and 1.x.
+This documentation applies to the 1.x codeline.
+For the 0.9.x codeline, please see the <a class="reference external" href="http://archive.cloudera.com/cdh/3/flume/DeveloperGuide/">Flume 0.9.x Developer Guide</a>.</p>
+</div>
+<div class="section" id="architecture">
+<h3>Architecture<a class="headerlink" href="#architecture" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="data-flow-model">
+<h4>Data flow model<a class="headerlink" href="#data-flow-model" title="Permalink to this headline">¶</a></h4>
+<p>An <tt class="docutils literal"><span class="pre">Event</span></tt> is a unit of data that flows through a Flume agent. The <tt class="docutils literal"><span class="pre">Event</span></tt>
+flows from <tt class="docutils literal"><span class="pre">Source</span></tt> to <tt class="docutils literal"><span class="pre">Channel</span></tt> to <tt class="docutils literal"><span class="pre">Sink</span></tt>, and is represented by an
+implementation of the <tt class="docutils literal"><span class="pre">Event</span></tt> interface. An <tt class="docutils literal"><span class="pre">Event</span></tt> carries a payload (byte
+array) that is accompanied by an optional set of headers (string attributes).
+A Flume agent is a process (JVM) that hosts the components that allow
+<tt class="docutils literal"><span class="pre">Event</span></tt>s to flow from an external source to an external destination.</p>
+<div class="figure align-center">
+<img alt="Agent component diagram" src="_images/DevGuide_image00.png" />
+</div>
+<p>A <tt class="docutils literal"><span class="pre">Source</span></tt> consumes <tt class="docutils literal"><span class="pre">Event</span></tt>s having a specific format, and those
+<tt class="docutils literal"><span class="pre">Event</span></tt>s are delivered to the <tt class="docutils literal"><span class="pre">Source</span></tt> by an external source like a web
+server. For example, an <tt class="docutils literal"><span class="pre">AvroSource</span></tt> can be used to receive Avro <tt class="docutils literal"><span class="pre">Event</span></tt>s
+from clients or from other Flume agents in the flow. When a <tt class="docutils literal"><span class="pre">Source</span></tt> receives
+an <tt class="docutils literal"><span class="pre">Event</span></tt>, it stores it into one or more <tt class="docutils literal"><span class="pre">Channel</span></tt>s.  The <tt class="docutils literal"><span class="pre">Channel</span></tt> is
+a passive store that holds the <tt class="docutils literal"><span class="pre">Event</span></tt> until that <tt class="docutils literal"><span class="pre">Event</span></tt> is consumed by a
+<tt class="docutils literal"><span class="pre">Sink</span></tt>. One type of <tt class="docutils literal"><span class="pre">Channel</span></tt> available in Flume is the <tt class="docutils literal"><span class="pre">FileChannel</span></tt>
+which uses the local filesystem as its backing store. A <tt class="docutils literal"><span class="pre">Sink</span></tt> is responsible
+for removing an <tt class="docutils literal"><span class="pre">Event</span></tt> from the <tt class="docutils literal"><span class="pre">Channel</span></tt> and putting it into an external
+repository like HDFS (in the case of an <tt class="docutils literal"><span class="pre">HDFSEventSink</span></tt>) or forwarding it to
+the <tt class="docutils literal"><span class="pre">Source</span></tt> at the next hop of the flow. The <tt class="docutils literal"><span class="pre">Source</span></tt> and <tt class="docutils literal"><span class="pre">Sink</span></tt> within
+the given agent run asynchronously with the <tt class="docutils literal"><span class="pre">Event</span></tt>s staged in the
+<tt class="docutils literal"><span class="pre">Channel</span></tt>.</p>
+</div>
+<div class="section" id="reliability">
+<h4>Reliability<a class="headerlink" href="#reliability" title="Permalink to this headline">¶</a></h4>
+<p>An <tt class="docutils literal"><span class="pre">Event</span></tt> is staged in a Flume agent&#8217;s <tt class="docutils literal"><span class="pre">Channel</span></tt>. Then it&#8217;s the
+<tt class="docutils literal"><span class="pre">Sink</span></tt>&#8216;s responsibility to deliver the <tt class="docutils literal"><span class="pre">Event</span></tt> to the next agent or
+terminal repository (like HDFS) in the flow. The <tt class="docutils literal"><span class="pre">Sink</span></tt> removes an <tt class="docutils literal"><span class="pre">Event</span></tt>
+from the <tt class="docutils literal"><span class="pre">Channel</span></tt> only after the <tt class="docutils literal"><span class="pre">Event</span></tt> is stored into the <tt class="docutils literal"><span class="pre">Channel</span></tt> of
+the next agent or stored in the terminal repository. This is how the single-hop
+message delivery semantics in Flume provide end-to-end reliability of the flow.
+Flume uses a transactional approach to guarantee the reliable delivery of the
+<tt class="docutils literal"><span class="pre">Event</span></tt>s. The <tt class="docutils literal"><span class="pre">Source</span></tt>s and <tt class="docutils literal"><span class="pre">Sink</span></tt>s encapsulate the
+storage/retrieval of the <tt class="docutils literal"><span class="pre">Event</span></tt>s in a <tt class="docutils literal"><span class="pre">Transaction</span></tt> provided by the
+<tt class="docutils literal"><span class="pre">Channel</span></tt>. This ensures that the set of <tt class="docutils literal"><span class="pre">Event</span></tt>s are reliably passed from
+point to point in the flow. In the case of a multi-hop flow, the <tt class="docutils literal"><span class="pre">Sink</span></tt> from
+the previous hop and the <tt class="docutils literal"><span class="pre">Source</span></tt> of the next hop both have their
+<tt class="docutils literal"><span class="pre">Transaction</span></tt>s open to ensure that the <tt class="docutils literal"><span class="pre">Event</span></tt> data is safely stored in
+the <tt class="docutils literal"><span class="pre">Channel</span></tt> of the next hop.</p>
+</div>
+</div>
+<div class="section" id="building-flume">
+<h3>Building Flume<a class="headerlink" href="#building-flume" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="getting-the-source">
+<h4>Getting the source<a class="headerlink" href="#getting-the-source" title="Permalink to this headline">¶</a></h4>
+<p>Check-out the code using Git. Click here for
+<a class="reference external" href="https://git-wip-us.apache.org/repos/asf/flume.git">the git repository root</a>.</p>
+<p>The Flume 1.x development happens under the branch &#8220;trunk&#8221; so this command line
+can be used:</p>
+<blockquote>
+<div>git clone <a class="reference external" href="https://git-wip-us.apache.org/repos/asf/flume.git">https://git-wip-us.apache.org/repos/asf/flume.git</a></div></blockquote>
+</div>
+<div class="section" id="compile-test-flume">
+<h4>Compile/test Flume<a class="headerlink" href="#compile-test-flume" title="Permalink to this headline">¶</a></h4>
+<p>The Flume build is mavenized. You can compile Flume using the standard Maven
+commands:</p>
+<ol class="arabic simple">
+<li>Compile only: <tt class="docutils literal"><span class="pre">mvn</span> <span class="pre">clean</span> <span class="pre">compile</span></tt></li>
+<li>Compile and run unit tests: <tt class="docutils literal"><span class="pre">mvn</span> <span class="pre">clean</span> <span class="pre">test</span></tt></li>
+<li>Run individual test(s): <tt class="docutils literal"><span class="pre">mvn</span> <span class="pre">clean</span> <span class="pre">test</span> <span class="pre">-Dtest=&lt;Test1&gt;,&lt;Test2&gt;,...</span> <span class="pre">-DfailIfNoTests=false</span></tt></li>
+<li>Create tarball package: <tt class="docutils literal"><span class="pre">mvn</span> <span class="pre">clean</span> <span class="pre">install</span></tt></li>
+<li>Create tarball package (skip unit tests): <tt class="docutils literal"><span class="pre">mvn</span> <span class="pre">clean</span> <span class="pre">install</span> <span class="pre">-DskipTests</span></tt></li>
+</ol>
+<p>Please note that Flume builds require that the Google Protocol Buffers compiler
+be in the path. You can download and install it by following the instructions
+<a class="reference external" href="https://developers.google.com/protocol-buffers/">here</a>.</p>
+</div>
+<div class="section" id="updating-protocol-buffer-version">
+<h4>Updating Protocol Buffer Version<a class="headerlink" href="#updating-protocol-buffer-version" title="Permalink to this headline">¶</a></h4>
+<p>File channel has a dependency on Protocol Buffer. When updating the version of Protocol Buffer
+used by Flume, it is necessary to regenerate the data access classes using the protoc compiler
+that is part of Protocol Buffer as follows.</p>
+<ol class="arabic simple">
+<li>Install the desired version of Protocol Buffer on your local machine</li>
+<li>Update version of Protocol Buffer in pom.xml</li>
+<li>Generate new Protocol Buffer data access classes in Flume: <tt class="docutils literal"><span class="pre">cd</span> <span class="pre">flume-ng-channels/flume-file-channel;</span> <span class="pre">mvn</span> <span class="pre">-P</span> <span class="pre">compile-proto</span> <span class="pre">clean</span> <span class="pre">package</span> <span class="pre">-DskipTests</span></tt></li>
+<li>Add Apache license header to any of the generated files that are missing it</li>
+<li>Rebuild and test Flume:  <tt class="docutils literal"><span class="pre">cd</span> <span class="pre">../..;</span> <span class="pre">mvn</span> <span class="pre">clean</span> <span class="pre">install</span></tt></li>
+</ol>
+</div>
+</div>
+<div class="section" id="developing-custom-components">
+<h3>Developing custom components<a class="headerlink" href="#developing-custom-components" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="client">
+<h4>Client<a class="headerlink" href="#client" title="Permalink to this headline">¶</a></h4>
+<p>The client operates at the point of origin of events and delivers them to a
+Flume agent. Clients typically operate in the process space of the application
+they are consuming data from. Flume currently supports Avro, log4j, syslog,
+and Http POST (with a JSON body) as ways to transfer data from an external
+source. Additionally, there’s an <tt class="docutils literal"><span class="pre">ExecSource</span></tt> that can consume the output of a
+local process as input to Flume.</p>
+<p>It’s quite possible to have a use case where these existing options are not
+sufficient. In this case you can build a custom mechanism to send data to
+Flume. There are two ways of achieving this. The first option is to create a
+custom client that communicates with one of Flume’s existing <tt class="docutils literal"><span class="pre">Source</span></tt>s like
+<tt class="docutils literal"><span class="pre">AvroSource</span></tt> or <tt class="docutils literal"><span class="pre">SyslogTcpSource</span></tt>. Here the client should convert its data
+into messages understood by these Flume <tt class="docutils literal"><span class="pre">Source</span></tt>s. The other option is to
+write a custom Flume <tt class="docutils literal"><span class="pre">Source</span></tt> that directly talks with your existing client
+application using some IPC or RPC protocol, and then converts the client data
+into Flume <tt class="docutils literal"><span class="pre">Event</span></tt>s to be sent downstream. Note that all events stored
+within the <tt class="docutils literal"><span class="pre">Channel</span></tt> of a Flume agent must exist as Flume <tt class="docutils literal"><span class="pre">Event</span></tt>s.</p>
+<div class="section" id="client-sdk">
+<h5>Client SDK<a class="headerlink" href="#client-sdk" title="Permalink to this headline">¶</a></h5>
+<p>Though Flume contains a number of built-in mechanisms (i.e. <tt class="docutils literal"><span class="pre">Source</span></tt>s) to
+ingest data, often one wants the ability to communicate with Flume directly from
+a custom application. The Flume Client SDK is a library that enables
+applications to connect to Flume and send data into Flume’s data flow over RPC.</p>
+</div>
+<div class="section" id="rpc-client-interface">
+<h5>RPC client interface<a class="headerlink" href="#rpc-client-interface" title="Permalink to this headline">¶</a></h5>
+<p>An implementation of Flume&#8217;s RpcClient interface encapsulates the RPC mechanism
+supported by Flume. The user&#8217;s application can simply call the Flume Client
+SDK&#8217;s <tt class="docutils literal"><span class="pre">append(Event)</span></tt> or <tt class="docutils literal"><span class="pre">appendBatch(List&lt;Event&gt;)</span></tt> to send data and not
+worry about the underlying message exchange details. The user can provide the
+required <tt class="docutils literal"><span class="pre">Event</span></tt> arg by either directly implementing the <tt class="docutils literal"><span class="pre">Event</span></tt> interface,
+by using a convenience implementation such as the SimpleEvent class, or by using
+<tt class="docutils literal"><span class="pre">EventBuilder</span></tt>&#8216;s overloaded <tt class="docutils literal"><span class="pre">withBody()</span></tt> static helper methods.</p>
+</div>
+<div class="section" id="rpc-clients-avro-and-thrift">
+<h5>RPC clients - Avro and Thrift<a class="headerlink" href="#rpc-clients-avro-and-thrift" title="Permalink to this headline">¶</a></h5>
+<p>As of Flume 1.4.0, Avro is the default RPC protocol.  The
+<tt class="docutils literal"><span class="pre">NettyAvroRpcClient</span></tt> and <tt class="docutils literal"><span class="pre">ThriftRpcClient</span></tt> implement the <tt class="docutils literal"><span class="pre">RpcClient</span></tt>
+interface. The client needs to create this object with the host and port of
+the target Flume agent, and can then use the <tt class="docutils literal"><span class="pre">RpcClient</span></tt> to send data into
+the agent. The following example shows how to use the Flume Client SDK API
+within a user&#8217;s data-generating application:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="kn">import</span> <span class="nn">org.apache.flume.Event</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.EventDeliveryException</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.api.RpcClient</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.api.RpcClientFactory</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.event.EventBuilder</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">java.nio.charset.Charset</span><span class="o">;</span>
+
+<span class="kd">public</span> <span class="kd">class</span> <span class="nc">MyApp</span> <span class="o">{</span>
+  <span class="kd">public</span> <span class="kd">static</span> <span class="kt">void</span> <span class="nf">main</span><span class="o">(</span><span class="n">String</span><span class="o">[]</span> <span class="n">args</span><span class="o">)</span> <span class="o">{</span>
+    <span class="n">MyRpcClientFacade</span> <span class="n">client</span> <span class="o">=</span> <span class="k">new</span> <span class="n">MyRpcClientFacade</span><span class="o">();</span>
+    <span class="c1">// Initialize client with the remote Flume agent&#39;s host and port</span>
+    <span class="n">client</span><span class="o">.</span><span class="na">init</span><span class="o">(</span><span class="s">&quot;host.example.org&quot;</span><span class="o">,</span> <span class="mi">41414</span><span class="o">);</span>
+
+    <span class="c1">// Send 10 events to the remote Flume agent. That agent should be</span>
+    <span class="c1">// configured to listen with an AvroSource.</span>
+    <span class="n">String</span> <span class="n">sampleData</span> <span class="o">=</span> <span class="s">&quot;Hello Flume!&quot;</span><span class="o">;</span>
+    <span class="k">for</span> <span class="o">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="o">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">10</span><span class="o">;</span> <span class="n">i</span><span class="o">++)</span> <span class="o">{</span>
+      <span class="n">client</span><span class="o">.</span><span class="na">sendDataToFlume</span><span class="o">(</span><span class="n">sampleData</span><span class="o">);</span>
+    <span class="o">}</span>
+
+    <span class="n">client</span><span class="o">.</span><span class="na">cleanUp</span><span class="o">();</span>
+  <span class="o">}</span>
+<span class="o">}</span>
+
+<span class="kd">class</span> <span class="nc">MyRpcClientFacade</span> <span class="o">{</span>
+  <span class="kd">private</span> <span class="n">RpcClient</span> <span class="n">client</span><span class="o">;</span>
+  <span class="kd">private</span> <span class="n">String</span> <span class="n">hostname</span><span class="o">;</span>
+  <span class="kd">private</span> <span class="kt">int</span> <span class="n">port</span><span class="o">;</span>
+
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">init</span><span class="o">(</span><span class="n">String</span> <span class="n">hostname</span><span class="o">,</span> <span class="kt">int</span> <span class="n">port</span><span class="o">)</span> <span class="o">{</span>
+    <span class="c1">// Setup the RPC connection</span>
+    <span class="k">this</span><span class="o">.</span><span class="na">hostname</span> <span class="o">=</span> <span class="n">hostname</span><span class="o">;</span>
+    <span class="k">this</span><span class="o">.</span><span class="na">port</span> <span class="o">=</span> <span class="n">port</span><span class="o">;</span>
+    <span class="k">this</span><span class="o">.</span><span class="na">client</span> <span class="o">=</span> <span class="n">RpcClientFactory</span><span class="o">.</span><span class="na">getDefaultInstance</span><span class="o">(</span><span class="n">hostname</span><span class="o">,</span> <span class="n">port</span><span class="o">);</span>
+    <span class="c1">// Use the following method to create a thrift client (instead of the above line):</span>
+    <span class="c1">// this.client = RpcClientFactory.getThriftInstance(hostname, port);</span>
+  <span class="o">}</span>
+
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">sendDataToFlume</span><span class="o">(</span><span class="n">String</span> <span class="n">data</span><span class="o">)</span> <span class="o">{</span>
+    <span class="c1">// Create a Flume Event object that encapsulates the sample data</span>
+    <span class="n">Event</span> <span class="n">event</span> <span class="o">=</span> <span class="n">EventBuilder</span><span class="o">.</span><span class="na">withBody</span><span class="o">(</span><span class="n">data</span><span class="o">,</span> <span class="n">Charset</span><span class="o">.</span><span class="na">forName</span><span class="o">(</span><span class="s">&quot;UTF-8&quot;</span><span class="o">));</span>
+
+    <span class="c1">// Send the event</span>
+    <span class="k">try</span> <span class="o">{</span>
+      <span class="n">client</span><span class="o">.</span><span class="na">append</span><span class="o">(</span><span class="n">event</span><span class="o">);</span>
+    <span class="o">}</span> <span class="k">catch</span> <span class="o">(</span><span class="n">EventDeliveryException</span> <span class="n">e</span><span class="o">)</span> <span class="o">{</span>
+      <span class="c1">// clean up and recreate the client</span>
+      <span class="n">client</span><span class="o">.</span><span class="na">close</span><span class="o">();</span>
+      <span class="n">client</span> <span class="o">=</span> <span class="kc">null</span><span class="o">;</span>
+      <span class="n">client</span> <span class="o">=</span> <span class="n">RpcClientFactory</span><span class="o">.</span><span class="na">getDefaultInstance</span><span class="o">(</span><span class="n">hostname</span><span class="o">,</span> <span class="n">port</span><span class="o">);</span>
+      <span class="c1">// Use the following method to create a thrift client (instead of the above line):</span>
+      <span class="c1">// this.client = RpcClientFactory.getThriftInstance(hostname, port);</span>
+    <span class="o">}</span>
+  <span class="o">}</span>
+
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">cleanUp</span><span class="o">()</span> <span class="o">{</span>
+    <span class="c1">// Close the RPC connection</span>
+    <span class="n">client</span><span class="o">.</span><span class="na">close</span><span class="o">();</span>
+  <span class="o">}</span>
+
+<span class="o">}</span>
+</pre></div>
+</div>
+<p>The remote Flume agent needs to have an <tt class="docutils literal"><span class="pre">AvroSource</span></tt> (or a
+<tt class="docutils literal"><span class="pre">ThriftSource</span></tt> if you are using a Thrift client) listening on some port.
+Below is an example Flume agent configuration that&#8217;s waiting for a connection
+from MyApp:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">memory</span>
+
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">avro</span>
+<span class="c"># For using a thrift source set the following instead of the above line.</span>
+<span class="c"># a1.source.r1.type = thrift</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">41414</span>
+
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">logger</span>
+</pre></div>
+</div>
+<p>For more flexibility, the default Flume client implementations
+(<tt class="docutils literal"><span class="pre">NettyAvroRpcClient</span></tt> and <tt class="docutils literal"><span class="pre">ThriftRpcClient</span></tt>) can be configured with these
+properties:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">client.type</span> <span class="o">=</span> <span class="s">default (for avro) or thrift (for thrift)</span>
+
+<span class="na">hosts</span> <span class="o">=</span> <span class="s">h1                           # default client accepts only 1 host</span>
+                                     <span class="c"># (additional hosts will be ignored)</span>
+
+<span class="na">hosts.h1</span> <span class="o">=</span> <span class="s">host1.example.org:41414   # host and port must both be specified</span>
+                                     <span class="c"># (neither has a default)</span>
+
+<span class="na">batch-size</span> <span class="o">=</span> <span class="s">100                     # Must be &gt;=1 (default: 100)</span>
+
+<span class="na">connect-timeout</span> <span class="o">=</span> <span class="s">20000              # Must be &gt;=1000 (default: 20000)</span>
+
+<span class="na">request-timeout</span> <span class="o">=</span> <span class="s">20000              # Must be &gt;=1000 (default: 20000)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="secure-rpc-client-thrift">
+<h5>Secure RPC client - Thrift<a class="headerlink" href="#secure-rpc-client-thrift" title="Permalink to this headline">¶</a></h5>
+<p>As of Flume 1.6.0, Thrift source and sink supports kerberos based authentication.
+The client needs to use the getThriftInstance method of <tt class="docutils literal"><span class="pre">SecureRpcClientFactory</span></tt>
+to get hold of a <tt class="docutils literal"><span class="pre">SecureThriftRpcClient</span></tt>. <tt class="docutils literal"><span class="pre">SecureThriftRpcClient</span></tt> extends
+<tt class="docutils literal"><span class="pre">ThriftRpcClient</span></tt> which implements the <tt class="docutils literal"><span class="pre">RpcClient</span></tt> interface. The kerberos
+authentication module resides in flume-ng-auth module which is
+required in classpath, when using the <tt class="docutils literal"><span class="pre">SecureRpcClientFactory</span></tt>. Both the client
+principal and the client keytab should be passed in as parameters through the
+properties and they reflect the credentials of the client to authenticate
+against the kerberos KDC. In addition, the server principal of the destination
+Thrift source to which this client is connecting to, should also be provided.
+The following example shows how to use the <tt class="docutils literal"><span class="pre">SecureRpcClientFactory</span></tt>
+within a user&#8217;s data-generating application:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="kn">import</span> <span class="nn">org.apache.flume.Event</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.EventDeliveryException</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.event.EventBuilder</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.api.SecureRpcClientFactory</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.api.RpcClientConfigurationConstants</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">org.apache.flume.api.RpcClient</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">java.nio.charset.Charset</span><span class="o">;</span>
+<span class="kn">import</span> <span class="nn">java.util.Properties</span><span class="o">;</span>
+
+<span class="kd">public</span> <span class="kd">class</span> <span class="nc">MyApp</span> <span class="o">{</span>
+  <span class="kd">public</span> <span class="kd">static</span> <span class="kt">void</span> <span class="nf">main</span><span class="o">(</span><span class="n">String</span><span class="o">[]</span> <span class="n">args</span><span class="o">)</span> <span class="o">{</span>
+    <span class="n">MySecureRpcClientFacade</span> <span class="n">client</span> <span class="o">=</span> <span class="k">new</span> <span class="n">MySecureRpcClientFacade</span><span class="o">();</span>
+    <span class="c1">// Initialize client with the remote Flume agent&#39;s host, port</span>
+    <span class="n">Properties</span> <span class="n">props</span> <span class="o">=</span> <span class="k">new</span> <span class="n">Properties</span><span class="o">();</span>
+    <span class="n">props</span><span class="o">.</span><span class="na">setProperty</span><span class="o">(</span><span class="n">RpcClientConfigurationConstants</span><span class="o">.</span><span class="na">CONFIG_CLIENT_TYPE</span><span class="o">,</span> <span class="s">&quot;thrift&quot;</span><span class="o">);</span>
+    <span class="n">props</span><span class="o">.</span><span class="na">setProperty</span><span class="o">(</span><span class="s">&quot;hosts&quot;</span><span class="o">,</span> <span class="s">&quot;h1&quot;</span><span class="o">);</span>
+    <span class="n">props</span><span class="o">.</span><span class="na">setProperty</span><span class="o">(</span><span class="s">&quot;hosts.h1&quot;</span><span class="o">,</span> <span class="s">&quot;client.example.org&quot;</span><span class="o">+</span><span class="s">&quot;:&quot;</span><span class="o">+</span> <span class="n">String</span><span class="o">.</span><span class="na">valueOf</span><span class="o">(</span><span class="mi">41414</span><span class="o">));</span>
+
+    <span class="c1">// Initialize client with the kerberos authentication related properties</span>
+    <span class="n">props</span><span class="o">.</span><span class="na">setProperty</span><span class="o">(</span><span class="s">&quot;kerberos&quot;</span><span class="o">,</span> <span class="s">&quot;true&quot;</span><span class="o">);</span>
+    <span class="n">props</span><span class="o">.</span><span class="na">setProperty</span><span class="o">(</span><span class="s">&quot;client-principal&quot;</span><span class="o">,</span> <span class="s">&quot;flumeclient/client.example.org@EXAMPLE.ORG&quot;</span><span class="o">);</span>
+    <span class="n">props</span><span class="o">.</span><span class="na">setProperty</span><span class="o">(</span><span class="s">&quot;client-keytab&quot;</span><span class="o">,</span> <span class="s">&quot;/tmp/flumeclient.keytab&quot;</span><span class="o">);</span>
+    <span class="n">props</span><span class="o">.</span><span class="na">setProperty</span><span class="o">(</span><span class="s">&quot;server-principal&quot;</span><span class="o">,</span> <span class="s">&quot;flume/server.example.org@EXAMPLE.ORG&quot;</span><span class="o">);</span>
+    <span class="n">client</span><span class="o">.</span><span class="na">init</span><span class="o">(</span><span class="n">props</span><span class="o">);</span>
+
+    <span class="c1">// Send 10 events to the remote Flume agent. That agent should be</span>
+      <span class="c1">// configured to listen with a ThriftSource.</span>
+    <span class="n">String</span> <span class="n">sampleData</span> <span class="o">=</span> <span class="s">&quot;Hello Flume!&quot;</span><span class="o">;</span>
+    <span class="k">for</span> <span class="o">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="o">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">10</span><span class="o">;</span> <span class="n">i</span><span class="o">++)</span> <span class="o">{</span>
+      <span class="n">client</span><span class="o">.</span><span class="na">sendDataToFlume</span><span class="o">(</span><span class="n">sampleData</span><span class="o">);</span>
+    <span class="o">}</span>
+
+    <span class="n">client</span><span class="o">.</span><span class="na">cleanUp</span><span class="o">();</span>
+  <span class="o">}</span>
+<span class="o">}</span>
+
+<span class="kd">class</span> <span class="nc">MySecureRpcClientFacade</span> <span class="o">{</span>
+  <span class="kd">private</span> <span class="n">RpcClient</span> <span class="n">client</span><span class="o">;</span>
+  <span class="kd">private</span> <span class="n">Properties</span> <span class="n">properties</span><span class="o">;</span>
+
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">init</span><span class="o">(</span><span class="n">Properties</span> <span class="n">properties</span><span class="o">)</span> <span class="o">{</span>
+    <span class="c1">// Setup the RPC connection</span>
+    <span class="k">this</span><span class="o">.</span><span class="na">properties</span> <span class="o">=</span> <span class="n">properties</span><span class="o">;</span>
+    <span class="c1">// Create the ThriftSecureRpcClient instance by using SecureRpcClientFactory</span>
+    <span class="k">this</span><span class="o">.</span><span class="na">client</span> <span class="o">=</span> <span class="n">SecureRpcClientFactory</span><span class="o">.</span><span class="na">getThriftInstance</span><span class="o">(</span><span class="n">properties</span><span class="o">);</span>
+  <span class="o">}</span>
+
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">sendDataToFlume</span><span class="o">(</span><span class="n">String</span> <span class="n">data</span><span class="o">)</span> <span class="o">{</span>
+    <span class="c1">// Create a Flume Event object that encapsulates the sample data</span>
+    <span class="n">Event</span> <span class="n">event</span> <span class="o">=</span> <span class="n">EventBuilder</span><span class="o">.</span><span class="na">withBody</span><span class="o">(</span><span class="n">data</span><span class="o">,</span> <span class="n">Charset</span><span class="o">.</span><span class="na">forName</span><span class="o">(</span><span class="s">&quot;UTF-8&quot;</span><span class="o">));</span>
+
+    <span class="c1">// Send the event</span>
+    <span class="k">try</span> <span class="o">{</span>
+      <span class="n">client</span><span class="o">.</span><span class="na">append</span><span class="o">(</span><span class="n">event</span><span class="o">);</span>
+    <span class="o">}</span> <span class="k">catch</span> <span class="o">(</span><span class="n">EventDeliveryException</span> <span class="n">e</span><span class="o">)</span> <span class="o">{</span>
+      <span class="c1">// clean up and recreate the client</span>
+      <span class="n">client</span><span class="o">.</span><span class="na">close</span><span class="o">();</span>
+      <span class="n">client</span> <span class="o">=</span> <span class="kc">null</span><span class="o">;</span>
+      <span class="n">client</span> <span class="o">=</span> <span class="n">SecureRpcClientFactory</span><span class="o">.</span><span class="na">getThriftInstance</span><span class="o">(</span><span class="n">properties</span><span class="o">);</span>
+    <span class="o">}</span>
+  <span class="o">}</span>
+
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">cleanUp</span><span class="o">()</span> <span class="o">{</span>
+    <span class="c1">// Close the RPC connection</span>
+    <span class="n">client</span><span class="o">.</span><span class="na">close</span><span class="o">();</span>
+  <span class="o">}</span>
+<span class="o">}</span>
+</pre></div>
+</div>
+<p>The remote <tt class="docutils literal"><span class="pre">ThriftSource</span></tt> should be started in kerberos mode.
+Below is an example Flume agent configuration that&#8217;s waiting for a connection
+from MyApp:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">memory</span>
+
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">thrift</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">41414</span>
+<span class="na">a1.sources.r1.kerberos</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">a1.sources.r1.agent-principal</span> <span class="o">=</span> <span class="s">flume/server.example.org@EXAMPLE.ORG</span>
+<span class="na">a1.sources.r1.agent-keytab</span> <span class="o">=</span> <span class="s">/tmp/flume.keytab</span>
+
+
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">logger</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="failover-client">
+<h5>Failover Client<a class="headerlink" href="#failover-client" title="Permalink to this headline">¶</a></h5>
+<p>This class wraps the default Avro RPC client to provide failover handling
+capability to clients. This takes a whitespace-separated list of &lt;host&gt;:&lt;port&gt;
+representing the Flume agents that make-up a failover group. The Failover RPC
+Client currently does not support thrift. If there’s a
+communication error with the currently selected host (i.e. agent),
+then the failover client automatically fails-over to the next host in the list.
+For example:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="c1">// Setup properties for the failover</span>
+<span class="n">Properties</span> <span class="n">props</span> <span class="o">=</span> <span class="k">new</span> <span class="n">Properties</span><span class="o">();</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;client.type&quot;</span><span class="o">,</span> <span class="s">&quot;default_failover&quot;</span><span class="o">);</span>
+
+<span class="c1">// List of hosts (space-separated list of user-chosen host aliases)</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;hosts&quot;</span><span class="o">,</span> <span class="s">&quot;h1 h2 h3&quot;</span><span class="o">);</span>
+
+<span class="c1">// host/port pair for each host alias</span>
+<span class="n">String</span> <span class="n">host1</span> <span class="o">=</span> <span class="s">&quot;host1.example.org:41414&quot;</span><span class="o">;</span>
+<span class="n">String</span> <span class="n">host2</span> <span class="o">=</span> <span class="s">&quot;host2.example.org:41414&quot;</span><span class="o">;</span>
+<span class="n">String</span> <span class="n">host3</span> <span class="o">=</span> <span class="s">&quot;host3.example.org:41414&quot;</span><span class="o">;</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;hosts.h1&quot;</span><span class="o">,</span> <span class="n">host1</span><span class="o">);</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;hosts.h2&quot;</span><span class="o">,</span> <span class="n">host2</span><span class="o">);</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;hosts.h3&quot;</span><span class="o">,</span> <span class="n">host3</span><span class="o">);</span>
+
+<span class="c1">// create the client with failover properties</span>
+<span class="n">RpcClient</span> <span class="n">client</span> <span class="o">=</span> <span class="n">RpcClientFactory</span><span class="o">.</span><span class="na">getInstance</span><span class="o">(</span><span class="n">props</span><span class="o">);</span>
+</pre></div>
+</div>
+<p>For more flexibility, the failover Flume client implementation
+(<tt class="docutils literal"><span class="pre">FailoverRpcClient</span></tt>) can be configured with these properties:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">client.type</span> <span class="o">=</span> <span class="s">default_failover</span>
+
+<span class="na">hosts</span> <span class="o">=</span> <span class="s">h1 h2 h3                     # at least one is required, but 2 or</span>
+                                     <span class="c"># more makes better sense</span>
+
+<span class="na">hosts.h1</span> <span class="o">=</span> <span class="s">host1.example.org:41414</span>
+
+<span class="na">hosts.h2</span> <span class="o">=</span> <span class="s">host2.example.org:41414</span>
+
+<span class="na">hosts.h3</span> <span class="o">=</span> <span class="s">host3.example.org:41414</span>
+
+<span class="na">max-attempts</span> <span class="o">=</span> <span class="s">3                     # Must be &gt;=0 (default: number of hosts</span>
+                                     <span class="c"># specified, 3 in this case). A &#39;0&#39;</span>
+                                     <span class="c"># value doesn&#39;t make much sense because</span>
+                                     <span class="c"># it will just cause an append call to</span>
+                                     <span class="c"># immediately fail. A &#39;1&#39; value means</span>
+                                     <span class="c"># that the failover client will try only</span>
+                                     <span class="c"># once to send the Event, and if it</span>
+                                     <span class="c"># fails then there will be no failover</span>
+                                     <span class="c"># to a second client, so this value</span>
+                                     <span class="c"># causes the failover client to</span>
+                                     <span class="c"># degenerate into just a default client.</span>
+                                     <span class="c"># It makes sense to set this value to at</span>
+                                     <span class="c"># least the number of hosts that you</span>
+                                     <span class="c"># specified.</span>
+
+<span class="na">batch-size</span> <span class="o">=</span> <span class="s">100                     # Must be &gt;=1 (default: 100)</span>
+
+<span class="na">connect-timeout</span> <span class="o">=</span> <span class="s">20000              # Must be &gt;=1000 (default: 20000)</span>
+
+<span class="na">request-timeout</span> <span class="o">=</span> <span class="s">20000              # Must be &gt;=1000 (default: 20000)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="loadbalancing-rpc-client">
+<h5>LoadBalancing RPC client<a class="headerlink" href="#loadbalancing-rpc-client" title="Permalink to this headline">¶</a></h5>
+<p>The Flume Client SDK also supports an RpcClient which load-balances among
+multiple hosts. This type of client takes a whitespace-separated list of
+&lt;host&gt;:&lt;port&gt; representing the Flume agents that make-up a load-balancing group.
+This client can be configured with a load balancing strategy that either
+randomly selects one of the configured hosts, or selects a host in a round-robin
+fashion. You can also specify your own custom class that implements the
+<tt class="docutils literal"><span class="pre">LoadBalancingRpcClient$HostSelector</span></tt> interface so that a custom selection
+order is used. In that case, the FQCN of the custom class needs to be specified
+as the value of the <tt class="docutils literal"><span class="pre">host-selector</span></tt> property. The LoadBalancing RPC Client
+currently does not support thrift.</p>
+<p>If <tt class="docutils literal"><span class="pre">backoff</span></tt> is enabled then the client will temporarily blacklist
+hosts that fail, causing them to be excluded from being selected as a failover
+host until a given timeout. When the timeout elapses, if the host is still
+unresponsive then this is considered a sequential failure, and the timeout is
+increased exponentially to avoid potentially getting stuck in long waits on
+unresponsive hosts.</p>
+<p>The maximum backoff time can be configured by setting <tt class="docutils literal"><span class="pre">maxBackoff</span></tt> (in
+milliseconds). The maxBackoff default is 30 seconds (specified in the
+<tt class="docutils literal"><span class="pre">OrderSelector</span></tt> class that&#8217;s the superclass of both load balancing
+strategies). The backoff timeout will increase exponentially with each
+sequential failure up to the maximum possible backoff timeout.
+The maximum possible backoff is limited to 65536 seconds (about 18.2 hours).
+For example:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="c1">// Setup properties for the load balancing</span>
+<span class="n">Properties</span> <span class="n">props</span> <span class="o">=</span> <span class="k">new</span> <span class="n">Properties</span><span class="o">();</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;client.type&quot;</span><span class="o">,</span> <span class="s">&quot;default_loadbalance&quot;</span><span class="o">);</span>
+
+<span class="c1">// List of hosts (space-separated list of user-chosen host aliases)</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;hosts&quot;</span><span class="o">,</span> <span class="s">&quot;h1 h2 h3&quot;</span><span class="o">);</span>
+
+<span class="c1">// host/port pair for each host alias</span>
+<span class="n">String</span> <span class="n">host1</span> <span class="o">=</span> <span class="s">&quot;host1.example.org:41414&quot;</span><span class="o">;</span>
+<span class="n">String</span> <span class="n">host2</span> <span class="o">=</span> <span class="s">&quot;host2.example.org:41414&quot;</span><span class="o">;</span>
+<span class="n">String</span> <span class="n">host3</span> <span class="o">=</span> <span class="s">&quot;host3.example.org:41414&quot;</span><span class="o">;</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;hosts.h1&quot;</span><span class="o">,</span> <span class="n">host1</span><span class="o">);</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;hosts.h2&quot;</span><span class="o">,</span> <span class="n">host2</span><span class="o">);</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;hosts.h3&quot;</span><span class="o">,</span> <span class="n">host3</span><span class="o">);</span>
+
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;host-selector&quot;</span><span class="o">,</span> <span class="s">&quot;random&quot;</span><span class="o">);</span> <span class="c1">// For random host selection</span>
+<span class="c1">// props.put(&quot;host-selector&quot;, &quot;round_robin&quot;); // For round-robin host</span>
+<span class="c1">//                                            // selection</span>
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;backoff&quot;</span><span class="o">,</span> <span class="s">&quot;true&quot;</span><span class="o">);</span> <span class="c1">// Disabled by default.</span>
+
+<span class="n">props</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;maxBackoff&quot;</span><span class="o">,</span> <span class="s">&quot;10000&quot;</span><span class="o">);</span> <span class="c1">// Defaults to 0, which effectively</span>
+                                  <span class="c1">// becomes 30000 ms</span>
+
+<span class="c1">// Create the client with load balancing properties</span>
+<span class="n">RpcClient</span> <span class="n">client</span> <span class="o">=</span> <span class="n">RpcClientFactory</span><span class="o">.</span><span class="na">getInstance</span><span class="o">(</span><span class="n">props</span><span class="o">);</span>
+</pre></div>
+</div>
+<p>For more flexibility, the load-balancing Flume client implementation
+(<tt class="docutils literal"><span class="pre">LoadBalancingRpcClient</span></tt>) can be configured with these properties:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">client.type</span> <span class="o">=</span> <span class="s">default_loadbalance</span>
+
+<span class="na">hosts</span> <span class="o">=</span> <span class="s">h1 h2 h3                     # At least 2 hosts are required</span>
+
+<span class="na">hosts.h1</span> <span class="o">=</span> <span class="s">host1.example.org:41414</span>
+
+<span class="na">hosts.h2</span> <span class="o">=</span> <span class="s">host2.example.org:41414</span>
+
+<span class="na">hosts.h3</span> <span class="o">=</span> <span class="s">host3.example.org:41414</span>
+
+<span class="na">backoff</span> <span class="o">=</span> <span class="s">false                      # Specifies whether the client should</span>
+                                     <span class="c"># back-off from (i.e. temporarily</span>
+                                     <span class="c"># blacklist) a failed host</span>
+                                     <span class="c"># (default: false).</span>
+
+<span class="na">maxBackoff</span> <span class="o">=</span> <span class="s">0                       # Max timeout in millis that a host will</span>
+                                     <span class="c"># remain inactive due to a previous</span>
+                                     <span class="c"># failure with that host (default: 0,</span>
+                                     <span class="c"># which effectively becomes 30000)</span>
+
+<span class="na">host-selector</span> <span class="o">=</span> <span class="s">round_robin          # The host selection strategy used</span>
+                                     <span class="c"># when load-balancing among hosts</span>
+                                     <span class="c"># (default: round_robin).</span>
+                                     <span class="c"># Other values include &quot;random&quot;</span>
+                                     <span class="c"># or the FQCN of a custom class</span>
+                                     <span class="c"># that implements</span>
+                                     <span class="c"># LoadBalancingRpcClient$HostSelector</span>
+
+<span class="na">batch-size</span> <span class="o">=</span> <span class="s">100                     # Must be &gt;=1 (default: 100)</span>
+
+<span class="na">connect-timeout</span> <span class="o">=</span> <span class="s">20000              # Must be &gt;=1000 (default: 20000)</span>
+
+<span class="na">request-timeout</span> <span class="o">=</span> <span class="s">20000              # Must be &gt;=1000 (default: 20000)</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="embedded-agent">
+<h4>Embedded agent<a class="headerlink" href="#embedded-agent" title="Permalink to this headline">¶</a></h4>
+<p>Flume has an embedded agent api which allows users to embed an agent in their
+application. This agent is meant to be lightweight and as such not all
+sources, sinks, and channels are allowed. Specifically the source used
+is a special embedded source and events should be sent to the source
+via the put, putAll methods on the EmbeddedAgent object. Only File Channel
+and Memory Channel are allowed as channels while Avro Sink is the only
+supported sink. Interceptors are also supported by the embedded agent.</p>
+<p>Note: The embedded agent has a dependency on hadoop-core.jar.</p>
+<p>Configuration of an Embedded Agent is similar to configuration of a
+full Agent. The following is an exhaustive list of configuration options:
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="15%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>source.type</td>
+<td>embedded</td>
+<td>The only available source is the embedded source.</td>
+</tr>
+<tr class="row-odd"><td><strong>channel.type</strong></td>
+<td>&#8211;</td>
+<td>Either <tt class="docutils literal"><span class="pre">memory</span></tt> or <tt class="docutils literal"><span class="pre">file</span></tt> which correspond
+to MemoryChannel and FileChannel respectively.</td>
+</tr>
+<tr class="row-even"><td>channel.*</td>
+<td>&#8211;</td>
+<td>Configuration options for the channel type requested,
+see MemoryChannel or FileChannel user guide for an exhaustive list.</td>
+</tr>
+<tr class="row-odd"><td><strong>sinks</strong></td>
+<td>&#8211;</td>
+<td>List of sink names</td>
+</tr>
+<tr class="row-even"><td><strong>sink.type</strong></td>
+<td>&#8211;</td>
+<td>Property name must match a name in the list of sinks.
+Value must be <tt class="docutils literal"><span class="pre">avro</span></tt></td>
+</tr>
+<tr class="row-odd"><td>sink.*</td>
+<td>&#8211;</td>
+<td>Configuration options for the sink.
+See AvroSink user guide for an exhaustive list,
+however note AvroSink requires at least hostname and port.</td>
+</tr>
+<tr class="row-even"><td><strong>processor.type</strong></td>
+<td>&#8211;</td>
+<td>Either <tt class="docutils literal"><span class="pre">failover</span></tt> or <tt class="docutils literal"><span class="pre">load_balance</span></tt> which correspond
+to FailoverSinksProcessor and LoadBalancingSinkProcessor respectively.</td>
+</tr>
+<tr class="row-odd"><td>processor.*</td>
+<td>&#8211;</td>
+<td>Configuration options for the sink processor selected.
+See FailoverSinksProcessor and LoadBalancingSinkProcessor
+user guide for an exhaustive list.</td>
+</tr>
+<tr class="row-even"><td>source.interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-odd"><td>source.interceptors.*</td>
+<td>&#8211;</td>
+<td>Configuration options for individual interceptors
+specified in the source.interceptors property</td>
+</tr>
+</tbody>
+</table>
+<p>Below is an example of how to use the agent:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="n">Map</span><span class="o">&lt;</span><span class="n">String</span><span class="o">,</span> <span class="n">String</span><span class="o">&gt;</span> <span class="n">properties</span> <span class="o">=</span> <span class="k">new</span> <span class="n">HashMap</span><span class="o">&lt;</span><span class="n">String</span><span class="o">,</span> <span class="n">String</span><span class="o">&gt;();</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;channel.type&quot;</span><span class="o">,</span> <span class="s">&quot;memory&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;channel.capacity&quot;</span><span class="o">,</span> <span class="s">&quot;200&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;sinks&quot;</span><span class="o">,</span> <span class="s">&quot;sink1 sink2&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;sink1.type&quot;</span><span class="o">,</span> <span class="s">&quot;avro&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;sink2.type&quot;</span><span class="o">,</span> <span class="s">&quot;avro&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;sink1.hostname&quot;</span><span class="o">,</span> <span class="s">&quot;collector1.apache.org&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;sink1.port&quot;</span><span class="o">,</span> <span class="s">&quot;5564&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;sink2.hostname&quot;</span><span class="o">,</span> <span class="s">&quot;collector2.apache.org&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;sink2.port&quot;</span><span class="o">,</span>  <span class="s">&quot;5565&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;processor.type&quot;</span><span class="o">,</span> <span class="s">&quot;load_balance&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;source.interceptors&quot;</span><span class="o">,</span> <span class="s">&quot;i1&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;source.interceptors.i1.type&quot;</span><span class="o">,</span> <span class="s">&quot;static&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;source.interceptors.i1.key&quot;</span><span class="o">,</span> <span class="s">&quot;key1&quot;</span><span class="o">);</span>
+<span class="n">properties</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;source.interceptors.i1.value&quot;</span><span class="o">,</span> <span class="s">&quot;value1&quot;</span><span class="o">);</span>
+
+<span class="n">EmbeddedAgent</span> <span class="n">agent</span> <span class="o">=</span> <span class="k">new</span> <span class="n">EmbeddedAgent</span><span class="o">(</span><span class="s">&quot;myagent&quot;</span><span class="o">);</span>
+
+<span class="n">agent</span><span class="o">.</span><span class="na">configure</span><span class="o">(</span><span class="n">properties</span><span class="o">);</span>
+<span class="n">agent</span><span class="o">.</span><span class="na">start</span><span class="o">();</span>
+
+<span class="n">List</span><span class="o">&lt;</span><span class="n">Event</span><span class="o">&gt;</span> <span class="n">events</span> <span class="o">=</span> <span class="n">Lists</span><span class="o">.</span><span class="na">newArrayList</span><span class="o">();</span>
+
+<span class="n">events</span><span class="o">.</span><span class="na">add</span><span class="o">(</span><span class="n">event</span><span class="o">);</span>
+<span class="n">events</span><span class="o">.</span><span class="na">add</span><span class="o">(</span><span class="n">event</span><span class="o">);</span>
+<span class="n">events</span><span class="o">.</span><span class="na">add</span><span class="o">(</span><span class="n">event</span><span class="o">);</span>
+<span class="n">events</span><span class="o">.</span><span class="na">add</span><span class="o">(</span><span class="n">event</span><span class="o">);</span>
+
+<span class="n">agent</span><span class="o">.</span><span class="na">putAll</span><span class="o">(</span><span class="n">events</span><span class="o">);</span>
+
+<span class="o">...</span>
+
+<span class="n">agent</span><span class="o">.</span><span class="na">stop</span><span class="o">();</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="transaction-interface">
+<h4>Transaction interface<a class="headerlink" href="#transaction-interface" title="Permalink to this headline">¶</a></h4>
+<p>The <tt class="docutils literal"><span class="pre">Transaction</span></tt> interface is the basis of reliability for Flume. All the
+major components (i.e. <tt class="docutils literal"><span class="pre">Source</span></tt>s, <tt class="docutils literal"><span class="pre">Sink</span></tt>s and <tt class="docutils literal"><span class="pre">Channel</span></tt>s) must use a
+Flume <tt class="docutils literal"><span class="pre">Transaction</span></tt>.</p>
+<div class="figure align-center">
+<img alt="Transaction sequence diagram" src="_images/DevGuide_image01.png" />
+</div>
+<p>A <tt class="docutils literal"><span class="pre">Transaction</span></tt> is implemented within a <tt class="docutils literal"><span class="pre">Channel</span></tt> implementation. Each
+<tt class="docutils literal"><span class="pre">Source</span></tt> and <tt class="docutils literal"><span class="pre">Sink</span></tt> that is connected to a <tt class="docutils literal"><span class="pre">Channel</span></tt> must obtain a
+<tt class="docutils literal"><span class="pre">Transaction</span></tt> object. The <tt class="docutils literal"><span class="pre">Source</span></tt>s use a <tt class="docutils literal"><span class="pre">ChannelProcessor</span></tt>
+to manage the <tt class="docutils literal"><span class="pre">Transaction</span></tt>s, the <tt class="docutils literal"><span class="pre">Sink</span></tt>s manage them explicitly via
+their configured <tt class="docutils literal"><span class="pre">Channel</span></tt>. The operation to stage an
+<tt class="docutils literal"><span class="pre">Event</span></tt> (put it into a <tt class="docutils literal"><span class="pre">Channel</span></tt>) or extract an <tt class="docutils literal"><span class="pre">Event</span></tt> (take it out of a
+<tt class="docutils literal"><span class="pre">Channel</span></tt>) is done inside an active <tt class="docutils literal"><span class="pre">Transaction</span></tt>. For example:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="n">Channel</span> <span class="n">ch</span> <span class="o">=</span> <span class="k">new</span> <span class="n">MemoryChannel</span><span class="o">();</span>
+<span class="n">Transaction</span> <span class="n">txn</span> <span class="o">=</span> <span class="n">ch</span><span class="o">.</span><span class="na">getTransaction</span><span class="o">();</span>
+<span class="n">txn</span><span class="o">.</span><span class="na">begin</span><span class="o">();</span>
+<span class="k">try</span> <span class="o">{</span>
+  <span class="c1">// This try clause includes whatever Channel operations you want to do</span>
+
+  <span class="n">Event</span> <span class="n">eventToStage</span> <span class="o">=</span> <span class="n">EventBuilder</span><span class="o">.</span><span class="na">withBody</span><span class="o">(</span><span class="s">&quot;Hello Flume!&quot;</span><span class="o">,</span>
+                       <span class="n">Charset</span><span class="o">.</span><span class="na">forName</span><span class="o">(</span><span class="s">&quot;UTF-8&quot;</span><span class="o">));</span>
+  <span class="n">ch</span><span class="o">.</span><span class="na">put</span><span class="o">(</span><span class="n">eventToStage</span><span class="o">);</span>
+  <span class="c1">// Event takenEvent = ch.take();</span>
+  <span class="c1">// ...</span>
+  <span class="n">txn</span><span class="o">.</span><span class="na">commit</span><span class="o">();</span>
+<span class="o">}</span> <span class="k">catch</span> <span class="o">(</span><span class="n">Throwable</span> <span class="n">t</span><span class="o">)</span> <span class="o">{</span>
+  <span class="n">txn</span><span class="o">.</span><span class="na">rollback</span><span class="o">();</span>
+
+  <span class="c1">// Log exception, handle individual exceptions as needed</span>
+
+  <span class="c1">// re-throw all Errors</span>
+  <span class="k">if</span> <span class="o">(</span><span class="n">t</span> <span class="k">instanceof</span> <span class="n">Error</span><span class="o">)</span> <span class="o">{</span>
+    <span class="k">throw</span> <span class="o">(</span><span class="n">Error</span><span class="o">)</span><span class="n">t</span><span class="o">;</span>
+  <span class="o">}</span>
+<span class="o">}</span> <span class="k">finally</span> <span class="o">{</span>
+  <span class="n">txn</span><span class="o">.</span><span class="na">close</span><span class="o">();</span>
+<span class="o">}</span>
+</pre></div>
+</div>
+<p>Here we get hold of a <tt class="docutils literal"><span class="pre">Transaction</span></tt> from a <tt class="docutils literal"><span class="pre">Channel</span></tt>. After <tt class="docutils literal"><span class="pre">begin()</span></tt>
+returns, the <tt class="docutils literal"><span class="pre">Transaction</span></tt> is now active/open and the <tt class="docutils literal"><span class="pre">Event</span></tt> is then put
+into the <tt class="docutils literal"><span class="pre">Channel</span></tt>. If the put is successful, then the <tt class="docutils literal"><span class="pre">Transaction</span></tt> is
+committed and closed.</p>
+</div>
+<div class="section" id="sink">
+<h4>Sink<a class="headerlink" href="#sink" title="Permalink to this headline">¶</a></h4>
+<p>The purpose of a <tt class="docutils literal"><span class="pre">Sink</span></tt> is to extract <tt class="docutils literal"><span class="pre">Event</span></tt>s from the <tt class="docutils literal"><span class="pre">Channel</span></tt> and
+forward them to the next Flume Agent in the flow or store them in an external
+repository. A <tt class="docutils literal"><span class="pre">Sink</span></tt> is associated with exactly one <tt class="docutils literal"><span class="pre">Channel</span></tt>, as
+configured in the Flume properties file. There’s one <tt class="docutils literal"><span class="pre">SinkRunner</span></tt> instance
+associated with every configured <tt class="docutils literal"><span class="pre">Sink</span></tt>, and when the Flume framework calls
+<tt class="docutils literal"><span class="pre">SinkRunner.start()</span></tt>, a new thread is created to drive the <tt class="docutils literal"><span class="pre">Sink</span></tt> (using
+<tt class="docutils literal"><span class="pre">SinkRunner.PollingRunner</span></tt> as the thread&#8217;s <tt class="docutils literal"><span class="pre">Runnable</span></tt>). This thread manages
+the <tt class="docutils literal"><span class="pre">Sink</span></tt>’s lifecycle. The <tt class="docutils literal"><span class="pre">Sink</span></tt> needs to implement the <tt class="docutils literal"><span class="pre">start()</span></tt> and
+<tt class="docutils literal"><span class="pre">stop()</span></tt> methods that are part of the <tt class="docutils literal"><span class="pre">LifecycleAware</span></tt> interface. The
+<tt class="docutils literal"><span class="pre">Sink.start()</span></tt> method should initialize the <tt class="docutils literal"><span class="pre">Sink</span></tt> and bring it to a state
+where it can forward the <tt class="docutils literal"><span class="pre">Event</span></tt>s to its next destination.  The
+<tt class="docutils literal"><span class="pre">Sink.process()</span></tt> method should do the core processing of extracting the
+<tt class="docutils literal"><span class="pre">Event</span></tt> from the <tt class="docutils literal"><span class="pre">Channel</span></tt> and forwarding it. The <tt class="docutils literal"><span class="pre">Sink.stop()</span></tt> method
+should do the necessary cleanup (e.g. releasing resources). The <tt class="docutils literal"><span class="pre">Sink</span></tt>
+implementation also needs to implement the <tt class="docutils literal"><span class="pre">Configurable</span></tt> interface for
+processing its own configuration settings. For example:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="kd">public</span> <span class="kd">class</span> <span class="nc">MySink</span> <span class="kd">extends</span> <span class="n">AbstractSink</span> <span class="kd">implements</span> <span class="n">Configurable</span> <span class="o">{</span>
+  <span class="kd">private</span> <span class="n">String</span> <span class="n">myProp</span><span class="o">;</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">configure</span><span class="o">(</span><span class="n">Context</span> <span class="n">context</span><span class="o">)</span> <span class="o">{</span>
+    <span class="n">String</span> <span class="n">myProp</span> <span class="o">=</span> <span class="n">context</span><span class="o">.</span><span class="na">getString</span><span class="o">(</span><span class="s">&quot;myProp&quot;</span><span class="o">,</span> <span class="s">&quot;defaultValue&quot;</span><span class="o">);</span>
+
+    <span class="c1">// Process the myProp value (e.g. validation)</span>
+
+    <span class="c1">// Store myProp for later retrieval by process() method</span>
+    <span class="k">this</span><span class="o">.</span><span class="na">myProp</span> <span class="o">=</span> <span class="n">myProp</span><span class="o">;</span>
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">start</span><span class="o">()</span> <span class="o">{</span>
+    <span class="c1">// Initialize the connection to the external repository (e.g. HDFS) that</span>
+    <span class="c1">// this Sink will forward Events to ..</span>
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">stop</span> <span class="o">()</span> <span class="o">{</span>
+    <span class="c1">// Disconnect from the external repository and do any</span>
+    <span class="c1">// additional cleanup (e.g. releasing resources or nulling-out</span>
+    <span class="c1">// field values) ..</span>
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="n">Status</span> <span class="nf">process</span><span class="o">()</span> <span class="kd">throws</span> <span class="n">EventDeliveryException</span> <span class="o">{</span>
+    <span class="n">Status</span> <span class="n">status</span> <span class="o">=</span> <span class="kc">null</span><span class="o">;</span>
+
+    <span class="c1">// Start transaction</span>
+    <span class="n">Channel</span> <span class="n">ch</span> <span class="o">=</span> <span class="n">getChannel</span><span class="o">();</span>
+    <span class="n">Transaction</span> <span class="n">txn</span> <span class="o">=</span> <span class="n">ch</span><span class="o">.</span><span class="na">getTransaction</span><span class="o">();</span>
+    <span class="n">txn</span><span class="o">.</span><span class="na">begin</span><span class="o">();</span>
+    <span class="k">try</span> <span class="o">{</span>
+      <span class="c1">// This try clause includes whatever Channel operations you want to do</span>
+
+      <span class="n">Event</span> <span class="n">event</span> <span class="o">=</span> <span class="n">ch</span><span class="o">.</span><span class="na">take</span><span class="o">();</span>
+
+      <span class="c1">// Send the Event to the external repository.</span>
+      <span class="c1">// storeSomeData(e);</span>
+
+      <span class="n">txn</span><span class="o">.</span><span class="na">commit</span><span class="o">();</span>
+      <span class="n">status</span> <span class="o">=</span> <span class="n">Status</span><span class="o">.</span><span class="na">READY</span><span class="o">;</span>
+    <span class="o">}</span> <span class="k">catch</span> <span class="o">(</span><span class="n">Throwable</span> <span class="n">t</span><span class="o">)</span> <span class="o">{</span>
+      <span class="n">txn</span><span class="o">.</span><span class="na">rollback</span><span class="o">();</span>
+
+      <span class="c1">// Log exception, handle individual exceptions as needed</span>
+
+      <span class="n">status</span> <span class="o">=</span> <span class="n">Status</span><span class="o">.</span><span class="na">BACKOFF</span><span class="o">;</span>
+
+      <span class="c1">// re-throw all Errors</span>
+      <span class="k">if</span> <span class="o">(</span><span class="n">t</span> <span class="k">instanceof</span> <span class="n">Error</span><span class="o">)</span> <span class="o">{</span>
+        <span class="k">throw</span> <span class="o">(</span><span class="n">Error</span><span class="o">)</span><span class="n">t</span><span class="o">;</span>
+      <span class="o">}</span>
+    <span class="o">}</span>
+    <span class="k">return</span> <span class="n">status</span><span class="o">;</span>
+  <span class="o">}</span>
+<span class="o">}</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="source">
+<h4>Source<a class="headerlink" href="#source" title="Permalink to this headline">¶</a></h4>
+<p>The purpose of a <tt class="docutils literal"><span class="pre">Source</span></tt> is to receive data from an external client and store
+it into the configured <tt class="docutils literal"><span class="pre">Channel</span></tt>s. A <tt class="docutils literal"><span class="pre">Source</span></tt> can get an instance of its own
+<tt class="docutils literal"><span class="pre">ChannelProcessor</span></tt> to process an <tt class="docutils literal"><span class="pre">Event</span></tt>, committed within a <tt class="docutils literal"><span class="pre">Channel</span></tt>
+local transaction, in serial. In the case of an exception, required
+<tt class="docutils literal"><span class="pre">Channel</span></tt>s will propagate the exception, all <tt class="docutils literal"><span class="pre">Channel</span></tt>s will rollback their
+transaction, but events processed previously on other <tt class="docutils literal"><span class="pre">Channel</span></tt>s will remain
+committed.</p>
+<p>Similar to the <tt class="docutils literal"><span class="pre">SinkRunner.PollingRunner</span></tt> <tt class="docutils literal"><span class="pre">Runnable</span></tt>, there’s
+a <tt class="docutils literal"><span class="pre">PollingRunner</span></tt> <tt class="docutils literal"><span class="pre">Runnable</span></tt> that executes on a thread created when the
+Flume framework calls <tt class="docutils literal"><span class="pre">PollableSourceRunner.start()</span></tt>. Each configured
+<tt class="docutils literal"><span class="pre">PollableSource</span></tt> is associated with its own thread that runs a
+<tt class="docutils literal"><span class="pre">PollingRunner</span></tt>. This thread manages the <tt class="docutils literal"><span class="pre">PollableSource</span></tt>’s lifecycle,
+such as starting and stopping. A <tt class="docutils literal"><span class="pre">PollableSource</span></tt> implementation must
+implement the <tt class="docutils literal"><span class="pre">start()</span></tt> and <tt class="docutils literal"><span class="pre">stop()</span></tt> methods that are declared in the
+<tt class="docutils literal"><span class="pre">LifecycleAware</span></tt> interface. The runner of a <tt class="docutils literal"><span class="pre">PollableSource</span></tt> invokes that
+<tt class="docutils literal"><span class="pre">Source</span></tt>&#8216;s <tt class="docutils literal"><span class="pre">process()</span></tt> method. The <tt class="docutils literal"><span class="pre">process()</span></tt> method should check for
+new data and store it into the <tt class="docutils literal"><span class="pre">Channel</span></tt> as Flume <tt class="docutils literal"><span class="pre">Event</span></tt>s.</p>
+<p>Note that there are actually two types of <tt class="docutils literal"><span class="pre">Source</span></tt>s. The <tt class="docutils literal"><span class="pre">PollableSource</span></tt>
+was already mentioned. The other is the <tt class="docutils literal"><span class="pre">EventDrivenSource</span></tt>.  The
+<tt class="docutils literal"><span class="pre">EventDrivenSource</span></tt>, unlike the <tt class="docutils literal"><span class="pre">PollableSource</span></tt>, must have its own callback
+mechanism that captures the new data and stores it into the <tt class="docutils literal"><span class="pre">Channel</span></tt>. The
+<tt class="docutils literal"><span class="pre">EventDrivenSource</span></tt>s are not each driven by their own thread like the
+<tt class="docutils literal"><span class="pre">PollableSource</span></tt>s are. Below is an example of a custom <tt class="docutils literal"><span class="pre">PollableSource</span></tt>:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="kd">public</span> <span class="kd">class</span> <span class="nc">MySource</span> <span class="kd">extends</span> <span class="n">AbstractSource</span> <span class="kd">implements</span> <span class="n">Configurable</span><span class="o">,</span> <span class="n">PollableSource</span> <span class="o">{</span>
+  <span class="kd">private</span> <span class="n">String</span> <span class="n">myProp</span><span class="o">;</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">configure</span><span class="o">(</span><span class="n">Context</span> <span class="n">context</span><span class="o">)</span> <span class="o">{</span>
+    <span class="n">String</span> <span class="n">myProp</span> <span class="o">=</span> <span class="n">context</span><span class="o">.</span><span class="na">getString</span><span class="o">(</span><span class="s">&quot;myProp&quot;</span><span class="o">,</span> <span class="s">&quot;defaultValue&quot;</span><span class="o">);</span>
+
+    <span class="c1">// Process the myProp value (e.g. validation, convert to another type, ...)</span>
+
+    <span class="c1">// Store myProp for later retrieval by process() method</span>
+    <span class="k">this</span><span class="o">.</span><span class="na">myProp</span> <span class="o">=</span> <span class="n">myProp</span><span class="o">;</span>
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">start</span><span class="o">()</span> <span class="o">{</span>
+    <span class="c1">// Initialize the connection to the external client</span>
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">stop</span> <span class="o">()</span> <span class="o">{</span>
+    <span class="c1">// Disconnect from external client and do any additional cleanup</span>
+    <span class="c1">// (e.g. releasing resources or nulling-out field values) ..</span>
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="n">Status</span> <span class="nf">process</span><span class="o">()</span> <span class="kd">throws</span> <span class="n">EventDeliveryException</span> <span class="o">{</span>
+    <span class="n">Status</span> <span class="n">status</span> <span class="o">=</span> <span class="kc">null</span><span class="o">;</span>
+
+    <span class="k">try</span> <span class="o">{</span>
+      <span class="c1">// This try clause includes whatever Channel/Event operations you want to do</span>
+
+      <span class="c1">// Receive new data</span>
+      <span class="n">Event</span> <span class="n">e</span> <span class="o">=</span> <span class="n">getSomeData</span><span class="o">();</span>
+
+      <span class="c1">// Store the Event into this Source&#39;s associated Channel(s)</span>
+      <span class="n">getChannelProcessor</span><span class="o">().</span><span class="na">processEvent</span><span class="o">(</span><span class="n">e</span><span class="o">);</span>
+
+      <span class="n">status</span> <span class="o">=</span> <span class="n">Status</span><span class="o">.</span><span class="na">READY</span><span class="o">;</span>
+    <span class="o">}</span> <span class="k">catch</span> <span class="o">(</span><span class="n">Throwable</span> <span class="n">t</span><span class="o">)</span> <span class="o">{</span>
+      <span class="c1">// Log exception, handle individual exceptions as needed</span>
+
+      <span class="n">status</span> <span class="o">=</span> <span class="n">Status</span><span class="o">.</span><span class="na">BACKOFF</span><span class="o">;</span>
+
+      <span class="c1">// re-throw all Errors</span>
+      <span class="k">if</span> <span class="o">(</span><span class="n">t</span> <span class="k">instanceof</span> <span class="n">Error</span><span class="o">)</span> <span class="o">{</span>
+        <span class="k">throw</span> <span class="o">(</span><span class="n">Error</span><span class="o">)</span><span class="n">t</span><span class="o">;</span>
+      <span class="o">}</span>
+    <span class="o">}</span> <span class="k">finally</span> <span class="o">{</span>
+      <span class="n">txn</span><span class="o">.</span><span class="na">close</span><span class="o">();</span>
+    <span class="o">}</span>
+    <span class="k">return</span> <span class="n">status</span><span class="o">;</span>
+  <span class="o">}</span>
+<span class="o">}</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="channel">
+<h4>Channel<a class="headerlink" href="#channel" title="Permalink to this headline">¶</a></h4>
+<p>TBD</p>
+</div>
+<div class="section" id="initializable">
+<h4>Initializable<a class="headerlink" href="#initializable" title="Permalink to this headline">¶</a></h4>
+<p>As of Flume 1.10.0, Sources, Sinks, and Channels may implement the Initializable interface. Doing so
+allows the component to have access to the materialized configuration before any of the components have been
+started.</p>
+<p>This example shows a Sink being configured with the name of a Source. While initializing it will
+retrieve the Source from the configuration and save it. During event processing a new event will be
+sent to the Source, presumably after the event has been modified in some way.</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="kd">public</span> <span class="kd">class</span> <span class="nc">NullInitSink</span> <span class="kd">extends</span> <span class="n">NullSink</span> <span class="kd">implements</span> <span class="n">Initializable</span> <span class="o">{</span>
+
+  <span class="kd">private</span> <span class="kd">static</span> <span class="kd">final</span> <span class="n">Logger</span> <span class="n">logger</span> <span class="o">=</span> <span class="n">LoggerFactory</span><span class="o">.</span><span class="na">getLogger</span><span class="o">(</span><span class="n">NullInitSink</span><span class="o">.</span><span class="na">class</span><span class="o">);</span>
+  <span class="kd">private</span> <span class="n">String</span> <span class="n">sourceName</span> <span class="o">=</span> <span class="kc">null</span><span class="o">;</span>
+  <span class="kd">private</span> <span class="n">EventProcessor</span> <span class="n">eventProcessor</span> <span class="o">=</span> <span class="kc">null</span><span class="o">;</span>
+  <span class="kd">private</span> <span class="kt">long</span> <span class="n">total</span> <span class="o">=</span> <span class="mi">0</span><span class="o">;</span>
+
+  <span class="kd">public</span> <span class="nf">NullInitSink</span><span class="o">()</span> <span class="o">{</span>
+    <span class="kd">super</span><span class="o">();</span>
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">configure</span><span class="o">(</span><span class="n">Context</span> <span class="n">context</span><span class="o">)</span> <span class="o">{</span>
+    <span class="n">sourceName</span> <span class="o">=</span> <span class="n">context</span><span class="o">.</span><span class="na">getString</span><span class="o">(</span><span class="s">&quot;targetSource&quot;</span><span class="o">);</span>
+    <span class="kd">super</span><span class="o">.</span><span class="na">configure</span><span class="o">(</span><span class="n">context</span><span class="o">);</span>
+
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">initialize</span><span class="o">(</span><span class="n">MaterializedConfiguration</span> <span class="n">configuration</span><span class="o">)</span> <span class="o">{</span>
+    <span class="n">logger</span><span class="o">.</span><span class="na">debug</span><span class="o">(</span><span class="s">&quot;Locating source for event publishing&quot;</span><span class="o">);</span>
+    <span class="k">for</span> <span class="o">(</span><span class="n">Map</span><span class="o">.</span><span class="na">Entry</span><span class="o">&lt;</span><span class="n">String</span><span class="o">,</span> <span class="n">SourceRunner</span><span class="o">&gt;</span>  <span class="n">entry</span> <span class="o">:</span> <span class="n">configuration</span><span class="o">.</span><span class="na">getSourceRunners</span><span class="o">().</span><span class="na">entrySet</span>< [...]
+      <span class="k">if</span> <span class="o">(</span><span class="n">entry</span><span class="o">.</span><span class="na">getKey</span><span class="o">().</span><span class="na">equals</span><span class="o">(</span><span class="n">sourceName</span><span class="o">))</span> <span class="o">{</span>
+        <span class="n">Source</span> <span class="n">source</span> <span class="o">=</span> <span class="n">entry</span><span class="o">.</span><span class="na">getValue</span><span class="o">().</span><span class="na">getSource</span><span class="o">();</span>
+        <span class="k">if</span> <span class="o">(</span><span class="n">source</span> <span class="k">instanceof</span> <span class="n">EventProcessor</span><span class="o">)</span> <span class="o">{</span>
+          <span class="n">eventProcessor</span> <span class="o">=</span> <span class="o">(</span><span class="n">EventProcessor</span><span class="o">)</span> <span class="n">source</span><span class="o">;</span>
+          <span class="n">logger</span><span class="o">.</span><span class="na">debug</span><span class="o">(</span><span class="s">&quot;Found event processor {}&quot;</span><span class="o">,</span> <span class="n">source</span><span class="o">.</span><span class="na">getName</span><span class="o">());</span>
+          <span class="k">return</span><span class="o">;</span>
+        <span class="o">}</span>
+      <span class="o">}</span>
+    <span class="o">}</span>
+    <span class="n">logger</span><span class="o">.</span><span class="na">warn</span><span class="o">(</span><span class="s">&quot;No Source named {} found for republishing events.&quot;</span><span class="o">,</span> <span class="n">sourceName</span><span class="o">);</span>
+  <span class="o">}</span>
+
+  <span class="nd">@Override</span>
+  <span class="kd">public</span> <span class="n">Status</span> <span class="nf">process</span><span class="o">()</span> <span class="kd">throws</span> <span class="n">EventDeliveryException</span> <span class="o">{</span>
+    <span class="n">Status</span> <span class="n">status</span> <span class="o">=</span> <span class="n">Status</span><span class="o">.</span><span class="na">READY</span><span class="o">;</span>
+
+    <span class="n">Channel</span> <span class="n">channel</span> <span class="o">=</span> <span class="n">getChannel</span><span class="o">();</span>
+    <span class="n">Transaction</span> <span class="n">transaction</span> <span class="o">=</span> <span class="n">channel</span><span class="o">.</span><span class="na">getTransaction</span><span class="o">();</span>
+    <span class="n">Event</span> <span class="n">event</span> <span class="o">=</span> <span class="kc">null</span><span class="o">;</span>
+    <span class="n">CounterGroup</span> <span class="n">counterGroup</span> <span class="o">=</span> <span class="n">getCounterGroup</span><span class="o">();</span>
+    <span class="kt">long</span> <span class="n">batchSize</span> <span class="o">=</span> <span class="n">getBatchSize</span><span class="o">();</span>
+    <span class="kt">long</span> <span class="n">eventCounter</span> <span class="o">=</span> <span class="n">counterGroup</span><span class="o">.</span><span class="na">get</span><span class="o">(</span><span class="s">&quot;events.success&quot;</span><span class="o">);</span>
+
+    <span class="k">try</span> <span class="o">{</span>
+      <span class="n">transaction</span><span class="o">.</span><span class="na">begin</span><span class="o">();</span>
+      <span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="o">;</span>
+      <span class="k">for</span> <span class="o">(</span><span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="o">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="n">batchSize</span><span class="o">;</span> <span class="n">i</span><span class="o">++)</span> <span class="o">{</span>
+        <span class="n">event</span> <span class="o">=</span> <span class="n">channel</span><span class="o">.</span><span class="na">take</span><span class="o">();</span>
+        <span class="k">if</span> <span class="o">(</span><span class="n">event</span> <span class="o">!=</span> <span class="kc">null</span><span class="o">)</span> <span class="o">{</span>
+          <span class="kt">long</span> <span class="n">id</span> <span class="o">=</span> <span class="n">Long</span><span class="o">.</span><span class="na">parseLong</span><span class="o">(</span><span class="k">new</span> <span class="n">String</span><span class="o">(</span><span class="n">event</span><span class="o">.</span><span class="na">getBody</span><span class="o">()));</span>
+          <span class="n">total</span> <span class="o">+=</span> <span class="n">id</span><span class="o">;</span>
+          <span class="n">event</span><span class="o">.</span><span class="na">getHeaders</span><span class="o">().</span><span class="na">put</span><span class="o">(</span><span class="s">&quot;Total&quot;</span><span class="o">,</span> <span class="n">Long</span><span class="o">.</span><span class="na">toString</span><span class="o">(</span><span class="n">total</span><span class="o">));</span>
+          <span class="n">eventProcessor</span><span class="o">.</span><span class="na">processEvent</span><span class="o">(</span><span class="n">event</span><span class="o">);</span>
+          <span class="n">logger</span><span class="o">.</span><span class="na">info</span><span class="o">(</span><span class="s">&quot;Null sink {} successful processed event {}&quot;</span><span class="o">,</span> <span class="n">getName</span><span class="o">(),</span> <span class="n">id</span><span class="o">);</span>
+        <span class="o">}</span> <span class="k">else</span> <span class="o">{</span>
+          <span class="n">status</span> <span class="o">=</span> <span class="n">Status</span><span class="o">.</span><span class="na">BACKOFF</span><span class="o">;</span>
+          <span class="k">break</span><span class="o">;</span>
+        <span class="o">}</span>
+      <span class="o">}</span>
+      <span class="n">transaction</span><span class="o">.</span><span class="na">commit</span><span class="o">();</span>
+      <span class="n">counterGroup</span><span class="o">.</span><span class="na">addAndGet</span><span class="o">(</span><span class="s">&quot;events.success&quot;</span><span class="o">,</span> <span class="o">(</span><span class="kt">long</span><span class="o">)</span> <span class="n">Math</span><span class="o">.</span><span class="na">min</span><span class="o">(</span><span class="n">batchSize</span><span class="o">,</span> <span class="n">i</span><span class="o">));</span>
+      <span class="n">counterGroup</span><span class="o">.</span><span class="na">incrementAndGet</span><span class="o">(</span><span class="s">&quot;transaction.success&quot;</span><span class="o">);</span>
+    <span class="o">}</span> <span class="k">catch</span> <span class="o">(</span><span class="n">Exception</span> <span class="n">ex</span><span class="o">)</span> <span class="o">{</span>
+      <span class="n">transaction</span><span class="o">.</span><span class="na">rollback</span><span class="o">();</span>
+      <span class="n">counterGroup</span><span class="o">.</span><span class="na">incrementAndGet</span><span class="o">(</span><span class="s">&quot;transaction.failed&quot;</span><span class="o">);</span>
+      <span class="n">logger</span><span class="o">.</span><span class="na">error</span><span class="o">(</span><span class="s">&quot;Failed to deliver event. Exception follows.&quot;</span><span class="o">,</span> <span class="n">ex</span><span class="o">);</span>
+      <span class="k">throw</span> <span class="k">new</span> <span class="nf">EventDeliveryException</span><span class="o">(</span><span class="s">&quot;Failed to deliver event: &quot;</span> <span class="o">+</span> <span class="n">event</span><span class="o">,</span> <span class="n">ex</span><span class="o">);</span>
+    <span class="o">}</span> <span class="k">finally</span> <span class="o">{</span>
+      <span class="n">transaction</span><span class="o">.</span><span class="na">close</span><span class="o">();</span>
+    <span class="o">}</span>
+
+    <span class="k">return</span> <span class="n">status</span><span class="o">;</span>
+  <span class="o">}</span>
+<span class="o">}</span>
+</pre></div>
+</div>
+</div>
+</div>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="sphinxsidebar">
+        <div class="sphinxsidebarwrapper"><h3><a href="index.html">Apache Flume</a></h3>
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="getinvolved.html">How to Get Involved</a></li>
+<li class="toctree-l1"><a class="reference internal" href="download.html">Download</a></li>
+<li class="toctree-l1"><a class="reference internal" href="documentation.html">Documentation</a></li>
+<li class="toctree-l1"><a class="reference internal" href="releases/index.html">Releases</a></li>
+<li class="toctree-l1"><a class="reference internal" href="mailinglists.html">Mailing lists</a></li>
+<li class="toctree-l1"><a class="reference internal" href="team.html">Team</a></li>
+<li class="toctree-l1"><a class="reference internal" href="source.html">Source Repository</a></li>
+<li class="toctree-l1"><a class="reference internal" href="testing.html">Testing</a></li>
+<li class="toctree-l1"><a class="reference internal" href="license.html">Apache License</a></li>
+</ul>
+
+<h3>Resources</h3>
+
+<ul class="this-page-menu">
+    <li><a href="https://issues.apache.org/jira/browse/FLUME">Flume Issue Tracking (Jira)</a></li>
+    <li><a href="http://cwiki.apache.org/confluence/display/FLUME">Flume Wiki</a></li>
+    <li><a href="http://cwiki.apache.org/confluence/display/FLUME/Getting+Started">Getting Started Guide</a></li>
+</ul>
+
+<h3>Apache</h3>
+
+<ul class="this-page-menu">
+    <li><a href="http://www.apache.org">Home</a></li>
+    <li><a href="http://www.apache.org/foundation/sponsorship.html">Sponsorship</a></li>
+    <li><a href="http://www.apache.org/licenses">Licenses</a> </li>
+    <li><a href="http://www.apache.org/foundation/thanks.html">Thanks</a></li>
+    <li><a href="http://www.apachecon.com">Conferences</a></li>
+    <li><a href="http://www.apache.org/security/">Security</a></li>
+    <li><a href="https://privacy.apache.org/policies/privacy-policy-public.html">Data Privacy</a></li>
+</ul>
+
+
+<h3><a href="index.html">This Page</a></h3>
+<ul>
+<li><a class="reference internal" href="#">Flume 1.10.0 Developer Guide</a><ul>
+<li><a class="reference internal" href="#introduction">Introduction</a><ul>
+<li><a class="reference internal" href="#overview">Overview</a></li>
+<li><a class="reference internal" href="#architecture">Architecture</a><ul>
+<li><a class="reference internal" href="#data-flow-model">Data flow model</a></li>
+<li><a class="reference internal" href="#reliability">Reliability</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#building-flume">Building Flume</a><ul>
+<li><a class="reference internal" href="#getting-the-source">Getting the source</a></li>
+<li><a class="reference internal" href="#compile-test-flume">Compile/test Flume</a></li>
+<li><a class="reference internal" href="#updating-protocol-buffer-version">Updating Protocol Buffer Version</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#developing-custom-components">Developing custom components</a><ul>
+<li><a class="reference internal" href="#client">Client</a><ul>
+<li><a class="reference internal" href="#client-sdk">Client SDK</a></li>
+<li><a class="reference internal" href="#rpc-client-interface">RPC client interface</a></li>
+<li><a class="reference internal" href="#rpc-clients-avro-and-thrift">RPC clients - Avro and Thrift</a></li>
+<li><a class="reference internal" href="#secure-rpc-client-thrift">Secure RPC client - Thrift</a></li>
+<li><a class="reference internal" href="#failover-client">Failover Client</a></li>
+<li><a class="reference internal" href="#loadbalancing-rpc-client">LoadBalancing RPC client</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#embedded-agent">Embedded agent</a></li>
+<li><a class="reference internal" href="#transaction-interface">Transaction interface</a></li>
+<li><a class="reference internal" href="#sink">Sink</a></li>
+<li><a class="reference internal" href="#source">Source</a></li>
+<li><a class="reference internal" href="#channel">Channel</a></li>
+<li><a class="reference internal" href="#initializable">Initializable</a></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+<div class="footer">
+    &copy; Copyright 2009-2022 The Apache Software Foundation. Apache Flume, Flume, Apache, the Apache feather logo, and the Apache Flume project logo are trademarks of The Apache Software Foundation.
+</div>
+  </body>
+</html>
\ No newline at end of file
diff --git a/content/releases/content/1.10.0/FlumeUserGuide.html b/content/releases/content/1.10.0/FlumeUserGuide.html
new file mode 100644
index 0000000..1a094b5
--- /dev/null
+++ b/content/releases/content/1.10.0/FlumeUserGuide.html
@@ -0,0 +1,8943 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>Flume 1.10.0 User Guide &mdash; Apache Flume</title>
+    
+    <link rel="stylesheet" href="_static/flume.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    '',
+        VERSION:     '',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <link rel="top" title="Apache Flume" href="index.html" />
+    <link rel="up" title="Documentation" href="documentation.html" />
+    <link rel="next" title="Flume 1.10.0 Developer Guide" href="FlumeDeveloperGuide.html" />
+    <link rel="prev" title="Documentation" href="documentation.html" /> 
+  </head>
+  <body>
+<div class="header">
+  <table width="100%" border="0">
+    <tr>
+      <td width="10%">
+        <div class="logo">
+          <a href="index.html">
+            <img class="logo" src="_static/flume-logo.png" alt="Logo"/>
+          </a>
+        </div>
+      </td>
+      <td width="2%">
+          <span class="trademark">&trade;</span>
+      </td>
+      <td width="68%" align="center" class="pageTitle">Apache Flume<sup><span class="trademark">&trade;</span></sup>
+      </td>
+      <td width="20%">
+          <a href="http://www.apache.org">
+            <img src="_static/feather-small.png" alt="Apache Software Foundation" height="70"/>
+          </a>
+      </td>
+    </tr>
+  </table>
+</div>
+  
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <div class="section" id="flume-1-10-0-user-guide">
+<h1>Flume 1.10.0 User Guide<a class="headerlink" href="#flume-1-10-0-user-guide" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="introduction">
+<h2>Introduction<a class="headerlink" href="#introduction" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="overview">
+<h3>Overview<a class="headerlink" href="#overview" title="Permalink to this headline">¶</a></h3>
+<p>Apache Flume is a distributed, reliable, and available system for efficiently
+collecting, aggregating and moving large amounts of log data from many
+different sources to a centralized data store.</p>
+<p>The use of Apache Flume is not only restricted to log data aggregation.
+Since data sources are customizable, Flume can be used to transport massive quantities
+of event data including but not limited to network traffic data, social-media-generated data,
+email messages and pretty much any data source possible.</p>
+<p>Apache Flume is a top level project at the Apache Software Foundation.</p>
+</div>
+<div class="section" id="system-requirements">
+<h3>System Requirements<a class="headerlink" href="#system-requirements" title="Permalink to this headline">¶</a></h3>
+<ol class="arabic simple">
+<li>Java Runtime Environment - Java 1.8 or later</li>
+<li>Memory - Sufficient memory for configurations used by sources, channels or sinks</li>
+<li>Disk Space - Sufficient disk space for configurations used by channels or sinks</li>
+<li>Directory Permissions - Read/Write permissions for directories used by agent</li>
+</ol>
+</div>
+<div class="section" id="architecture">
+<h3>Architecture<a class="headerlink" href="#architecture" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="data-flow-model">
+<h4>Data flow model<a class="headerlink" href="#data-flow-model" title="Permalink to this headline">¶</a></h4>
+<p>A Flume event is defined as a unit of data flow having a byte payload and an
+optional set of string attributes. A Flume agent is a (JVM) process that hosts
+the components through which events flow from an external source to the next
+destination (hop).</p>
+<div class="figure align-center">
+<img alt="Agent component diagram" src="_images/UserGuide_image00.png" />
+</div>
+<p>A Flume source consumes events delivered to it by an external source like a web
+server. The external source sends events to Flume in a format that is
+recognized by the target Flume source. For example, an Avro Flume source can be
+used to receive Avro events from Avro clients or other Flume agents in the flow
+that send events from an Avro sink. A similar flow can be defined using
+a Thrift Flume Source to receive events from a Thrift Sink or a Flume
+Thrift Rpc Client or Thrift clients written in any language generated from
+the Flume thrift protocol. When a Flume source receives an event, it
+stores it into one or more channels. The channel is a passive store that keeps
+the event until it&#8217;s consumed by a Flume sink. The file channel is one example
+&#8211; it is backed by the local filesystem. The sink removes the event
+from the channel and puts it into an external repository like HDFS (via Flume
+HDFS sink) or forwards it to the Flume source of the next Flume agent (next
+hop) in the flow. The source and sink within the given agent run asynchronously
+with the events staged in the channel.</p>
+</div>
+<div class="section" id="complex-flows">
+<h4>Complex flows<a class="headerlink" href="#complex-flows" title="Permalink to this headline">¶</a></h4>
+<p>Flume allows a user to build multi-hop flows where events travel through
+multiple agents before reaching the final destination. It also allows fan-in
+and fan-out flows, contextual routing and backup routes (fail-over) for failed
+hops.</p>
+</div>
+<div class="section" id="reliability">
+<h4>Reliability<a class="headerlink" href="#reliability" title="Permalink to this headline">¶</a></h4>
+<p>The events are staged in a channel on each agent. The events are then delivered
+to the next agent or terminal repository (like HDFS) in the flow. The events
+are removed from a channel only after they are stored in the channel of next
+agent or in the terminal repository. This is how the single-hop message
+delivery semantics in Flume provide end-to-end reliability of the flow.</p>
+<p>Flume uses a transactional approach to guarantee the reliable delivery of the
+events. The sources and sinks encapsulate in a transaction the
+storage/retrieval, respectively, of the events placed in or provided by a
+transaction provided by the channel.  This ensures that the set of events are
+reliably passed from point to point in the flow. In the case of a multi-hop
+flow, the sink from the previous hop and the source from the next hop both have
+their transactions running to ensure that the data is safely stored in the
+channel of the next hop.</p>
+</div>
+<div class="section" id="recoverability">
+<h4>Recoverability<a class="headerlink" href="#recoverability" title="Permalink to this headline">¶</a></h4>
+<p>The events are staged in the channel, which manages recovery from failure.
+Flume supports a durable file channel which is backed by the local file system.
+There&#8217;s also a memory channel which simply stores the events in an in-memory
+queue, which is faster but any events still left in the memory channel when an
+agent process dies can&#8217;t be recovered.</p>
+<p>Flume&#8217;s <cite>KafkaChannel</cite> uses Apache Kafka to stage events. Using a replicated
+Kafka topic as a channel helps avoid event loss in case of a disk failure.</p>
+</div>
+</div>
+</div>
+<div class="section" id="setup">
+<h2>Setup<a class="headerlink" href="#setup" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="setting-up-an-agent">
+<h3>Setting up an agent<a class="headerlink" href="#setting-up-an-agent" title="Permalink to this headline">¶</a></h3>
+<p>Flume agent configuration is stored in one or more configuration files that
+follow the Java properties file format. Configurations for one or more agents
+can be specified in these configuration files. The configuration includes
+properties of each source, sink and channel in an agent and how they are wired
+together to form data flows.</p>
+<div class="section" id="configuring-individual-components">
+<h4>Configuring individual components<a class="headerlink" href="#configuring-individual-components" title="Permalink to this headline">¶</a></h4>
+<p>Each component (source, sink or channel) in the flow has a name, type, and set
+of properties that are specific to the type and instantiation. For example, an
+Avro source needs a hostname (or IP address) and a port number to receive data
+from. A memory channel can have max queue size (&#8220;capacity&#8221;), and an HDFS sink
+needs to know the file system URI, path to create files, frequency of file
+rotation (&#8220;hdfs.rollInterval&#8221;) etc. All such attributes of a component needs to
+be set in the properties file of the hosting Flume agent.</p>
+</div>
+<div class="section" id="wiring-the-pieces-together">
+<h4>Wiring the pieces together<a class="headerlink" href="#wiring-the-pieces-together" title="Permalink to this headline">¶</a></h4>
+<p>The agent needs to know what individual components to load and how they are
+connected in order to constitute the flow. This is done by listing the names of
+each of the sources, sinks and channels in the agent, and then specifying the
+connecting channel for each sink and source. For example, an agent flows events
+from an Avro source called avroWeb to HDFS sink hdfs-cluster1 via a file
+channel called file-channel. The configuration file will contain names of these
+components and file-channel as a shared channel for both avroWeb source and
+hdfs-cluster1 sink.</p>
+</div>
+<div class="section" id="starting-an-agent">
+<h4>Starting an agent<a class="headerlink" href="#starting-an-agent" title="Permalink to this headline">¶</a></h4>
+<p>An agent is started using a shell script called flume-ng which is located in
+the bin directory of the Flume distribution. You need to specify the agent
+name, the config directory, and the config file on the command line:</p>
+<div class="highlight-none"><div class="highlight"><pre>$ bin/flume-ng agent -n $agent_name -c conf -f conf/flume-conf.properties.template
+</pre></div>
+</div>
+<p>Now the agent will start running source and sinks configured in the given
+properties file.</p>
+</div>
+<div class="section" id="a-simple-example">
+<h4>A simple example<a class="headerlink" href="#a-simple-example" title="Permalink to this headline">¶</a></h4>
+<p>Here, we give an example configuration file, describing a single-node Flume deployment.
+This configuration lets a user generate events and subsequently logs them to the console.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># example.conf: A single-node Flume configuration</span>
+
+<span class="c"># Name the components on this agent</span>
+<span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+
+<span class="c"># Describe/configure the source</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">netcat</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">localhost</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">44444</span>
+
+<span class="c"># Describe the sink</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">logger</span>
+
+<span class="c"># Use a channel which buffers events in memory</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">memory</span>
+<span class="na">a1.channels.c1.capacity</span> <span class="o">=</span> <span class="s">1000</span>
+<span class="na">a1.channels.c1.transactionCapacity</span> <span class="o">=</span> <span class="s">100</span>
+
+<span class="c"># Bind the source and sink to the channel</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+<p>This configuration defines a single agent named a1. a1 has a source that listens for data on port 44444, a channel
+that buffers event data in memory, and a sink that logs event data to the console. The configuration file names the
+various components, then describes their types and configuration parameters. A given configuration file might define
+several named agents; when a given Flume process is launched a flag is passed telling it which named agent to manifest.</p>
+<p>Given this configuration file, we can start Flume as follows:</p>
+<div class="highlight-none"><div class="highlight"><pre>$ bin/flume-ng agent --conf conf --conf-file example.conf --name a1 -Dflume.root.logger=INFO,console
+</pre></div>
+</div>
+<p>Note that in a full deployment we would typically include one more option: <tt class="docutils literal"><span class="pre">--conf=&lt;conf-dir&gt;</span></tt>.
+The <tt class="docutils literal"><span class="pre">&lt;conf-dir&gt;</span></tt> directory would include a shell script <em>flume-env.sh</em> and potentially a log4j properties file.
+In this example, we pass a Java option to force Flume to log to the console and we go without a custom environment script.</p>
+<p>From a separate terminal, we can then telnet port 44444 and send Flume an event:</p>
+<div class="highlight-properties"><pre>$ telnet localhost 44444
+Trying 127.0.0.1...
+Connected to localhost.localdomain (127.0.0.1).
+Escape character is '^]'.
+Hello world! &lt;ENTER&gt;
+OK</pre>
+</div>
+<p>The original Flume terminal will output the event in a log message.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">12/06/19 15</span><span class="o">:</span><span class="s">32:19 INFO source.NetcatSource: Source starting</span>
+<span class="na">12/06/19 15</span><span class="o">:</span><span class="s">32:19 INFO source.NetcatSource: Created serverSocket:sun.nio.ch.ServerSocketChannelImpl[/127.0.0.1:44444]</span>
+<span class="na">12/06/19 15</span><span class="o">:</span><span class="s">32:34 INFO sink.LoggerSink: Event: { headers:{} body: 48 65 6C 6C 6F 20 77 6F 72 6C 64 21 0D          Hello world!. }</span>
+</pre></div>
+</div>
+<p>Congratulations - you&#8217;ve successfully configured and deployed a Flume agent! Subsequent sections cover agent configuration in much more detail.</p>
+</div>
+<div class="section" id="configuration-from-uris">
+<h4>Configuration from URIs<a class="headerlink" href="#configuration-from-uris" title="Permalink to this headline">¶</a></h4>
+<p>As of version 1.10.0 Flume supports being configured using URIs instead of just from local files. Direct support
+for HTTP(S), file, and classpath URIs is included. The HTTP support includes support for authentication using
+basic authorization but other authorization mechanisms may be supported by specifying the fully qualified name
+of the class that implements the AuthorizationProvider interface using the &#8211;auth-provider option. HTTP also
+supports reloading of configuration files using polling if the target server properly responds to the If-Modified-Since
+header.</p>
+<p>To specify credentials for HTTP authentication add:</p>
+<div class="highlight-none"><div class="highlight"><pre>--conf-user userid --conf-password password
+</pre></div>
+</div>
+<p>to the startup command.</p>
+</div>
+<div class="section" id="multiple-configuration-files">
+<h4>Multiple Configuration Files<a class="headerlink" href="#multiple-configuration-files" title="Permalink to this headline">¶</a></h4>
+<p>As of version 1.10.0 Flume supports being configured from multiple configuration files instead of just one.
+This more easily allows values to be overridden or added based on specific environments. Each file should
+be configured using its own &#8211;conf-file or &#8211;conf-uri option. However, all files should either be provided
+with &#8211;conf-file or with &#8211;conf-uri. If &#8211;conf-file and &#8211;conf-uri appear together as options all &#8211;conf-uri
+configurations will be processed before any of the &#8211;conf-file configurations are merged.</p>
+<p>For example, a configuration of:</p>
+<div class="highlight-none"><div class="highlight"><pre>$ bin/flume-ng agent --conf conf --conf-file example.conf --conf-uri http://localhost:80/flume.conf --conf-uri http://localhost:80/override.conf --name a1 -Dflume.root.logger=INFO,console
+</pre></div>
+</div>
+<p>will cause flume.conf to be read first, override.conf to be merged with it and finally example.conf would be
+merged last. If it is desired to have example.conf be the base configuration it should be specified using the
+&#8211;conf-uri option either as:</p>
+<div class="highlight-none"><div class="highlight"><pre>--conf-uri classpath://example.conf
+or
+--conf-uri file:///example.conf
+</pre></div>
+</div>
+<p>depending on how it should be accessed.</p>
+</div>
+<div class="section" id="using-environment-variables-system-properies-or-other-properties-configuration-files">
+<h4>Using environment variables, system properties, or other properties configuration files<a class="headerlink" href="#using-environment-variables-system-properies-or-other-properties-configuration-files" title="Permalink to this headline">¶</a></h4>
+<p>Flume has the ability to substitute environment variables in the configuration. For example:</p>
+<div class="highlight-none"><div class="highlight"><pre>a1.sources = r1
+a1.sources.r1.type = netcat
+a1.sources.r1.bind = 0.0.0.0
+a1.sources.r1.port = ${env:NC_PORT}
+a1.sources.r1.channels = c1
+</pre></div>
+</div>
+<p>NB: it currently works for values only, not for keys. (I.e. only on the &#8220;right side&#8221; of the <cite>=</cite> mark of the config lines.)</p>
+<p>As of version 1.10.0 Flume resolves configuration values using Apache Commons Text&#8217;s StringSubstitutor
+class using the default set of Lookups along with a lookup that uses the configuration files as a
+source for replacement values.</p>
+<dl class="docutils">
+<dt>For example:</dt>
+<dd>$ NC_PORT=44444 bin/flume-ng agent &#8211;conf conf &#8211;conf-file example.conf &#8211;name a1 -Dflume.root.logger=INFO,console</dd>
+</dl>
+<p>Note the above is just an example, environment variables can be configured in other ways, including being set in <cite>conf/flume-env.sh</cite>.</p>
+<p>As noted, system properties are also supported, so the configuration:</p>
+<div class="highlight-none"><div class="highlight"><pre>a1.sources = r1
+a1.sources.r1.type = netcat
+a1.sources.r1.bind = 0.0.0.0
+a1.sources.r1.port = ${sys:NC_PORT}
+a1.sources.r1.channels = c1
+</pre></div>
+</div>
+<p>could be used and the startup command could be:</p>
+<div class="highlight-none"><div class="highlight"><pre>$ bin/flume-ng agent --conf conf --conf-file example.conf --name a1 -Dflume.root.logger=INFO,console -DNC_PORT=44444
+</pre></div>
+</div>
+<p>Furthermore, because multiple configuration files are allowed the first file could contain:</p>
+<div class="highlight-none"><div class="highlight"><pre>a1.sources = r1
+a1.sources.r1.type = netcat
+a1.sources.r1.bind = 0.0.0.0
+a1.sources.r1.port = ${NC_PORT}
+a1.sources.r1.channels = c1
+</pre></div>
+</div>
+<p>and the override file could contain:</p>
+<div class="highlight-none"><div class="highlight"><pre>NC_PORT = 44444
+</pre></div>
+</div>
+<p>In this case the startup command could be:</p>
+<div class="highlight-none"><div class="highlight"><pre>$ bin/flume-ng agent --conf conf --conf-file example.conf --conf-file override.conf --name a1 -Dflume.root.logger=INFO,console
+</pre></div>
+</div>
+<p>Note that the method for specifying environment variables as was done in prior versions will still work
+but has been deprecated in favor of using ${env:varName}.</p>
+</div>
+<div class="section" id="using-a-command-options-file">
+<h4>Using a command options file<a class="headerlink" href="#using-a-command-options-file" title="Permalink to this headline">¶</a></h4>
+<p>Instead of specifying all the command options on the command line as of version 1.10.0 command
+options may be placed in either /etc/flume/flume.opts or flume.opts on the classpath. An example
+might be:</p>
+<div class="highlight-none"><div class="highlight"><pre>conf-file = example.conf
+conf-file = override.conf
+name = a1
+</pre></div>
+</div>
+</div>
+<div class="section" id="logging-raw-data">
+<h4>Logging raw data<a class="headerlink" href="#logging-raw-data" title="Permalink to this headline">¶</a></h4>
+<p>Logging the raw stream of data flowing through the ingest pipeline is not desired behavior in
+many production environments because this may result in leaking sensitive data or security related
+configurations, such as secret keys, to Flume log files.
+By default, Flume will not log such information. On the other hand, if the data pipeline is broken,
+Flume will attempt to provide clues for debugging the problem.</p>
+<p>One way to debug problems with event pipelines is to set up an additional <a class="reference internal" href="#memory-channel">Memory Channel</a>
+connected to a <a class="reference internal" href="#logger-sink">Logger Sink</a>, which will output all event data to the Flume logs.
+In some situations, however, this approach is insufficient.</p>
+<p>In order to enable logging of event- and configuration-related data, some Java system properties
+must be set in addition to log4j properties.</p>
+<p>To enable configuration-related logging, set the Java system property
+<tt class="docutils literal"><span class="pre">-Dorg.apache.flume.log.printconfig=true</span></tt>. This can either be passed on the command line or by
+setting this in the <tt class="docutils literal"><span class="pre">JAVA_OPTS</span></tt> variable in <em>flume-env.sh</em>.</p>
+<p>To enable data logging, set the Java system property <tt class="docutils literal"><span class="pre">-Dorg.apache.flume.log.rawdata=true</span></tt>
+in the same way described above. For most components, the log4j logging level must also be set to
+DEBUG or TRACE to make event-specific logging appear in the Flume logs.</p>
+<p>Here is an example of enabling both configuration logging and raw data logging while also
+setting the Log4j loglevel to DEBUG for console output:</p>
+<div class="highlight-none"><div class="highlight"><pre>$ bin/flume-ng agent --conf conf --conf-file example.conf --name a1 -Dflume.root.logger=DEBUG,console -Dorg.apache.flume.log.printconfig=true -Dorg.apache.flume.log.rawdata=true
+</pre></div>
+</div>
+</div>
+<div class="section" id="zookeeper-based-configuration">
+<h4>Zookeeper based Configuration<a class="headerlink" href="#zookeeper-based-configuration" title="Permalink to this headline">¶</a></h4>
+<p>Flume supports Agent configurations via Zookeeper. <em>This is an experimental feature.</em> The configuration file needs to be uploaded
+in the Zookeeper, under a configurable prefix. The configuration file is stored in Zookeeper Node data.
+Following is how the Zookeeper Node tree would look like for agents a1 and a2</p>
+<div class="highlight-properties"><pre>- /flume
+ |- /a1 [Agent config file]
+ |- /a2 [Agent config file]</pre>
+</div>
+<p>Once the configuration file is uploaded, start the agent with following options</p>
+<blockquote>
+<div>$ bin/flume-ng agent &#8211;conf conf -z zkhost:2181,zkhost1:2181 -p /flume &#8211;name a1 -Dflume.root.logger=INFO,console</div></blockquote>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="15%" />
+<col width="68%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Argument Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>z</strong></td>
+<td>&#8211;</td>
+<td>Zookeeper connection string. Comma separated list of hostname:port</td>
+</tr>
+<tr class="row-odd"><td><strong>p</strong></td>
+<td>/flume</td>
+<td>Base Path in Zookeeper to store Agent configurations</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="installing-third-party-plugins">
+<h4>Installing third-party plugins<a class="headerlink" href="#installing-third-party-plugins" title="Permalink to this headline">¶</a></h4>
+<p>Flume has a fully plugin-based architecture. While Flume ships with many
+out-of-the-box sources, channels, sinks, serializers, and the like, many
+implementations exist which ship separately from Flume.</p>
+<p>While it has always been possible to include custom Flume components by
+adding their jars to the FLUME_CLASSPATH variable in the flume-env.sh file,
+Flume now supports a special directory called <tt class="docutils literal"><span class="pre">plugins.d</span></tt> which automatically
+picks up plugins that are packaged in a specific format. This allows for easier
+management of plugin packaging issues as well as simpler debugging and
+troubleshooting of several classes of issues, especially library dependency
+conflicts.</p>
+<div class="section" id="the-plugins-d-directory">
+<h5>The plugins.d directory<a class="headerlink" href="#the-plugins-d-directory" title="Permalink to this headline">¶</a></h5>
+<p>The <tt class="docutils literal"><span class="pre">plugins.d</span></tt> directory is located at <tt class="docutils literal"><span class="pre">$FLUME_HOME/plugins.d</span></tt>. At startup
+time, the <tt class="docutils literal"><span class="pre">flume-ng</span></tt> start script looks in the <tt class="docutils literal"><span class="pre">plugins.d</span></tt> directory for
+plugins that conform to the below format and includes them in proper paths when
+starting up <tt class="docutils literal"><span class="pre">java</span></tt>.</p>
+</div>
+<div class="section" id="directory-layout-for-plugins">
+<h5>Directory layout for plugins<a class="headerlink" href="#directory-layout-for-plugins" title="Permalink to this headline">¶</a></h5>
+<p>Each plugin (subdirectory) within <tt class="docutils literal"><span class="pre">plugins.d</span></tt> can have up to three
+sub-directories:</p>
+<ol class="arabic simple">
+<li>lib - the plugin&#8217;s jar(s)</li>
+<li>libext - the plugin&#8217;s dependency jar(s)</li>
+<li>native - any required native libraries, such as <tt class="docutils literal"><span class="pre">.so</span></tt> files</li>
+</ol>
+<p>Example of two plugins within the plugins.d directory:</p>
+<div class="highlight-none"><div class="highlight"><pre>plugins.d/
+plugins.d/custom-source-1/
+plugins.d/custom-source-1/lib/my-source.jar
+plugins.d/custom-source-1/libext/spring-core-2.5.6.jar
+plugins.d/custom-source-2/
+plugins.d/custom-source-2/lib/custom.jar
+plugins.d/custom-source-2/native/gettext.so
+</pre></div>
+</div>
+</div>
+</div>
+</div>
+<div class="section" id="data-ingestion">
+<h3>Data ingestion<a class="headerlink" href="#data-ingestion" title="Permalink to this headline">¶</a></h3>
+<p>Flume supports a number of mechanisms to ingest data from external sources.</p>
+<div class="section" id="rpc">
+<h4>RPC<a class="headerlink" href="#rpc" title="Permalink to this headline">¶</a></h4>
+<p>An Avro client included in the Flume distribution can send a given file to
+Flume Avro source using avro RPC mechanism:</p>
+<div class="highlight-none"><div class="highlight"><pre>$ bin/flume-ng avro-client -H localhost -p 41414 -F /usr/logs/log.10
+</pre></div>
+</div>
+<p>The above command will send the contents of /usr/logs/log.10 to the Flume
+source listening on that port.</p>
+</div>
+<div class="section" id="executing-commands">
+<h4>Executing commands<a class="headerlink" href="#executing-commands" title="Permalink to this headline">¶</a></h4>
+<p>There&#8217;s an exec source that executes a given command and consumes the output.
+An event is a single &#8216;line&#8217; of output, i.e. text followed by carriage return (&#8216;\r&#8217;) or line
+feed (&#8216;\n&#8217;) or both together.</p>
+</div>
+<div class="section" id="network-streams">
+<h4>Network streams<a class="headerlink" href="#network-streams" title="Permalink to this headline">¶</a></h4>
+<p>Flume supports the following mechanisms to read data from popular log stream
+types, such as:</p>
+<ol class="arabic simple">
+<li>Avro</li>
+<li>Thrift</li>
+<li>Syslog</li>
+<li>Netcat</li>
+</ol>
+</div>
+</div>
+<div class="section" id="setting-multi-agent-flow">
+<h3>Setting multi-agent flow<a class="headerlink" href="#setting-multi-agent-flow" title="Permalink to this headline">¶</a></h3>
+<div class="figure align-center">
+<img alt="Two agents communicating over Avro RPC" src="_images/UserGuide_image03.png" />
+</div>
+<p>In order to flow the data across multiple agents or hops, the sink of the
+previous agent and source of the current hop need to be avro type with the sink
+pointing to the hostname (or IP address) and port of the source.</p>
+</div>
+<div class="section" id="consolidation">
+<h3>Consolidation<a class="headerlink" href="#consolidation" title="Permalink to this headline">¶</a></h3>
+<p>A very common scenario in log collection is a large number of log producing
+clients sending data to a few consumer agents that are attached to the storage
+subsystem. For example, logs collected from hundreds of web servers sent to a
+dozen of agents that write to HDFS cluster.</p>
+<div class="figure align-center">
+<img alt="A fan-in flow using Avro RPC to consolidate events in one place" src="_images/UserGuide_image02.png" />
+</div>
+<p>This can be achieved in Flume by configuring a number of first tier agents with
+an avro sink, all pointing to an avro source of a single agent (Again, you could
+use the thrift sources/sinks/clients in such a scenario). This source
+on the second tier agent consolidates the received events into a single
+channel which is consumed by a sink to its final destination.</p>
+</div>
+<div class="section" id="multiplexing-the-flow">
+<h3>Multiplexing the flow<a class="headerlink" href="#multiplexing-the-flow" title="Permalink to this headline">¶</a></h3>
+<p>Flume supports multiplexing the event flow to one or more destinations. This is
+achieved by defining a flow multiplexer that can replicate or selectively route
+an event to one or more channels.</p>
+<div class="figure align-center">
+<img alt="A fan-out flow using a (multiplexing) channel selector" src="_images/UserGuide_image01.png" />
+</div>
+<p>The above example shows a source from agent &#8220;foo&#8221; fanning out the flow to three
+different channels. This fan out can be replicating or multiplexing. In case of
+replicating flow, each event is sent to all three channels. For the
+multiplexing case, an event is delivered to a subset of available channels when
+an event&#8217;s attribute matches a preconfigured value. For example, if an event
+attribute called &#8220;txnType&#8221; is set to &#8220;customer&#8221;, then it should go to channel1
+and channel3, if it&#8217;s &#8220;vendor&#8221; then it should go to channel2, otherwise
+channel3. The mapping can be set in the agent&#8217;s configuration file.</p>
+</div>
+</div>
+<div class="section" id="configuration">
+<h2>Configuration<a class="headerlink" href="#configuration" title="Permalink to this headline">¶</a></h2>
+<p>As mentioned in the earlier section, Flume agent configuration is read from a
+file that resembles a Java property file format with hierarchical property
+settings.</p>
+<div class="section" id="defining-the-flow">
+<h3>Defining the flow<a class="headerlink" href="#defining-the-flow" title="Permalink to this headline">¶</a></h3>
+<p>To define the flow within a single agent, you need to link the sources and
+sinks via a channel. You need to list the sources, sinks and channels for the
+given agent, and then point the source and sink to a channel. A source instance
+can specify multiple channels, but a sink instance can only specify one channel.
+The format is as follows:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># list the sources, sinks and channels for the agent</span>
+<span class="na">&lt;Agent&gt;.sources</span> <span class="o">=</span> <span class="s">&lt;Source&gt;</span>
+<span class="na">&lt;Agent&gt;.sinks</span> <span class="o">=</span> <span class="s">&lt;Sink&gt;</span>
+<span class="na">&lt;Agent&gt;.channels</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt; &lt;Channel2&gt;</span>
+
+<span class="c"># set channel for source</span>
+<span class="na">&lt;Agent&gt;.sources.&lt;Source&gt;.channels</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt; &lt;Channel2&gt; ...</span>
+
+<span class="c"># set channel for sink</span>
+<span class="na">&lt;Agent&gt;.sinks.&lt;Sink&gt;.channel</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt;</span>
+</pre></div>
+</div>
+<p>For example, an agent named agent_foo is reading data from an external avro client and sending
+it to HDFS via a memory channel. The config file weblog.config could look like:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># list the sources, sinks and channels for the agent</span>
+<span class="na">agent_foo.sources</span> <span class="o">=</span> <span class="s">avro-appserver-src-1</span>
+<span class="na">agent_foo.sinks</span> <span class="o">=</span> <span class="s">hdfs-sink-1</span>
+<span class="na">agent_foo.channels</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+
+<span class="c"># set channel for source</span>
+<span class="na">agent_foo.sources.avro-appserver-src-1.channels</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+
+<span class="c"># set channel for sink</span>
+<span class="na">agent_foo.sinks.hdfs-sink-1.channel</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+</pre></div>
+</div>
+<p>This will make the events flow from avro-appserver-src-1 to hdfs-sink-1
+through the memory channel mem-channel-1. When the agent is started with the
+weblog.config as its config file, it will instantiate that flow.</p>
+</div>
+<div class="section" id="id1">
+<h3>Configuring individual components<a class="headerlink" href="#id1" title="Permalink to this headline">¶</a></h3>
+<p>After defining the flow, you need to set properties of each source, sink and
+channel. This is done in the same hierarchical namespace fashion where you set
+the component type and other values for the properties specific to each
+component:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># properties for sources</span>
+<span class="na">&lt;Agent&gt;.sources.&lt;Source&gt;.&lt;someProperty&gt;</span> <span class="o">=</span> <span class="s">&lt;someValue&gt;</span>
+
+<span class="c"># properties for channels</span>
+<span class="na">&lt;Agent&gt;.channels.&lt;Channel&gt;.&lt;someProperty&gt;</span> <span class="o">=</span> <span class="s">&lt;someValue&gt;</span>
+
+<span class="c"># properties for sinks</span>
+<span class="na">&lt;Agent&gt;.sinks.&lt;Sink&gt;.&lt;someProperty&gt;</span> <span class="o">=</span> <span class="s">&lt;someValue&gt;</span>
+</pre></div>
+</div>
+<p>The property &#8220;type&#8221; needs to be set for each component for Flume to understand
+what kind of object it needs to be. Each source, sink and channel type has its
+own set of properties required for it to function as intended. All those need
+to be set as needed. In the previous example, we have a flow from
+avro-AppSrv-source to hdfs-Cluster1-sink through the memory channel
+mem-channel-1. Here&#8217;s an example that shows configuration of each of those
+components:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">agent_foo.sources</span> <span class="o">=</span> <span class="s">avro-AppSrv-source</span>
+<span class="na">agent_foo.sinks</span> <span class="o">=</span> <span class="s">hdfs-Cluster1-sink</span>
+<span class="na">agent_foo.channels</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+
+<span class="c"># set channel for sources, sinks</span>
+
+<span class="c"># properties of avro-AppSrv-source</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source.type</span> <span class="o">=</span> <span class="s">avro</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source.bind</span> <span class="o">=</span> <span class="s">localhost</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source.port</span> <span class="o">=</span> <span class="s">10000</span>
+
+<span class="c"># properties of mem-channel-1</span>
+<span class="na">agent_foo.channels.mem-channel-1.type</span> <span class="o">=</span> <span class="s">memory</span>
+<span class="na">agent_foo.channels.mem-channel-1.capacity</span> <span class="o">=</span> <span class="s">1000</span>
+<span class="na">agent_foo.channels.mem-channel-1.transactionCapacity</span> <span class="o">=</span> <span class="s">100</span>
+
+<span class="c"># properties of hdfs-Cluster1-sink</span>
+<span class="na">agent_foo.sinks.hdfs-Cluster1-sink.type</span> <span class="o">=</span> <span class="s">hdfs</span>
+<span class="na">agent_foo.sinks.hdfs-Cluster1-sink.hdfs.path</span> <span class="o">=</span> <span class="s">hdfs://namenode/flume/webdata</span>
+
+<span class="c">#...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="adding-multiple-flows-in-an-agent">
+<h3>Adding multiple flows in an agent<a class="headerlink" href="#adding-multiple-flows-in-an-agent" title="Permalink to this headline">¶</a></h3>
+<p>A single Flume agent can contain several independent flows. You can list
+multiple sources, sinks and channels in a config. These components can be
+linked to form multiple flows:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># list the sources, sinks and channels for the agent</span>
+<span class="na">&lt;Agent&gt;.sources</span> <span class="o">=</span> <span class="s">&lt;Source1&gt; &lt;Source2&gt;</span>
+<span class="na">&lt;Agent&gt;.sinks</span> <span class="o">=</span> <span class="s">&lt;Sink1&gt; &lt;Sink2&gt;</span>
+<span class="na">&lt;Agent&gt;.channels</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt; &lt;Channel2&gt;</span>
+</pre></div>
+</div>
+<p>Then you can link the sources and sinks to their corresponding channels (for
+sources) or channel (for sinks) to setup two different flows. For example, if
+you need to setup two flows in an agent, one going from an external avro client
+to external HDFS and another from output of a tail to avro sink, then here&#8217;s a
+config to do that:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># list the sources, sinks and channels in the agent</span>
+<span class="na">agent_foo.sources</span> <span class="o">=</span> <span class="s">avro-AppSrv-source1 exec-tail-source2</span>
+<span class="na">agent_foo.sinks</span> <span class="o">=</span> <span class="s">hdfs-Cluster1-sink1 avro-forward-sink2</span>
+<span class="na">agent_foo.channels</span> <span class="o">=</span> <span class="s">mem-channel-1 file-channel-2</span>
+
+<span class="c"># flow #1 configuration</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.channels</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+<span class="na">agent_foo.sinks.hdfs-Cluster1-sink1.channel</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+
+<span class="c"># flow #2 configuration</span>
+<span class="na">agent_foo.sources.exec-tail-source2.channels</span> <span class="o">=</span> <span class="s">file-channel-2</span>
+<span class="na">agent_foo.sinks.avro-forward-sink2.channel</span> <span class="o">=</span> <span class="s">file-channel-2</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="configuring-a-multi-agent-flow">
+<h3>Configuring a multi agent flow<a class="headerlink" href="#configuring-a-multi-agent-flow" title="Permalink to this headline">¶</a></h3>
+<p>To setup a multi-tier flow, you need to have an avro/thrift sink of first hop
+pointing to avro/thrift source of the next hop. This will result in the first
+Flume agent forwarding events to the next Flume agent. For example, if you are
+periodically sending files (1 file per event) using avro client to a local
+Flume agent, then this local agent can forward it to another agent that has the
+storage mounted.</p>
+<p>Weblog agent config:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># list sources, sinks and channels in the agent</span>
+<span class="na">agent_foo.sources</span> <span class="o">=</span> <span class="s">avro-AppSrv-source</span>
+<span class="na">agent_foo.sinks</span> <span class="o">=</span> <span class="s">avro-forward-sink</span>
+<span class="na">agent_foo.channels</span> <span class="o">=</span> <span class="s">file-channel</span>
+
+<span class="c"># define the flow</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source.channels</span> <span class="o">=</span> <span class="s">file-channel</span>
+<span class="na">agent_foo.sinks.avro-forward-sink.channel</span> <span class="o">=</span> <span class="s">file-channel</span>
+
+<span class="c"># avro sink properties</span>
+<span class="na">agent_foo.sinks.avro-forward-sink.type</span> <span class="o">=</span> <span class="s">avro</span>
+<span class="na">agent_foo.sinks.avro-forward-sink.hostname</span> <span class="o">=</span> <span class="s">10.1.1.100</span>
+<span class="na">agent_foo.sinks.avro-forward-sink.port</span> <span class="o">=</span> <span class="s">10000</span>
+
+<span class="c"># configure other pieces</span>
+<span class="c">#...</span>
+</pre></div>
+</div>
+<p>HDFS agent config:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># list sources, sinks and channels in the agent</span>
+<span class="na">agent_foo.sources</span> <span class="o">=</span> <span class="s">avro-collection-source</span>
+<span class="na">agent_foo.sinks</span> <span class="o">=</span> <span class="s">hdfs-sink</span>
+<span class="na">agent_foo.channels</span> <span class="o">=</span> <span class="s">mem-channel</span>
+
+<span class="c"># define the flow</span>
+<span class="na">agent_foo.sources.avro-collection-source.channels</span> <span class="o">=</span> <span class="s">mem-channel</span>
+<span class="na">agent_foo.sinks.hdfs-sink.channel</span> <span class="o">=</span> <span class="s">mem-channel</span>
+
+<span class="c"># avro source properties</span>
+<span class="na">agent_foo.sources.avro-collection-source.type</span> <span class="o">=</span> <span class="s">avro</span>
+<span class="na">agent_foo.sources.avro-collection-source.bind</span> <span class="o">=</span> <span class="s">10.1.1.100</span>
+<span class="na">agent_foo.sources.avro-collection-source.port</span> <span class="o">=</span> <span class="s">10000</span>
+
+<span class="c"># configure other pieces</span>
+<span class="c">#...</span>
+</pre></div>
+</div>
+<p>Here we link the avro-forward-sink from the weblog agent to the
+avro-collection-source of the hdfs agent. This will result in the events coming
+from the external appserver source eventually getting stored in HDFS.</p>
+</div>
+<div class="section" id="fan-out-flow">
+<h3>Fan out flow<a class="headerlink" href="#fan-out-flow" title="Permalink to this headline">¶</a></h3>
+<p>As discussed in previous section, Flume supports fanning out the flow from one
+source to multiple channels. There are two modes of fan out, replicating and
+multiplexing. In the replicating flow, the event is sent to all the configured
+channels. In case of multiplexing, the event is sent to only a subset of
+qualifying channels. To fan out the flow, one needs to specify a list of
+channels for a source and the policy for fanning it out. This is done by
+adding a channel &#8220;selector&#8221; that can be replicating or multiplexing. Then
+further specify the selection rules if it&#8217;s a multiplexer. If you don&#8217;t specify
+a selector, then by default it&#8217;s replicating:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># List the sources, sinks and channels for the agent</span>
+<span class="na">&lt;Agent&gt;.sources</span> <span class="o">=</span> <span class="s">&lt;Source1&gt;</span>
+<span class="na">&lt;Agent&gt;.sinks</span> <span class="o">=</span> <span class="s">&lt;Sink1&gt; &lt;Sink2&gt;</span>
+<span class="na">&lt;Agent&gt;.channels</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt; &lt;Channel2&gt;</span>
+
+<span class="c"># set list of channels for source (separated by space)</span>
+<span class="na">&lt;Agent&gt;.sources.&lt;Source1&gt;.channels</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt; &lt;Channel2&gt;</span>
+
+<span class="c"># set channel for sinks</span>
+<span class="na">&lt;Agent&gt;.sinks.&lt;Sink1&gt;.channel</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt;</span>
+<span class="na">&lt;Agent&gt;.sinks.&lt;Sink2&gt;.channel</span> <span class="o">=</span> <span class="s">&lt;Channel2&gt;</span>
+
+<span class="na">&lt;Agent&gt;.sources.&lt;Source1&gt;.selector.type</span> <span class="o">=</span> <span class="s">replicating</span>
+</pre></div>
+</div>
+<p>The multiplexing selector has a further set of properties to bifurcate the flow.
+This requires specifying a mapping of an event attribute to a set of channels.
+The selector checks for each configured attribute in the event header. If it
+matches the specified value, then that event is sent to all the channels mapped
+to that value. If there&#8217;s no match, then the event is sent to set of channels
+configured as default:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># Mapping for multiplexing selector</span>
+<span class="na">&lt;Agent&gt;.sources.&lt;Source1&gt;.selector.type</span> <span class="o">=</span> <span class="s">multiplexing</span>
+<span class="na">&lt;Agent&gt;.sources.&lt;Source1&gt;.selector.header</span> <span class="o">=</span> <span class="s">&lt;someHeader&gt;</span>
+<span class="na">&lt;Agent&gt;.sources.&lt;Source1&gt;.selector.mapping.&lt;Value1&gt;</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt;</span>
+<span class="na">&lt;Agent&gt;.sources.&lt;Source1&gt;.selector.mapping.&lt;Value2&gt;</span> <span class="o">=</span> <span class="s">&lt;Channel1&gt; &lt;Channel2&gt;</span>
+<span class="na">&lt;Agent&gt;.sources.&lt;Source1&gt;.selector.mapping.&lt;Value3&gt;</span> <span class="o">=</span> <span class="s">&lt;Channel2&gt;</span>
+<span class="c">#...</span>
+
+<span class="na">&lt;Agent&gt;.sources.&lt;Source1&gt;.selector.default</span> <span class="o">=</span> <span class="s">&lt;Channel2&gt;</span>
+</pre></div>
+</div>
+<p>The mapping allows overlapping the channels for each value.</p>
+<p>The following example has a single flow that is multiplexed to two paths. The
+agent named agent_foo has a single avro source and two channels linked to two sinks:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># list the sources, sinks and channels in the agent</span>
+<span class="na">agent_foo.sources</span> <span class="o">=</span> <span class="s">avro-AppSrv-source1</span>
+<span class="na">agent_foo.sinks</span> <span class="o">=</span> <span class="s">hdfs-Cluster1-sink1 avro-forward-sink2</span>
+<span class="na">agent_foo.channels</span> <span class="o">=</span> <span class="s">mem-channel-1 file-channel-2</span>
+
+<span class="c"># set channels for source</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.channels</span> <span class="o">=</span> <span class="s">mem-channel-1 file-channel-2</span>
+
+<span class="c"># set channel for sinks</span>
+<span class="na">agent_foo.sinks.hdfs-Cluster1-sink1.channel</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+<span class="na">agent_foo.sinks.avro-forward-sink2.channel</span> <span class="o">=</span> <span class="s">file-channel-2</span>
+
+<span class="c"># channel selector configuration</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.type</span> <span class="o">=</span> <span class="s">multiplexing</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.header</span> <span class="o">=</span> <span class="s">State</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.mapping.CA</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.mapping.AZ</span> <span class="o">=</span> <span class="s">file-channel-2</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.mapping.NY</span> <span class="o">=</span> <span class="s">mem-channel-1 file-channel-2</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.default</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+</pre></div>
+</div>
+<p>The selector checks for a header called &#8220;State&#8221;. If the value is &#8220;CA&#8221; then it is
+sent to mem-channel-1, if it is &#8220;AZ&#8221; then it goes to file-channel-2, or if it is
+&#8220;NY&#8221; then both. If the &#8220;State&#8221; header is not set or doesn&#8217;t match any of the
+three, then it goes to mem-channel-1 which is designated as &#8216;default&#8217;.</p>
+<p>The selector also supports optional channels. To specify optional channels for
+a header, the config parameter &#8216;optional&#8217; is used in the following way:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># channel selector configuration</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.type</span> <span class="o">=</span> <span class="s">multiplexing</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.header</span> <span class="o">=</span> <span class="s">State</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.mapping.CA</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.mapping.AZ</span> <span class="o">=</span> <span class="s">file-channel-2</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.mapping.NY</span> <span class="o">=</span> <span class="s">mem-channel-1 file-channel-2</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.optional.CA</span> <span class="o">=</span> <span class="s">mem-channel-1 file-channel-2</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.mapping.AZ</span> <span class="o">=</span> <span class="s">file-channel-2</span>
+<span class="na">agent_foo.sources.avro-AppSrv-source1.selector.default</span> <span class="o">=</span> <span class="s">mem-channel-1</span>
+</pre></div>
+</div>
+<p>The selector will attempt to write to the required channels first and will fail
+the transaction if even one of these channels fails to consume the events. The
+transaction is reattempted on <strong>all</strong> of the channels. Once all required
+channels have consumed the events, then the selector will attempt to write to
+the optional channels. A failure by any of the optional channels to consume the
+event is simply ignored and not retried.</p>
+<p>If there is an overlap between the optional channels and required channels for a
+specific header, the channel is considered to be required, and a failure in the
+channel will cause the entire set of required channels to be retried. For
+instance, in the above example, for the header &#8220;CA&#8221; mem-channel-1 is considered
+to be a required channel even though it is marked both as required and optional,
+and a failure to write to this channel will cause that
+event to be retried on <strong>all</strong> channels configured for the selector.</p>
+<p>Note that if a header does not have any required channels, then the event will
+be written to the default channels and will be attempted to be written to the
+optional channels for that header. Specifying optional channels will still cause
+the event to be written to the default channels, if no required channels are
+specified. If no channels are designated as default and there are no required
+channels, the selector will attempt to write the events to the optional channels. Any
+failures are simply ignored in that case.</p>
+</div>
+<div class="section" id="ssl-tls-support">
+<h3>SSL/TLS support<a class="headerlink" href="#ssl-tls-support" title="Permalink to this headline">¶</a></h3>
+<p>Several Flume components support the SSL/TLS protocols in order to communicate with other systems
+securely.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="55%" />
+<col width="45%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Component</th>
+<th class="head">SSL server or client</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>Avro Source</td>
+<td>server</td>
+</tr>
+<tr class="row-odd"><td>Avro Sink</td>
+<td>client</td>
+</tr>
+<tr class="row-even"><td>Thrift Source</td>
+<td>server</td>
+</tr>
+<tr class="row-odd"><td>Thrift Sink</td>
+<td>client</td>
+</tr>
+<tr class="row-even"><td>Kafka Source</td>
+<td>client</td>
+</tr>
+<tr class="row-odd"><td>Kafka Channel</td>
+<td>client</td>
+</tr>
+<tr class="row-even"><td>Kafka Sink</td>
+<td>client</td>
+</tr>
+<tr class="row-odd"><td>HTTP Source</td>
+<td>server</td>
+</tr>
+<tr class="row-even"><td>JMS Source</td>
+<td>client</td>
+</tr>
+<tr class="row-odd"><td>Syslog TCP Source</td>
+<td>server</td>
+</tr>
+<tr class="row-even"><td>Multiport Syslog TCP Source</td>
+<td>server</td>
+</tr>
+</tbody>
+</table>
+<p>The SSL compatible components have several configuration parameters to set up SSL, like
+enable SSL flag, keystore / truststore parameters (location, password, type) and additional
+SSL parameters (eg. disabled protocols).</p>
+<p>Enabling SSL for a component is always specified at component level in the agent configuration file.
+So some components may be configured to use SSL while others not (even with the same component type).</p>
+<p>The keystore / truststore setup can be specified at component level or globally.</p>
+<p>In case of the component level setup, the keystore / truststore is configured in the agent
+configuration file through component specific parameters. The advantage of this method is that the
+components can use different keystores (if this would be needed). The disadvantage is that the
+keystore parameters must be copied for each component in the agent configuration file.
+The component level setup is optional, but if it is defined, it has higher precedence than
+the global parameters.</p>
+<p>With the global setup, it is enough to define the keystore / truststore parameters once
+and use the same settings for all components, which means less and more centralized configuration.</p>
+<p>The global setup can be configured either through system properties or through environment variables.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="20%" />
+<col width="59%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">System property</th>
+<th class="head">Environment variable</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>javax.net.ssl.keyStore</td>
+<td>FLUME_SSL_KEYSTORE_PATH</td>
+<td>Keystore location</td>
+</tr>
+<tr class="row-odd"><td>javax.net.ssl.keyStorePassword</td>
+<td>FLUME_SSL_KEYSTORE_PASSWORD</td>
+<td>Keystore password</td>
+</tr>
+<tr class="row-even"><td>javax.net.ssl.keyStoreType</td>
+<td>FLUME_SSL_KEYSTORE_TYPE</td>
+<td>Keystore type (by default JKS)</td>
+</tr>
+<tr class="row-odd"><td>javax.net.ssl.trustStore</td>
+<td>FLUME_SSL_TRUSTSTORE_PATH</td>
+<td>Truststore location</td>
+</tr>
+<tr class="row-even"><td>javax.net.ssl.trustStorePassword</td>
+<td>FLUME_SSL_TRUSTSTORE_PASSWORD</td>
+<td>Truststore password</td>
+</tr>
+<tr class="row-odd"><td>javax.net.ssl.trustStoreType</td>
+<td>FLUME_SSL_TRUSTSTORE_TYPE</td>
+<td>Truststore type (by default JKS)</td>
+</tr>
+<tr class="row-even"><td>flume.ssl.include.protocols</td>
+<td>FLUME_SSL_INCLUDE_PROTOCOLS</td>
+<td>Protocols to include when calculating enabled protocols. A comma (,) separated list.
+Excluded protocols will be excluded from this list if provided.</td>
+</tr>
+<tr class="row-odd"><td>flume.ssl.exclude.protocols</td>
+<td>FLUME_SSL_EXCLUDE_PROTOCOLS</td>
+<td>Protocols to exclude when calculating enabled protocols. A comma (,) separated list.</td>
+</tr>
+<tr class="row-even"><td>flume.ssl.include.cipherSuites</td>
+<td>FLUME_SSL_INCLUDE_CIPHERSUITES</td>
+<td>Cipher suites to include when calculating enabled cipher suites. A comma (,) separated list.
+Excluded cipher suites will be excluded from this list if provided.</td>
+</tr>
+<tr class="row-odd"><td>flume.ssl.exclude.cipherSuites</td>
+<td>FLUME_SSL_EXCLUDE_CIPHERSUITES</td>
+<td>Cipher suites to exclude when calculating enabled cipher suites. A comma (,) separated list.</td>
+</tr>
+</tbody>
+</table>
+<p>The SSL system properties can either be passed on the command line or by setting the <tt class="docutils literal"><span class="pre">JAVA_OPTS</span></tt>
+environment variable in <em>conf/flume-env.sh</em>. (Although, using the command line is inadvisable because
+the commands including the passwords will be saved to the command history.)</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">export JAVA_OPTS</span><span class="o">=</span><span class="s">&quot;$JAVA_OPTS -Djavax.net.ssl.keyStore=/path/to/keystore.jks&quot;</span>
+<span class="na">export JAVA_OPTS</span><span class="o">=</span><span class="s">&quot;$JAVA_OPTS -Djavax.net.ssl.keyStorePassword=password&quot;</span>
+</pre></div>
+</div>
+<p>Flume uses the system properties defined in JSSE (Java Secure Socket Extension), so this is
+a standard way for setting up SSL. On the other hand, specifying passwords in system properties
+means that the passwords can be seen in the process list. For cases where it is not acceptable,
+it is also possible to define the parameters in environment variables. Flume initializes
+the JSSE system properties from the corresponding environment variables internally in this case.</p>
+<p>The SSL environment variables can either be set in the shell environment before
+starting Flume or in <em>conf/flume-env.sh</em>. (Although, using the command line is inadvisable because
+the commands including the passwords will be saved to the command history.)</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">export FLUME_SSL_KEYSTORE_PATH</span><span class="o">=</span><span class="s">/path/to/keystore.jks</span>
+<span class="na">export FLUME_SSL_KEYSTORE_PASSWORD</span><span class="o">=</span><span class="s">password</span>
+</pre></div>
+</div>
+<p><strong>Please note:</strong></p>
+<ul class="simple">
+<li>SSL must be enabled at component level. Specifying the global SSL parameters alone will not
+have any effect.</li>
+<li>If the global SSL parameters are specified at multiple levels, the priority is the
+following (from higher to lower):<ul>
+<li>component parameters in agent config</li>
+<li>system properties</li>
+<li>environment variables</li>
+</ul>
+</li>
+<li>If SSL is enabled for a component, but the SSL parameters are not specified in any of the ways
+described above, then<ul>
+<li>in case of keystores: configuration error</li>
+<li>in case of truststores: the default truststore will be used (<tt class="docutils literal"><span class="pre">jssecacerts</span></tt> / <tt class="docutils literal"><span class="pre">cacerts</span></tt> in Oracle JDK)</li>
+</ul>
+</li>
+<li>The truststore password is optional in all cases. If not specified, then no integrity check will be
+performed on the truststore when it is opened by the JDK.</li>
+</ul>
+</div>
+<div class="section" id="source-and-sink-batch-sizes-and-channel-transaction-capacities">
+<h3>Source and sink batch sizes and channel transaction capacities<a class="headerlink" href="#source-and-sink-batch-sizes-and-channel-transaction-capacities" title="Permalink to this headline">¶</a></h3>
+<p>Sources and sinks can have a batch size parameter that determines the maximum number of events they
+process in one batch. This happens within a channel transaction that has an upper limit called
+transaction capacity. Batch size must be smaller than the channel&#8217;s transaction capacity.
+There is an explicit check to prevent incompatible settings. This check happens
+whenever the configuration is read.</p>
+</div>
+<div class="section" id="flume-sources">
+<h3>Flume Sources<a class="headerlink" href="#flume-sources" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="avro-source">
+<h4>Avro Source<a class="headerlink" href="#avro-source" title="Permalink to this headline">¶</a></h4>
+<p>Listens on Avro port and receives events from external Avro client streams.
+When paired with the built-in Avro Sink on another (previous hop) Flume agent,
+it can create tiered collection topologies.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="14%" />
+<col width="11%" />
+<col width="75%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">avro</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>bind</strong></td>
+<td>&#8211;</td>
+<td>hostname or IP address to listen on</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>Port # to bind to</td>
+</tr>
+<tr class="row-even"><td>threads</td>
+<td>&#8211;</td>
+<td>Maximum number of worker threads to spawn</td>
+</tr>
+<tr class="row-odd"><td>selector.type</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-even"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>compression-type</td>
+<td>none</td>
+<td>This can be &#8220;none&#8221; or &#8220;deflate&#8221;.  The compression-type must match the compression-type of the matching AvroSink on the previous hop</td>
+</tr>
+<tr class="row-even"><td>ssl</td>
+<td>false</td>
+<td>Set this to true to enable SSL encryption. If SSL is enabled,
+you must also specify a &#8220;keystore&#8221; and a &#8220;keystore-password&#8221;,
+either through component level parameters (see below)
+or as global SSL parameters (see <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section).</td>
+</tr>
+<tr class="row-odd"><td>keystore</td>
+<td>&#8211;</td>
+<td>This is the path to a Java keystore file.
+If not specified here, then the global keystore will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-even"><td>keystore-password</td>
+<td>&#8211;</td>
+<td>The password for the Java keystore.
+If not specified here, then the global keystore password will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-odd"><td>keystore-type</td>
+<td>JKS</td>
+<td>The type of the Java keystore. This can be &#8220;JKS&#8221; or &#8220;PKCS12&#8221;.
+If not specified here, then the global keystore type will be used
+(if defined, otherwise the default is JKS).</td>
+</tr>
+<tr class="row-even"><td>exclude-protocols</td>
+<td>SSLv3</td>
+<td>Space-separated list of SSL/TLS protocols to exclude.
+SSLv3 will always be excluded in addition to the protocols specified.</td>
+</tr>
+<tr class="row-odd"><td>include-protocols</td>
+<td>&#8211;</td>
+<td>Space-separated list of SSL/TLS protocols to include.
+The enabled protocols will be the included protocols without the excluded protocols.
+If included-protocols is empty, every supported protocol is included.</td>
+</tr>
+<tr class="row-even"><td>exclude-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to exclude.</td>
+</tr>
+<tr class="row-odd"><td>include-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to include.
+The enabled cipher suites will be the included cipher suites without the excluded cipher suites.
+If included-cipher-suites is empty, every supported cipher suite is included.</td>
+</tr>
+<tr class="row-even"><td>ipFilter</td>
+<td>false</td>
+<td>Set this to true to enable ipFiltering for netty</td>
+</tr>
+<tr class="row-odd"><td>ipFilterRules</td>
+<td>&#8211;</td>
+<td>Define N netty ipFilter pattern rules with this config.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">avro</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">4141</span>
+</pre></div>
+</div>
+<p>Example of ipFilterRules</p>
+<p>ipFilterRules defines N netty ipFilters separated by a comma. A pattern rule must be in this format.</p>
+<p>&lt;&#8217;allow&#8217; or deny&gt;:&lt;&#8217;ip&#8217; or &#8216;name&#8217; for computer name&gt;:&lt;pattern&gt;
+or
+allow/deny:ip/name:pattern</p>
+<p>example: ipFilterRules=allow:ip:127.*,allow:name:localhost,deny:ip:*</p>
+<p>Note that the first rule to match will apply as the example below shows from a client on the localhost</p>
+<p>This will allow the client on localhost and deny clients from any other ip: &#8220;allow:name:localhost,deny:ip:*&#8221;
+This will deny the client on localhost and allow clients from any other ip: &#8220;deny:name:localhost,allow:ip:*&#8221;</p>
+</div>
+<div class="section" id="thrift-source">
+<h4>Thrift Source<a class="headerlink" href="#thrift-source" title="Permalink to this headline">¶</a></h4>
+<p>Listens on Thrift port and receives events from external Thrift client streams.
+When paired with the built-in ThriftSink on another (previous hop) Flume agent,
+it can create tiered collection topologies.
+Thrift source can be configured to start in secure mode by enabling kerberos authentication.
+agent-principal and agent-keytab are the properties used by the
+Thrift source to authenticate to the kerberos KDC.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="6%" />
+<col width="3%" />
+<col width="91%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">thrift</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>bind</strong></td>
+<td>&#8211;</td>
+<td>hostname or IP address to listen on</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>Port # to bind to</td>
+</tr>
+<tr class="row-even"><td>threads</td>
+<td>&#8211;</td>
+<td>Maximum number of worker threads to spawn</td>
+</tr>
+<tr class="row-odd"><td>selector.type</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space separated list of interceptors</td>
+</tr>
+<tr class="row-even"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>ssl</td>
+<td>false</td>
+<td>Set this to true to enable SSL encryption. If SSL is enabled,
+you must also specify a &#8220;keystore&#8221; and a &#8220;keystore-password&#8221;,
+either through component level parameters (see below)
+or as global SSL parameters (see <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section)</td>
+</tr>
+<tr class="row-even"><td>keystore</td>
+<td>&#8211;</td>
+<td>This is the path to a Java keystore file.
+If not specified here, then the global keystore will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-odd"><td>keystore-password</td>
+<td>&#8211;</td>
+<td>The password for the Java keystore.
+If not specified here, then the global keystore password will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-even"><td>keystore-type</td>
+<td>JKS</td>
+<td>The type of the Java keystore. This can be &#8220;JKS&#8221; or &#8220;PKCS12&#8221;.
+If not specified here, then the global keystore type will be used
+(if defined, otherwise the default is JKS).</td>
+</tr>
+<tr class="row-odd"><td>exclude-protocols</td>
+<td>SSLv3</td>
+<td>Space-separated list of SSL/TLS protocols to exclude.
+SSLv3 will always be excluded in addition to the protocols specified.</td>
+</tr>
+<tr class="row-even"><td>include-protocols</td>
+<td>&#8211;</td>
+<td>Space-separated list of SSL/TLS protocols to include.
+The enabled protocols will be the included protocols without the excluded protocols.
+If included-protocols is empty, every supported protocol is included.</td>
+</tr>
+<tr class="row-odd"><td>exclude-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to exclude.</td>
+</tr>
+<tr class="row-even"><td>include-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to include.
+The enabled cipher suites will be the included cipher suites without the excluded cipher suites.</td>
+</tr>
+<tr class="row-odd"><td>kerberos</td>
+<td>false</td>
+<td>Set to true to enable kerberos authentication. In kerberos mode, agent-principal and agent-keytab  are required for successful authentication. The Thrift source in secure mode, will accept connections only from Thrift clients that have kerberos enabled and are successfully authenticated to the kerberos KDC.</td>
+</tr>
+<tr class="row-even"><td>agent-principal</td>
+<td>&#8211;</td>
+<td>The kerberos principal used by the Thrift Source to authenticate to the kerberos KDC.</td>
+</tr>
+<tr class="row-odd"><td>agent-keytab</td>
+<td>&#8211;</td>
+<td>The keytab location used by the Thrift Source in combination with the agent-principal to authenticate to the kerberos KDC.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">thrift</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">4141</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="exec-source">
+<h4>Exec Source<a class="headerlink" href="#exec-source" title="Permalink to this headline">¶</a></h4>
+<p>Exec source runs a given Unix command on start-up and expects that process to
+continuously produce data on standard out (stderr is simply discarded, unless
+property logStdErr is set to true). If the process exits for any reason, the source also exits and
+will produce no further data. This means configurations such as <tt class="docutils literal"><span class="pre">cat</span> <span class="pre">[named</span> <span class="pre">pipe]</span></tt>
+or <tt class="docutils literal"><span class="pre">tail</span> <span class="pre">-F</span> <span class="pre">[file]</span></tt> are going to produce the desired results whereas <tt class="docutils literal"><span class="pre">date</span></tt>
+will probably not - the former two commands produce streams of data whereas the
+latter produces a single event and exits.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="8%" />
+<col width="6%" />
+<col width="85%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">exec</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>command</strong></td>
+<td>&#8211;</td>
+<td>The command to execute</td>
+</tr>
+<tr class="row-odd"><td>shell</td>
+<td>&#8211;</td>
+<td>A shell invocation used to run the command.  e.g. /bin/sh -c. Required only for commands relying on shell features like wildcards, back ticks, pipes etc.</td>
+</tr>
+<tr class="row-even"><td>restartThrottle</td>
+<td>10000</td>
+<td>Amount of time (in millis) to wait before attempting a restart</td>
+</tr>
+<tr class="row-odd"><td>restart</td>
+<td>false</td>
+<td>Whether the executed cmd should be restarted if it dies</td>
+</tr>
+<tr class="row-even"><td>logStdErr</td>
+<td>false</td>
+<td>Whether the command&#8217;s stderr should be logged</td>
+</tr>
+<tr class="row-odd"><td>batchSize</td>
+<td>20</td>
+<td>The max number of lines to read and send to the channel at a time</td>
+</tr>
+<tr class="row-even"><td>batchTimeout</td>
+<td>3000</td>
+<td>Amount of time (in milliseconds) to wait, if the buffer size was not reached, before data is pushed downstream</td>
+</tr>
+<tr class="row-odd"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-even"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-odd"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-even"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<div class="admonition warning">
+<p class="first admonition-title">Warning</p>
+<p class="last">The problem with ExecSource and other asynchronous sources is that the
+source can not guarantee that if there is a failure to put the event
+into the Channel the client knows about it. In such cases, the data will
+be lost. For instance, one of the most commonly requested features
+is the <tt class="docutils literal"><span class="pre">tail</span> <span class="pre">-F</span> <span class="pre">[file]</span></tt>-like use case where an application writes
+to a log file on disk and Flume tails the file, sending each line as an
+event. While this is possible, there&#8217;s an obvious problem; what happens
+if the channel fills up and Flume can&#8217;t send an event? Flume has no way
+of indicating to the application writing the log file that it needs to
+retain the log or that the event hasn&#8217;t been sent, for some reason. If
+this doesn&#8217;t make sense, you need only know this: Your application can
+never guarantee data has been received when using a unidirectional
+asynchronous interface such as ExecSource! As an extension of this
+warning - and to be completely clear - there is absolutely zero guarantee
+of event delivery when using this source. For stronger reliability
+guarantees, consider the Spooling Directory Source, Taildir Source or direct integration
+with Flume via the SDK.</p>
+</div>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">exec</span>
+<span class="na">a1.sources.r1.command</span> <span class="o">=</span> <span class="s">tail -F /var/log/secure</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+<p>The &#8216;shell&#8217; config is used to invoke the &#8216;command&#8217; through a command shell (such as Bash
+or Powershell). The &#8216;command&#8217; is passed as an argument to &#8216;shell&#8217; for execution. This
+allows the &#8216;command&#8217; to use features from the shell such as wildcards, back ticks, pipes,
+loops, conditionals etc. In the absence of the &#8216;shell&#8217; config, the &#8216;command&#8217; will be
+invoked directly.  Common values for &#8216;shell&#8217; :  &#8216;/bin/sh -c&#8217;, &#8216;/bin/ksh -c&#8217;,
+&#8216;cmd /c&#8217;,  &#8216;powershell -Command&#8217;, etc.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.tailsource-1.type</span> <span class="o">=</span> <span class="s">exec</span>
+<span class="na">a1.sources.tailsource-1.shell</span> <span class="o">=</span> <span class="s">/bin/bash -c</span>
+<span class="na">a1.sources.tailsource-1.command</span> <span class="o">=</span> <span class="s">for i in /path/*.txt; do cat $i; done</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="jms-source">
+<h4>JMS Source<a class="headerlink" href="#jms-source" title="Permalink to this headline">¶</a></h4>
+<p>JMS Source reads messages from a JMS destination such as a queue or topic. Being a JMS
+application it should work with any JMS provider but has only been tested with ActiveMQ.
+The JMS source provides configurable batch size, message selector, user/pass, and message
+to flume event converter. Note that the vendor provided JMS jars should be included in the
+Flume classpath using plugins.d directory (preferred), &#8211;classpath on command line, or
+via FLUME_CLASSPATH variable in flume-env.sh.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="9%" />
+<col width="71%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">jms</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>initialContextFactory</strong></td>
+<td>&#8211;</td>
+<td>Initial Context Factory, e.g: org.apache.activemq.jndi.ActiveMQInitialContextFactory</td>
+</tr>
+<tr class="row-odd"><td><strong>connectionFactory</strong></td>
+<td>&#8211;</td>
+<td>The JNDI name the connection factory should appear as</td>
+</tr>
+<tr class="row-even"><td><strong>providerURL</strong></td>
+<td>&#8211;</td>
+<td>The JMS provider URL</td>
+</tr>
+<tr class="row-odd"><td><strong>destinationName</strong></td>
+<td>&#8211;</td>
+<td>Destination name</td>
+</tr>
+<tr class="row-even"><td><strong>destinationType</strong></td>
+<td>&#8211;</td>
+<td>Destination type (queue or topic)</td>
+</tr>
+<tr class="row-odd"><td>messageSelector</td>
+<td>&#8211;</td>
+<td>Message selector to use when creating the consumer</td>
+</tr>
+<tr class="row-even"><td>userName</td>
+<td>&#8211;</td>
+<td>Username for the destination/provider</td>
+</tr>
+<tr class="row-odd"><td>passwordFile</td>
+<td>&#8211;</td>
+<td>File containing the password for the destination/provider</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>100</td>
+<td>Number of messages to consume in one batch</td>
+</tr>
+<tr class="row-odd"><td>converter.type</td>
+<td>DEFAULT</td>
+<td>Class to use to convert messages to flume events. See below.</td>
+</tr>
+<tr class="row-even"><td>converter.*</td>
+<td>&#8211;</td>
+<td>Converter properties.</td>
+</tr>
+<tr class="row-odd"><td>converter.charset</td>
+<td>UTF-8</td>
+<td>Default converter only. Charset to use when converting JMS TextMessages to byte arrays.</td>
+</tr>
+<tr class="row-even"><td>createDurableSubscription</td>
+<td>false</td>
+<td>Whether to create durable subscription. Durable subscription can only be used with
+destinationType topic. If true, &#8220;clientId&#8221; and &#8220;durableSubscriptionName&#8221;
+have to be specified.</td>
+</tr>
+<tr class="row-odd"><td>clientId</td>
+<td>&#8211;</td>
+<td>JMS client identifier set on Connection right after it is created.
+Required for durable subscriptions.</td>
+</tr>
+<tr class="row-even"><td>durableSubscriptionName</td>
+<td>&#8211;</td>
+<td>Name used to identify the durable subscription. Required for durable subscriptions.</td>
+</tr>
+</tbody>
+</table>
+<div class="section" id="jms-message-converter">
+<h5>JMS message converter<a class="headerlink" href="#jms-message-converter" title="Permalink to this headline">¶</a></h5>
+<p>The JMS source allows pluggable converters, though it&#8217;s likely the default converter will work
+for most purposes. The default converter is able to convert Bytes, Text, and Object messages
+to FlumeEvents. In all cases, the properties in the message are added as headers to the
+FlumeEvent.</p>
+<dl class="docutils">
+<dt>BytesMessage:</dt>
+<dd>Bytes of message are copied to body of the FlumeEvent. Cannot convert more than 2GB
+of data per message.</dd>
+<dt>TextMessage:</dt>
+<dd>Text of message is converted to a byte array and copied to the body of the
+FlumeEvent. The default converter uses UTF-8 by default but this is configurable.</dd>
+<dt>ObjectMessage:</dt>
+<dd>Object is written out to a ByteArrayOutputStream wrapped in an ObjectOutputStream and
+the resulting array is copied to the body of the FlumeEvent.</dd>
+</dl>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">jms</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.initialContextFactory</span> <span class="o">=</span> <span class="s">org.apache.activemq.jndi.ActiveMQInitialContextFactory</span>
+<span class="na">a1.sources.r1.connectionFactory</span> <span class="o">=</span> <span class="s">GenericConnectionFactory</span>
+<span class="na">a1.sources.r1.providerURL</span> <span class="o">=</span> <span class="s">tcp://mqserver:61616</span>
+<span class="na">a1.sources.r1.destinationName</span> <span class="o">=</span> <span class="s">BUSINESS_DATA</span>
+<span class="na">a1.sources.r1.destinationType</span> <span class="o">=</span> <span class="s">QUEUE</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="ssl-and-jms-source">
+<h5>SSL and JMS Source<a class="headerlink" href="#ssl-and-jms-source" title="Permalink to this headline">¶</a></h5>
+<p>JMS client implementations typically support configuring SSL/TLS via some Java system properties defined by JSSE
+(Java Secure Socket Extension). Specifying these system properties for Flume&#8217;s JVM, JMS Source (or more precisely the
+JMS client implementation used by the JMS Source) can connect to the JMS server through SSL (of course only when the JMS
+server has also been set up to use SSL).
+It should work with any JMS provider and has been tested with ActiveMQ, IBM MQ and Oracle WebLogic.</p>
+<p>The following sections describe the SSL configuration steps needed on the Flume side only. You can find more detailed
+descriptions about the server side setup of the different JMS providers and also full working configuration examples on
+Flume Wiki.</p>
+<p><strong>SSL transport / server authentication:</strong></p>
+<p>If the JMS server uses self-signed certificate or its certificate is signed by a non-trusted CA (eg. the company&#8217;s own
+CA), then a truststore (containing the right certificate) needs to be set up and passed to Flume. It can be done via
+the global SSL parameters. For more details about the global SSL setup, see the <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section.</p>
+<p>Some JMS providers require SSL specific JNDI Initial Context Factory and/or Provider URL settings when using SSL (eg.
+ActiveMQ uses ssl:// URL prefix instead of tcp://).
+In this case the source properties (<tt class="docutils literal"><span class="pre">initialContextFactory</span></tt> and/or <tt class="docutils literal"><span class="pre">providerURL</span></tt>) have to be adjusted in the agent
+config file.</p>
+<p><strong>Client certificate authentication (two-way SSL):</strong></p>
+<p>JMS Source can authenticate to the JMS server through client certificate authentication instead of the usual
+user/password login (when SSL is used and the JMS server is configured to accept this kind of authentication).</p>
+<p>The keystore containing Flume&#8217;s key used for the authentication needs to be configured via the global SSL parameters
+again. For more details about the global SSL setup, see the <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section.</p>
+<p>The keystore should contain only one key (if multiple keys are present, then the first one will be used).
+The key password must be the same as the keystore password.</p>
+<p>In case of client certificate authentication, it is not needed to specify the <tt class="docutils literal"><span class="pre">userName</span></tt> / <tt class="docutils literal"><span class="pre">passwordFile</span></tt> properties
+for the JMS Source in the Flume agent config file.</p>
+<p><strong>Please note:</strong></p>
+<p>There are no component level configuration parameters for JMS Source unlike in case of other components.
+No enable SSL flag either.
+SSL setup is controlled by JNDI/Provider URL settings (ultimately the JMS server settings) and by the presence / absence
+of the truststore / keystore.</p>
+</div>
+</div>
+<div class="section" id="spooling-directory-source">
+<h4>Spooling Directory Source<a class="headerlink" href="#spooling-directory-source" title="Permalink to this headline">¶</a></h4>
+<p>This source lets you ingest data by placing files to be ingested into a
+&#8220;spooling&#8221; directory on disk.
+This source will watch the specified directory for new files, and will parse
+events out of new files as they appear.
+The event parsing logic is pluggable.
+After a given file has been fully read
+into the channel, completion by default is indicated by renaming the file or it can be deleted or the trackerDir is used
+to keep track of processed files.</p>
+<p>Unlike the Exec source, this source is reliable and will not miss data, even if
+Flume is restarted or killed. In exchange for this reliability, only immutable,
+uniquely-named files must be dropped into the spooling directory. Flume tries
+to detect these problem conditions and will fail loudly if they are violated:</p>
+<ol class="arabic simple">
+<li>If a file is written to after being placed into the spooling directory,
+Flume will print an error to its log file and stop processing.</li>
+<li>If a file name is reused at a later time, Flume will print an error to its
+log file and stop processing.</li>
+</ol>
+<p>To avoid the above issues, it may be useful to add a unique identifier
+(such as a timestamp) to log file names when they are moved into the spooling
+directory.</p>
+<p>Despite the reliability guarantees of this source, there are still
+cases in which events may be duplicated if certain downstream failures occur.
+This is consistent with the guarantees offered by other Flume components.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="18%" />
+<col width="10%" />
+<col width="72%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">spooldir</span></tt>.</td>
+</tr>
+<tr class="row-even"><td><strong>spoolDir</strong></td>
+<td>&#8211;</td>
+<td>The directory from which to read files.</td>
+</tr>
+<tr class="row-odd"><td>fileSuffix</td>
+<td>.COMPLETED</td>
+<td>Suffix to append to completely ingested files</td>
+</tr>
+<tr class="row-even"><td>deletePolicy</td>
+<td>never</td>
+<td>When to delete completed files: <tt class="docutils literal"><span class="pre">never</span></tt> or <tt class="docutils literal"><span class="pre">immediate</span></tt></td>
+</tr>
+<tr class="row-odd"><td>fileHeader</td>
+<td>false</td>
+<td>Whether to add a header storing the absolute path filename.</td>
+</tr>
+<tr class="row-even"><td>fileHeaderKey</td>
+<td>file</td>
+<td>Header key to use when appending absolute path filename to event header.</td>
+</tr>
+<tr class="row-odd"><td>basenameHeader</td>
+<td>false</td>
+<td>Whether to add a header storing the basename of the file.</td>
+</tr>
+<tr class="row-even"><td>basenameHeaderKey</td>
+<td>basename</td>
+<td>Header key to use when appending basename of file to event header.</td>
+</tr>
+<tr class="row-odd"><td>includePattern</td>
+<td>^.*$</td>
+<td>Regular expression specifying which files to include.
+It can be used together with <tt class="docutils literal"><span class="pre">ignorePattern</span></tt>.
+If a file matches both <tt class="docutils literal"><span class="pre">ignorePattern</span></tt> and <tt class="docutils literal"><span class="pre">includePattern</span></tt> regex,
+the file is ignored.</td>
+</tr>
+<tr class="row-even"><td>ignorePattern</td>
+<td>^$</td>
+<td>Regular expression specifying which files to ignore (skip).
+It can be used together with <tt class="docutils literal"><span class="pre">includePattern</span></tt>.
+If a file matches both <tt class="docutils literal"><span class="pre">ignorePattern</span></tt> and <tt class="docutils literal"><span class="pre">includePattern</span></tt> regex,
+the file is ignored.</td>
+</tr>
+<tr class="row-odd"><td>trackerDir</td>
+<td>.flumespool</td>
+<td>Directory to store metadata related to processing of files.
+If this path is not an absolute path, then it is interpreted as relative to the spoolDir.</td>
+</tr>
+<tr class="row-even"><td>trackingPolicy</td>
+<td>rename</td>
+<td>The tracking policy defines how file processing is tracked. It can be &#8220;rename&#8221; or
+&#8220;tracker_dir&#8221;. This parameter is only effective if the deletePolicy is &#8220;never&#8221;.
+&#8220;rename&#8221; - After processing files they get renamed according to the fileSuffix parameter.
+&#8220;tracker_dir&#8221; - Files are not renamed but a new empty file is created in the trackerDir.
+The new tracker file name is derived from the ingested one plus the fileSuffix.</td>
+</tr>
+<tr class="row-odd"><td>consumeOrder</td>
+<td>oldest</td>
+<td>In which order files in the spooling directory will be consumed <tt class="docutils literal"><span class="pre">oldest</span></tt>,
+<tt class="docutils literal"><span class="pre">youngest</span></tt> and <tt class="docutils literal"><span class="pre">random</span></tt>. In case of <tt class="docutils literal"><span class="pre">oldest</span></tt> and <tt class="docutils literal"><span class="pre">youngest</span></tt>, the last modified
+time of the files will be used to compare the files. In case of a tie, the file
+with smallest lexicographical order will be consumed first. In case of <tt class="docutils literal"><span class="pre">random</span></tt> any
+file will be picked randomly. When using <tt class="docutils literal"><span class="pre">oldest</span></tt> and <tt class="docutils literal"><span class="pre">youngest</span></tt> the whole
+directory will be scanned to pick the oldest/youngest file, which might be slow if there
+are a large number of files, while using <tt class="docutils literal"><span class="pre">random</span></tt> may cause old files to be consumed
+very late if new files keep coming in the spooling directory.</td>
+</tr>
+<tr class="row-even"><td>pollDelay</td>
+<td>500</td>
+<td>Delay (in milliseconds) used when polling for new files.</td>
+</tr>
+<tr class="row-odd"><td>recursiveDirectorySearch</td>
+<td>false</td>
+<td>Whether to monitor sub directories for new files to read.</td>
+</tr>
+<tr class="row-even"><td>maxBackoff</td>
+<td>4000</td>
+<td>The maximum time (in millis) to wait between consecutive attempts to
+write to the channel(s) if the channel is full. The source will start at
+a low backoff and increase it exponentially each time the channel throws a
+ChannelException, up to the value specified by this parameter.</td>
+</tr>
+<tr class="row-odd"><td>batchSize</td>
+<td>100</td>
+<td>Granularity at which to batch transfer to the channel</td>
+</tr>
+<tr class="row-even"><td>inputCharset</td>
+<td>UTF-8</td>
+<td>Character set used by deserializers that treat the input file as text.</td>
+</tr>
+<tr class="row-odd"><td>decodeErrorPolicy</td>
+<td><tt class="docutils literal"><span class="pre">FAIL</span></tt></td>
+<td>What to do when we see a non-decodable character in the input file.
+<tt class="docutils literal"><span class="pre">FAIL</span></tt>: Throw an exception and fail to parse the file.
+<tt class="docutils literal"><span class="pre">REPLACE</span></tt>: Replace the unparseable character with the &#8220;replacement character&#8221; char,
+typically Unicode U+FFFD.
+<tt class="docutils literal"><span class="pre">IGNORE</span></tt>: Drop the unparseable character sequence.</td>
+</tr>
+<tr class="row-even"><td>deserializer</td>
+<td><tt class="docutils literal"><span class="pre">LINE</span></tt></td>
+<td>Specify the deserializer used to parse the file into events.
+Defaults to parsing each line as an event. The class specified must implement
+<tt class="docutils literal"><span class="pre">EventDeserializer.Builder</span></tt>.</td>
+</tr>
+<tr class="row-odd"><td>deserializer.*</td>
+<td>&nbsp;</td>
+<td>Varies per event deserializer.</td>
+</tr>
+<tr class="row-even"><td>bufferMaxLines</td>
+<td>&#8211;</td>
+<td>(Obsolete) This option is now ignored.</td>
+</tr>
+<tr class="row-odd"><td>bufferMaxLineLength</td>
+<td>5000</td>
+<td>(Deprecated) Maximum length of a line in the commit buffer. Use deserializer.maxLineLength instead.</td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-even"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-odd"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for an agent named agent-1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">ch-1</span>
+<span class="na">a1.sources</span> <span class="o">=</span> <span class="s">src-1</span>
+
+<span class="na">a1.sources.src-1.type</span> <span class="o">=</span> <span class="s">spooldir</span>
+<span class="na">a1.sources.src-1.channels</span> <span class="o">=</span> <span class="s">ch-1</span>
+<span class="na">a1.sources.src-1.spoolDir</span> <span class="o">=</span> <span class="s">/var/log/apache/flumeSpool</span>
+<span class="na">a1.sources.src-1.fileHeader</span> <span class="o">=</span> <span class="s">true</span>
+</pre></div>
+</div>
+<div class="section" id="event-deserializers">
+<h5>Event Deserializers<a class="headerlink" href="#event-deserializers" title="Permalink to this headline">¶</a></h5>
+<p>The following event deserializers ship with Flume.</p>
+<div class="section" id="line">
+<h6>LINE<a class="headerlink" href="#line" title="Permalink to this headline">¶</a></h6>
+<p>This deserializer generates one event per line of text input.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="29%" />
+<col width="14%" />
+<col width="57%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>deserializer.maxLineLength</td>
+<td>2048</td>
+<td>Maximum number of characters to include in a single event.
+If a line exceeds this length, it is truncated, and the
+remaining characters on the line will appear in a
+subsequent event.</td>
+</tr>
+<tr class="row-odd"><td>deserializer.outputCharset</td>
+<td>UTF-8</td>
+<td>Charset to use for encoding events put into the channel.</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="avro">
+<h6>AVRO<a class="headerlink" href="#avro" title="Permalink to this headline">¶</a></h6>
+<p>This deserializer is able to read an Avro container file, and it generates
+one event per Avro record in the file.
+Each event is annotated with a header that indicates the schema used.
+The body of the event is the binary Avro record data, not
+including the schema or the rest of the container file elements.</p>
+<p>Note that if the spool directory source must retry putting one of these events
+onto a channel (for example, because the channel is full), then it will reset
+and retry from the most recent Avro container file sync point. To reduce
+potential event duplication in such a failure scenario, write sync markers more
+frequently in your Avro input files.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="26%" />
+<col width="12%" />
+<col width="62%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>deserializer.schemaType</td>
+<td>HASH</td>
+<td>How the schema is represented. By default, or when the value <tt class="docutils literal"><span class="pre">HASH</span></tt>
+is specified, the Avro schema is hashed and
+the hash is stored in every event in the event header
+&#8220;flume.avro.schema.hash&#8221;. If <tt class="docutils literal"><span class="pre">LITERAL</span></tt> is specified, the JSON-encoded
+schema itself is stored in every event in the event header
+&#8220;flume.avro.schema.literal&#8221;. Using <tt class="docutils literal"><span class="pre">LITERAL</span></tt> mode is relatively
+inefficient compared to <tt class="docutils literal"><span class="pre">HASH</span></tt> mode.</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="blobdeserializer">
+<h6>BlobDeserializer<a class="headerlink" href="#blobdeserializer" title="Permalink to this headline">¶</a></h6>
+<p>This deserializer reads a Binary Large Object (BLOB) per event, typically one BLOB per file. For example a PDF or JPG file. Note that this approach is not suitable for very large objects because the entire BLOB is buffered in RAM.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="14%" />
+<col width="67%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>deserializer</strong></td>
+<td>&#8211;</td>
+<td>The FQCN of this class: <tt class="docutils literal"><span class="pre">org.apache.flume.sink.solr.morphline.BlobDeserializer$Builder</span></tt></td>
+</tr>
+<tr class="row-odd"><td>deserializer.maxBlobLength</td>
+<td>100000000</td>
+<td>The maximum number of bytes to read and buffer for a given request</td>
+</tr>
+</tbody>
+</table>
+</div>
+</div>
+</div>
+<div class="section" id="taildir-source">
+<h4>Taildir Source<a class="headerlink" href="#taildir-source" title="Permalink to this headline">¶</a></h4>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last"><strong>This source is provided as a preview feature. It does not work on Windows.</strong></p>
+</div>
+<p>Watch the specified files, and tail them in nearly real-time once new lines are detected being appended to each file.
+If new lines are being written, this source will retry reading them while waiting for the completion of the write.</p>
+<p>This source is reliable and will not miss data even when the tailing files rotate.
+It periodically writes the last read position of each file on the given position file in JSON format.
+If Flume is stopped or down for some reason, it can restart tailing from the position written on the existing position file.</p>
+<p>In another use case, this source can also start tailing from an arbitrary position for each file using the given position file.
+When there is no position file on the specified path, it will start tailing from the first line of each file by default.</p>
+<p>Files will be consumed in order of their modification time. File with the oldest modification time will be consumed first.</p>
+<p>This source does not rename or delete or do any modifications to the file being tailed.
+Currently this source does not support tailing binary files. It reads text files line by line.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="19%" />
+<col width="16%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">TAILDIR</span></tt>.</td>
+</tr>
+<tr class="row-even"><td><strong>filegroups</strong></td>
+<td>&#8211;</td>
+<td>Space-separated list of file groups. Each file group indicates a set of files to be tailed.</td>
+</tr>
+<tr class="row-odd"><td><strong>filegroups.&lt;filegroupName&gt;</strong></td>
+<td>&#8211;</td>
+<td>Absolute path of the file group. Regular expression (and not file system patterns) can be used for filename only.</td>
+</tr>
+<tr class="row-even"><td>positionFile</td>
+<td>~/.flume/taildir_position.json</td>
+<td>File in JSON format to record the inode, the absolute path and the last position of each tailing file.</td>
+</tr>
+<tr class="row-odd"><td>headers.&lt;filegroupName&gt;.&lt;headerKey&gt;</td>
+<td>&#8211;</td>
+<td>Header value which is set with the header key. Multiple headers can be specified for one file group.</td>
+</tr>
+<tr class="row-even"><td>byteOffsetHeader</td>
+<td>false</td>
+<td>Whether to add the byte offset of a tailed line to a header called &#8216;byteoffset&#8217;.</td>
+</tr>
+<tr class="row-odd"><td>skipToEnd</td>
+<td>false</td>
+<td>Whether to skip the position to EOF in the case of files not written on the position file.</td>
+</tr>
+<tr class="row-even"><td>idleTimeout</td>
+<td>120000</td>
+<td>Time (ms) to close inactive files. If new lines are appended to the closed file, this source will automatically re-open it.</td>
+</tr>
+<tr class="row-odd"><td>writePosInterval</td>
+<td>3000</td>
+<td>Interval time (ms) to write the last position of each file on the position file.</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>100</td>
+<td>Max number of lines to read and send to the channel at a time. Using the default is usually fine.</td>
+</tr>
+<tr class="row-odd"><td>maxBatchCount</td>
+<td>Long.MAX_VALUE</td>
+<td>Controls the number of batches being read consecutively from the same file.
+If the source is tailing multiple files and one of them is written at a fast rate,
+it can prevent other files from being processed, because the busy file would be read in an endless loop.
+In this case lower this value.</td>
+</tr>
+<tr class="row-even"><td>backoffSleepIncrement</td>
+<td>1000</td>
+<td>The increment for time delay before reattempting to poll for new data, when the last attempt did not find any new data.</td>
+</tr>
+<tr class="row-odd"><td>maxBackoffSleep</td>
+<td>5000</td>
+<td>The max time delay between each reattempt to poll for new data, when the last attempt did not find any new data.</td>
+</tr>
+<tr class="row-even"><td>cachePatternMatching</td>
+<td>true</td>
+<td>Listing directories and applying the filename regex pattern may be time consuming for directories
+containing thousands of files. Caching the list of matching files can improve performance.
+The order in which files are consumed will also be cached.
+Requires that the file system keeps track of modification times with at least a 1-second granularity.</td>
+</tr>
+<tr class="row-odd"><td>fileHeader</td>
+<td>false</td>
+<td>Whether to add a header storing the absolute path filename.</td>
+</tr>
+<tr class="row-even"><td>fileHeaderKey</td>
+<td>file</td>
+<td>Header key to use when appending absolute path filename to event header.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">TAILDIR</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.positionFile</span> <span class="o">=</span> <span class="s">/var/log/flume/taildir_position.json</span>
+<span class="na">a1.sources.r1.filegroups</span> <span class="o">=</span> <span class="s">f1 f2</span>
+<span class="na">a1.sources.r1.filegroups.f1</span> <span class="o">=</span> <span class="s">/var/log/test1/example.log</span>
+<span class="na">a1.sources.r1.headers.f1.headerKey1</span> <span class="o">=</span> <span class="s">value1</span>
+<span class="na">a1.sources.r1.filegroups.f2</span> <span class="o">=</span> <span class="s">/var/log/test2/.*log.*</span>
+<span class="na">a1.sources.r1.headers.f2.headerKey1</span> <span class="o">=</span> <span class="s">value2</span>
+<span class="na">a1.sources.r1.headers.f2.headerKey2</span> <span class="o">=</span> <span class="s">value2-2</span>
+<span class="na">a1.sources.r1.fileHeader</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">a1.sources.ri.maxBatchCount</span> <span class="o">=</span> <span class="s">1000</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="twitter-1-firehose-source-experimental">
+<h4>Twitter 1% firehose Source (experimental)<a class="headerlink" href="#twitter-1-firehose-source-experimental" title="Permalink to this headline">¶</a></h4>
+<div class="admonition warning">
+<p class="first admonition-title">Warning</p>
+<p class="last">This source is highly experimental and may change between minor versions of Flume.
+Use at your own risk.</p>
+</div>
+<p>Experimental source that connects via Streaming API to the 1% sample twitter
+firehose, continuously downloads tweets, converts them to Avro format and
+sends Avro events to a downstream Flume sink. Requires the consumer and
+access tokens and secrets of a Twitter developer account.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="18%" />
+<col width="9%" />
+<col width="72%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.source.twitter.TwitterSource</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>consumerKey</strong></td>
+<td>&#8211;</td>
+<td>OAuth consumer key</td>
+</tr>
+<tr class="row-odd"><td><strong>consumerSecret</strong></td>
+<td>&#8211;</td>
+<td>OAuth consumer secret</td>
+</tr>
+<tr class="row-even"><td><strong>accessToken</strong></td>
+<td>&#8211;</td>
+<td>OAuth access token</td>
+</tr>
+<tr class="row-odd"><td><strong>accessTokenSecret</strong></td>
+<td>&#8211;</td>
+<td>OAuth token secret</td>
+</tr>
+<tr class="row-even"><td>maxBatchSize</td>
+<td>1000</td>
+<td>Maximum number of twitter messages to put in a single batch</td>
+</tr>
+<tr class="row-odd"><td>maxBatchDurationMillis</td>
+<td>1000</td>
+<td>Maximum number of milliseconds to wait before closing a batch</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.twitter.TwitterSource</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.consumerKey</span> <span class="o">=</span> <span class="s">YOUR_TWITTER_CONSUMER_KEY</span>
+<span class="na">a1.sources.r1.consumerSecret</span> <span class="o">=</span> <span class="s">YOUR_TWITTER_CONSUMER_SECRET</span>
+<span class="na">a1.sources.r1.accessToken</span> <span class="o">=</span> <span class="s">YOUR_TWITTER_ACCESS_TOKEN</span>
+<span class="na">a1.sources.r1.accessTokenSecret</span> <span class="o">=</span> <span class="s">YOUR_TWITTER_ACCESS_TOKEN_SECRET</span>
+<span class="na">a1.sources.r1.maxBatchSize</span> <span class="o">=</span> <span class="s">10</span>
+<span class="na">a1.sources.r1.maxBatchDurationMillis</span> <span class="o">=</span> <span class="s">200</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="kafka-source">
+<h4>Kafka Source<a class="headerlink" href="#kafka-source" title="Permalink to this headline">¶</a></h4>
+<p>Kafka Source is an Apache Kafka consumer that reads messages from Kafka topics.
+If you have multiple Kafka sources running, you can configure them with the same Consumer Group
+so each will read a unique set of partitions for the topics. This currently supports Kafka server releases 0.10.1.0 or higher. Testing was done up to 2.0.1, which was the highest available version at the time of the release.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="19%" />
+<col width="6%" />
+<col width="75%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.source.kafka.KafkaSource</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>kafka.bootstrap.servers</strong></td>
+<td>&#8211;</td>
+<td>List of brokers in the Kafka cluster used by the source</td>
+</tr>
+<tr class="row-odd"><td>kafka.consumer.group.id</td>
+<td>flume</td>
+<td>Unique identifier of the consumer group. Setting the same id in multiple sources or agents
+indicates that they are part of the same consumer group</td>
+</tr>
+<tr class="row-even"><td><strong>kafka.topics</strong></td>
+<td>&#8211;</td>
+<td>Comma-separated list of topics the Kafka consumer will read messages from.</td>
+</tr>
+<tr class="row-odd"><td><strong>kafka.topics.regex</strong></td>
+<td>&#8211;</td>
+<td>Regex that defines set of topics the source is subscribed on. This property has higher priority
+than <tt class="docutils literal"><span class="pre">kafka.topics</span></tt> and overrides <tt class="docutils literal"><span class="pre">kafka.topics</span></tt> if exists.</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>1000</td>
+<td>Maximum number of messages written to Channel in one batch</td>
+</tr>
+<tr class="row-odd"><td>batchDurationMillis</td>
+<td>1000</td>
+<td>Maximum time (in ms) before a batch will be written to Channel
+The batch will be written whenever the first of size and time will be reached.</td>
+</tr>
+<tr class="row-even"><td>backoffSleepIncrement</td>
+<td>1000</td>
+<td>Initial and incremental wait time that is triggered when a Kafka Topic appears to be empty.
+Wait period will reduce aggressive pinging of an empty Kafka Topic.  One second is ideal for
+ingestion use cases but a lower value may be required for low latency operations with
+interceptors.</td>
+</tr>
+<tr class="row-odd"><td>maxBackoffSleep</td>
+<td>5000</td>
+<td>Maximum wait time that is triggered when a Kafka Topic appears to be empty.  Five seconds is
+ideal for ingestion use cases but a lower value may be required for low latency operations
+with interceptors.</td>
+</tr>
+<tr class="row-even"><td>useFlumeEventFormat</td>
+<td>false</td>
+<td>By default events are taken as bytes from the Kafka topic directly into the event body. Set to
+true to read events as the Flume Avro binary format. Used in conjunction with the same property
+on the KafkaSink or with the parseAsFlumeEvent property on the Kafka Channel this will preserve
+any Flume headers sent on the producing side.</td>
+</tr>
+<tr class="row-odd"><td>setTopicHeader</td>
+<td>true</td>
+<td>When set to true, stores the topic of the retrieved message into a header, defined by the
+<tt class="docutils literal"><span class="pre">topicHeader</span></tt> property.</td>
+</tr>
+<tr class="row-even"><td>topicHeader</td>
+<td>topic</td>
+<td>Defines the name of the header in which to store the name of the topic the message was received
+from, if the <tt class="docutils literal"><span class="pre">setTopicHeader</span></tt> property is set to <tt class="docutils literal"><span class="pre">true</span></tt>. Care should be taken if combining
+with the Kafka Sink <tt class="docutils literal"><span class="pre">topicHeader</span></tt> property so as to avoid sending the message back to the same
+topic in a loop.</td>
+</tr>
+<tr class="row-odd"><td>kafka.consumer.security.protocol</td>
+<td>PLAINTEXT</td>
+<td>Set to SASL_PLAINTEXT, SASL_SSL or SSL if writing to Kafka using some level of security. See below for additional info on secure setup.</td>
+</tr>
+<tr class="row-even"><td><em>more consumer security props</em></td>
+<td>&nbsp;</td>
+<td>If using SASL_PLAINTEXT, SASL_SSL or SSL refer to <a class="reference external" href="http://kafka.apache.org/documentation.html#security">Kafka security</a> for additional
+properties that need to be set on consumer.</td>
+</tr>
+<tr class="row-odd"><td>Other Kafka Consumer Properties</td>
+<td>&#8211;</td>
+<td>These properties are used to configure the Kafka Consumer. Any consumer property supported
+by Kafka can be used. The only requirement is to prepend the property name with the prefix
+<tt class="docutils literal"><span class="pre">kafka.consumer</span></tt>.
+For example: <tt class="docutils literal"><span class="pre">kafka.consumer.auto.offset.reset</span></tt></td>
+</tr>
+</tbody>
+</table>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">The Kafka Source overrides two Kafka consumer parameters:
+auto.commit.enable is set to &#8220;false&#8221; by the source and every batch is committed. Kafka source guarantees at least once
+strategy of messages retrieval. The duplicates can be present when the source starts.
+The Kafka Source also provides defaults for the key.deserializer(org.apache.kafka.common.serialization.StringSerializer)
+and value.deserializer(org.apache.kafka.common.serialization.ByteArraySerializer). Modification of these parameters is not recommended.</p>
+</div>
+<p>Deprecated Properties</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="21%" />
+<col width="13%" />
+<col width="66%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>topic</td>
+<td>&#8211;</td>
+<td>Use kafka.topics</td>
+</tr>
+<tr class="row-odd"><td>groupId</td>
+<td>flume</td>
+<td>Use kafka.consumer.group.id</td>
+</tr>
+<tr class="row-even"><td>zookeeperConnect</td>
+<td>&#8211;</td>
+<td>Is no longer supported by Kafka consumer client since 0.9.x. Use kafka.bootstrap.servers
+to establish connection with Kafka cluster</td>
+</tr>
+<tr class="row-odd"><td>migrateZookeeperOffsets</td>
+<td>true</td>
+<td>When no Kafka stored offset is found, look up the offsets in Zookeeper and commit them to Kafka.
+This should be true to support seamless Kafka client migration from older versions of Flume.
+Once migrated this can be set to false, though that should generally not be required.
+If no Zookeeper offset is found, the Kafka configuration kafka.consumer.auto.offset.reset
+defines how offsets are handled.
+Check <a class="reference external" href="http://kafka.apache.org/documentation.html#newconsumerconfigs">Kafka documentation</a>
+for details</td>
+</tr>
+</tbody>
+</table>
+<p>Example for topic subscription by comma-separated topic list.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">tier1.sources.source1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.kafka.KafkaSource</span>
+<span class="na">tier1.sources.source1.channels</span> <span class="o">=</span> <span class="s">channel1</span>
+<span class="na">tier1.sources.source1.batchSize</span> <span class="o">=</span> <span class="s">5000</span>
+<span class="na">tier1.sources.source1.batchDurationMillis</span> <span class="o">=</span> <span class="s">2000</span>
+<span class="na">tier1.sources.source1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">localhost:9092</span>
+<span class="na">tier1.sources.source1.kafka.topics</span> <span class="o">=</span> <span class="s">test1, test2</span>
+<span class="na">tier1.sources.source1.kafka.consumer.group.id</span> <span class="o">=</span> <span class="s">custom.g.id</span>
+</pre></div>
+</div>
+<p>Example for topic subscription by regex</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">tier1.sources.source1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.kafka.KafkaSource</span>
+<span class="na">tier1.sources.source1.channels</span> <span class="o">=</span> <span class="s">channel1</span>
+<span class="na">tier1.sources.source1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">localhost:9092</span>
+<span class="na">tier1.sources.source1.kafka.topics.regex</span> <span class="o">=</span> <span class="s">^topic[0-9]$</span>
+<span class="c"># the default kafka.consumer.group.id=flume is used</span>
+</pre></div>
+</div>
+<p><strong>Security and Kafka Source:</strong></p>
+<p>Secure authentication as well as data encryption is supported on the communication channel between Flume and Kafka.
+For secure authentication SASL/GSSAPI (Kerberos V5) or SSL (even though the parameter is named SSL, the actual protocol is a TLS implementation) can be used from Kafka version 0.9.0.</p>
+<p>As of now data encryption is solely provided by SSL/TLS.</p>
+<p>Setting <tt class="docutils literal"><span class="pre">kafka.consumer.security.protocol</span></tt> to any of the following value means:</p>
+<ul class="simple">
+<li><strong>SASL_PLAINTEXT</strong> - Kerberos or plaintext authentication with no data encryption</li>
+<li><strong>SASL_SSL</strong> - Kerberos or plaintext authentication with data encryption</li>
+<li><strong>SSL</strong> - TLS based encryption with optional authentication.</li>
+</ul>
+<div class="admonition warning">
+<p class="first admonition-title">Warning</p>
+<p class="last">There is a performance degradation when SSL is enabled,
+the magnitude of which depends on the CPU type and the JVM implementation.
+Reference: <a class="reference external" href="http://kafka.apache.org/documentation#security_overview">Kafka security overview</a>
+and the jira for tracking this issue:
+<a class="reference external" href="https://issues.apache.org/jira/browse/KAFKA-2561">KAFKA-2561</a></p>
+</div>
+<p><strong>TLS and Kafka Source:</strong></p>
+<p>Please read the steps described in <a class="reference external" href="http://kafka.apache.org/documentation#security_configclients">Configuring Kafka Clients SSL</a>
+to learn about additional configuration settings for fine tuning for example any of the following:
+security provider, cipher suites, enabled protocols, truststore or keystore types.</p>
+<p>Example configuration with server side authentication and data encryption.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.source1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.kafka.KafkaSource</span>
+<span class="na">a1.sources.source1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.sources.source1.kafka.topics</span> <span class="o">=</span> <span class="s">mytopic</span>
+<span class="na">a1.sources.source1.kafka.consumer.group.id</span> <span class="o">=</span> <span class="s">flume-consumer</span>
+<span class="na">a1.sources.source1.kafka.consumer.security.protocol</span> <span class="o">=</span> <span class="s">SSL</span>
+<span class="c"># optional, the global truststore can be used alternatively</span>
+<span class="na">a1.sources.source1.kafka.consumer.ssl.truststore.location</span><span class="o">=</span><span class="s">/path/to/truststore.jks</span>
+<span class="na">a1.sources.source1.kafka.consumer.ssl.truststore.password</span><span class="o">=</span><span class="s">&lt;password to access the truststore&gt;</span>
+</pre></div>
+</div>
+<p>Specifying the truststore is optional here, the global truststore can be used instead.
+For more details about the global SSL setup, see the <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section.</p>
+<p>Note: By default the property <tt class="docutils literal"><span class="pre">ssl.endpoint.identification.algorithm</span></tt>
+is not defined, so hostname verification is not performed.
+In order to enable hostname verification, set the following properties</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.source1.kafka.consumer.ssl.endpoint.identification.algorithm</span><span class="o">=</span><span class="s">HTTPS</span>
+</pre></div>
+</div>
+<p>Once enabled, clients will verify the server&#8217;s fully qualified domain name (FQDN)
+against one of the following two fields:</p>
+<ol class="arabic simple">
+<li>Common Name (CN) <a class="reference external" href="https://tools.ietf.org/html/rfc6125#section-2.3">https://tools.ietf.org/html/rfc6125#section-2.3</a></li>
+<li>Subject Alternative Name (SAN) <a class="reference external" href="https://tools.ietf.org/html/rfc5280#section-4.2.1.6">https://tools.ietf.org/html/rfc5280#section-4.2.1.6</a></li>
+</ol>
+<p>If client side authentication is also required then additionally the following needs to be added to Flume agent
+configuration or the global SSL setup can be used (see <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section).
+Each Flume agent has to have its client certificate which has to be trusted by Kafka brokers either
+individually or by their signature chain. Common example is to sign each client certificate by a single Root CA
+which in turn is trusted by Kafka brokers.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># optional, the global keystore can be used alternatively</span>
+<span class="na">a1.sources.source1.kafka.consumer.ssl.keystore.location</span><span class="o">=</span><span class="s">/path/to/client.keystore.jks</span>
+<span class="na">a1.sources.source1.kafka.consumer.ssl.keystore.password</span><span class="o">=</span><span class="s">&lt;password to access the keystore&gt;</span>
+</pre></div>
+</div>
+<p>If keystore and key use different password protection then <tt class="docutils literal"><span class="pre">ssl.key.password</span></tt> property will
+provide the required additional secret for the consumer keystore:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.source1.kafka.consumer.ssl.key.password</span><span class="o">=</span><span class="s">&lt;password to access the key&gt;</span>
+</pre></div>
+</div>
+<p><strong>Kerberos and Kafka Source:</strong></p>
+<p>To use Kafka source with a Kafka cluster secured with Kerberos, set the <tt class="docutils literal"><span class="pre">consumer.security.protocol</span></tt> properties noted above for consumer.
+The Kerberos keytab and principal to be used with Kafka brokers is specified in a JAAS file&#8217;s &#8220;KafkaClient&#8221; section. &#8220;Client&#8221; section describes the Zookeeper connection if needed.
+See <a class="reference external" href="http://kafka.apache.org/documentation.html#security_sasl_clientconfig">Kafka doc</a>
+for information on the JAAS file contents. The location of this JAAS file and optionally the system wide kerberos configuration can be specified via JAVA_OPTS in flume-env.sh:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">JAVA_OPTS</span><span class="o">=</span><span class="s">&quot;$JAVA_OPTS -Djava.security.krb5.conf=/path/to/krb5.conf&quot;</span>
+<span class="na">JAVA_OPTS</span><span class="o">=</span><span class="s">&quot;$JAVA_OPTS -Djava.security.auth.login.config=/path/to/flume_jaas.conf&quot;</span>
+</pre></div>
+</div>
+<p>Example secure configuration using SASL_PLAINTEXT:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.source1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.kafka.KafkaSource</span>
+<span class="na">a1.sources.source1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.sources.source1.kafka.topics</span> <span class="o">=</span> <span class="s">mytopic</span>
+<span class="na">a1.sources.source1.kafka.consumer.group.id</span> <span class="o">=</span> <span class="s">flume-consumer</span>
+<span class="na">a1.sources.source1.kafka.consumer.security.protocol</span> <span class="o">=</span> <span class="s">SASL_PLAINTEXT</span>
+<span class="na">a1.sources.source1.kafka.consumer.sasl.mechanism</span> <span class="o">=</span> <span class="s">GSSAPI</span>
+<span class="na">a1.sources.source1.kafka.consumer.sasl.kerberos.service.name</span> <span class="o">=</span> <span class="s">kafka</span>
+</pre></div>
+</div>
+<p>Example secure configuration using SASL_SSL:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.source1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.kafka.KafkaSource</span>
+<span class="na">a1.sources.source1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.sources.source1.kafka.topics</span> <span class="o">=</span> <span class="s">mytopic</span>
+<span class="na">a1.sources.source1.kafka.consumer.group.id</span> <span class="o">=</span> <span class="s">flume-consumer</span>
+<span class="na">a1.sources.source1.kafka.consumer.security.protocol</span> <span class="o">=</span> <span class="s">SASL_SSL</span>
+<span class="na">a1.sources.source1.kafka.consumer.sasl.mechanism</span> <span class="o">=</span> <span class="s">GSSAPI</span>
+<span class="na">a1.sources.source1.kafka.consumer.sasl.kerberos.service.name</span> <span class="o">=</span> <span class="s">kafka</span>
+<span class="c"># optional, the global truststore can be used alternatively</span>
+<span class="na">a1.sources.source1.kafka.consumer.ssl.truststore.location</span><span class="o">=</span><span class="s">/path/to/truststore.jks</span>
+<span class="na">a1.sources.source1.kafka.consumer.ssl.truststore.password</span><span class="o">=</span><span class="s">&lt;password to access the truststore&gt;</span>
+</pre></div>
+</div>
+<p>Sample JAAS file. For reference of its content please see client config sections of the desired authentication mechanism (GSSAPI/PLAIN)
+in Kafka documentation of <a class="reference external" href="http://kafka.apache.org/documentation#security_sasl_clientconfig">SASL configuration</a>.
+Since the Kafka Source may also connect to Zookeeper for offset migration, the &#8220;Client&#8221; section was also added to this example.
+This won&#8217;t be needed unless you require offset migration, or you require this section for other secure components.
+Also please make sure that the operating system user of the Flume processes has read privileges on the jaas and keytab files.</p>
+<div class="highlight-javascript"><div class="highlight"><pre><span class="nx">Client</span> <span class="p">{</span>
+  <span class="nx">com</span><span class="p">.</span><span class="nb">sun</span><span class="p">.</span><span class="nx">security</span><span class="p">.</span><span class="nx">auth</span><span class="p">.</span><span class="nx">module</span><span class="p">.</span><span class="nx">Krb5LoginModule</span> <span class="nx">required</span>
+  <span class="nx">useKeyTab</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">storeKey</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">keyTab</span><span class="o">=</span><span class="s2">&quot;/path/to/keytabs/flume.keytab&quot;</span>
+  <span class="nx">principal</span><span class="o">=</span><span class="s2">&quot;flume/flumehost1.example.com@YOURKERBEROSREALM&quot;</span><span class="p">;</span>
+<span class="p">};</span>
+
+<span class="nx">KafkaClient</span> <span class="p">{</span>
+  <span class="nx">com</span><span class="p">.</span><span class="nb">sun</span><span class="p">.</span><span class="nx">security</span><span class="p">.</span><span class="nx">auth</span><span class="p">.</span><span class="nx">module</span><span class="p">.</span><span class="nx">Krb5LoginModule</span> <span class="nx">required</span>
+  <span class="nx">useKeyTab</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">storeKey</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">keyTab</span><span class="o">=</span><span class="s2">&quot;/path/to/keytabs/flume.keytab&quot;</span>
+  <span class="nx">principal</span><span class="o">=</span><span class="s2">&quot;flume/flumehost1.example.com@YOURKERBEROSREALM&quot;</span><span class="p">;</span>
+<span class="p">};</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="netcat-tcp-source">
+<h4>NetCat TCP Source<a class="headerlink" href="#netcat-tcp-source" title="Permalink to this headline">¶</a></h4>
+<p>A netcat-like source that listens on a given port and turns each line of text
+into an event. Acts like <tt class="docutils literal"><span class="pre">nc</span> <span class="pre">-k</span> <span class="pre">-l</span> <span class="pre">[host]</span> <span class="pre">[port]</span></tt>. In other words,
+it opens a specified port and listens for data. The expectation is that the
+supplied data is newline separated text. Each line of text is turned into a
+Flume event and sent via the connected channel.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="21%" />
+<col width="15%" />
+<col width="64%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">netcat</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>bind</strong></td>
+<td>&#8211;</td>
+<td>Host name or IP address to bind to</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>Port # to bind to</td>
+</tr>
+<tr class="row-even"><td>max-line-length</td>
+<td>512</td>
+<td>Max line length per event body (in bytes)</td>
+</tr>
+<tr class="row-odd"><td>ack-every-event</td>
+<td>true</td>
+<td>Respond with an &#8220;OK&#8221; for every event received</td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-even"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-odd"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">netcat</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">6666</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="netcat-udp-source">
+<h4>NetCat UDP Source<a class="headerlink" href="#netcat-udp-source" title="Permalink to this headline">¶</a></h4>
+<p>As per the original Netcat (TCP) source, this source listens on a given
+port and turns each line of text into an event, which is sent via the connected channel.
+Acts like <tt class="docutils literal"><span class="pre">nc</span> <span class="pre">-u</span> <span class="pre">-k</span> <span class="pre">-l</span> <span class="pre">[host]</span> <span class="pre">[port]</span></tt>.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="24%" />
+<col width="14%" />
+<col width="63%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">netcatudp</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>bind</strong></td>
+<td>&#8211;</td>
+<td>Host name or IP address to bind to</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>Port # to bind to</td>
+</tr>
+<tr class="row-even"><td>remoteAddressHeader</td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-even"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-odd"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-even"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">netcatudp</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">6666</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="sequence-generator-source">
+<h4>Sequence Generator Source<a class="headerlink" href="#sequence-generator-source" title="Permalink to this headline">¶</a></h4>
+<p>A simple sequence generator that continuously generates events with a counter that starts from 0,
+increments by 1 and stops at totalEvents. Retries when it can&#8217;t send events to the channel. Useful
+mainly for testing. During retries it keeps the body of the retried messages the same as before so
+that the number of unique events - after de-duplication at destination - is expected to be
+equal to the specified <tt class="docutils literal"><span class="pre">totalEvents</span></tt>. Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="16%" />
+<col width="18%" />
+<col width="66%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">seq</span></tt></td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-even"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-odd"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>1</td>
+<td>Number of events to attempt to process per request loop.</td>
+</tr>
+<tr class="row-odd"><td>totalEvents</td>
+<td>Long.MAX_VALUE</td>
+<td>Number of unique events sent by the source.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">seq</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="syslog-sources">
+<h4>Syslog Sources<a class="headerlink" href="#syslog-sources" title="Permalink to this headline">¶</a></h4>
+<p>Reads syslog data and generates Flume events. The UDP source treats an entire
+message as a single event. The TCP sources create a new event for each string
+of characters separated by a newline (&#8216;\n&#8217;).</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<div class="section" id="syslog-tcp-source">
+<h5>Syslog TCP Source<a class="headerlink" href="#syslog-tcp-source" title="Permalink to this headline">¶</a></h5>
+<p>The original, tried-and-true syslog TCP source.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="16%" />
+<col width="9%" />
+<col width="75%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">syslogtcp</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>host</strong></td>
+<td>&#8211;</td>
+<td>Host name or IP address to bind to</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>Port # to bind to</td>
+</tr>
+<tr class="row-even"><td>eventSize</td>
+<td>2500</td>
+<td>Maximum size of a single event line, in bytes</td>
+</tr>
+<tr class="row-odd"><td>keepFields</td>
+<td>none</td>
+<td>Setting this to &#8216;all&#8217; will preserve the Priority,
+Timestamp and Hostname in the body of the event.
+A spaced separated list of fields to include
+is allowed as well. Currently, the following
+fields can be included: priority, version,
+timestamp, hostname. The values &#8216;true&#8217; and &#8216;false&#8217;
+have been deprecated in favor of &#8216;all&#8217; and &#8216;none&#8217;.</td>
+</tr>
+<tr class="row-even"><td>clientIPHeader</td>
+<td>&#8211;</td>
+<td>If specified, the IP address of the client will be stored in
+the header of each event using the header name specified here.
+This allows for interceptors and channel selectors to customize
+routing logic based on the IP address of the client.
+Do not use the standard Syslog header names here (like _host_)
+because the event header will be overridden in that case.</td>
+</tr>
+<tr class="row-odd"><td>clientHostnameHeader</td>
+<td>&#8211;</td>
+<td>If specified, the host name of the client will be stored in
+the header of each event using the header name specified here.
+This allows for interceptors and channel selectors to customize
+routing logic based on the host name of the client.
+Retrieving the host name may involve a name service reverse
+lookup which may affect the performance.
+Do not use the standard Syslog header names here (like _host_)
+because the event header will be overridden in that case.</td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-even"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-odd"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>ssl</td>
+<td>false</td>
+<td>Set this to true to enable SSL encryption. If SSL is enabled,
+you must also specify a &#8220;keystore&#8221; and a &#8220;keystore-password&#8221;,
+either through component level parameters (see below)
+or as global SSL parameters (see <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section).</td>
+</tr>
+<tr class="row-odd"><td>keystore</td>
+<td>&#8211;</td>
+<td>This is the path to a Java keystore file.
+If not specified here, then the global keystore will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-even"><td>keystore-password</td>
+<td>&#8211;</td>
+<td>The password for the Java keystore.
+If not specified here, then the global keystore password will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-odd"><td>keystore-type</td>
+<td>JKS</td>
+<td>The type of the Java keystore. This can be &#8220;JKS&#8221; or &#8220;PKCS12&#8221;.
+If not specified here, then the global keystore type will be used
+(if defined, otherwise the default is JKS).</td>
+</tr>
+<tr class="row-even"><td>exclude-protocols</td>
+<td>SSLv3</td>
+<td>Space-separated list of SSL/TLS protocols to exclude.
+SSLv3 will always be excluded in addition to the protocols specified.</td>
+</tr>
+<tr class="row-odd"><td>include-protocols</td>
+<td>&#8211;</td>
+<td>Space-separated list of SSL/TLS protocols to include.
+The enabled protocols will be the included protocols without the excluded protocols.
+If included-protocols is empty, it includes every supported protocol.</td>
+</tr>
+<tr class="row-even"><td>exclude-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to exclude.</td>
+</tr>
+<tr class="row-odd"><td>include-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to include.
+The enabled cipher suites will be the included cipher suites without the excluded cipher suites.
+If included-cipher-suites is empty, it includes every supported cipher suite.</td>
+</tr>
+</tbody>
+</table>
+<p>For example, a syslog TCP source for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">syslogtcp</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">5140</span>
+<span class="na">a1.sources.r1.host</span> <span class="o">=</span> <span class="s">localhost</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="multiport-syslog-tcp-source">
+<h5>Multiport Syslog TCP Source<a class="headerlink" href="#multiport-syslog-tcp-source" title="Permalink to this headline">¶</a></h5>
+<p>This is a newer, faster, multi-port capable version of the Syslog TCP source.
+Note that the <tt class="docutils literal"><span class="pre">ports</span></tt> configuration setting has replaced <tt class="docutils literal"><span class="pre">port</span></tt>.
+Multi-port capability means that it can listen on many ports at once in an
+efficient manner. This source uses the Apache Mina library to do that.
+Provides support for RFC-3164 and many common RFC-5424 formatted messages.
+Also provides the capability to configure the character set used on a per-port
+basis.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="8%" />
+<col width="6%" />
+<col width="86%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">multiport_syslogtcp</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>host</strong></td>
+<td>&#8211;</td>
+<td>Host name or IP address to bind to.</td>
+</tr>
+<tr class="row-odd"><td><strong>ports</strong></td>
+<td>&#8211;</td>
+<td>Space-separated list (one or more) of ports to bind to.</td>
+</tr>
+<tr class="row-even"><td>eventSize</td>
+<td>2500</td>
+<td>Maximum size of a single event line, in bytes.</td>
+</tr>
+<tr class="row-odd"><td>keepFields</td>
+<td>none</td>
+<td>Setting this to &#8216;all&#8217; will preserve the
+Priority, Timestamp and Hostname in the body of the event.
+A spaced separated list of fields to include
+is allowed as well. Currently, the following
+fields can be included: priority, version,
+timestamp, hostname. The values &#8216;true&#8217; and &#8216;false&#8217;
+have been deprecated in favor of &#8216;all&#8217; and &#8216;none&#8217;.</td>
+</tr>
+<tr class="row-even"><td>portHeader</td>
+<td>&#8211;</td>
+<td>If specified, the port number will be stored in the header of each event using the header name specified here. This allows for interceptors and channel selectors to customize routing logic based on the incoming port.</td>
+</tr>
+<tr class="row-odd"><td>clientIPHeader</td>
+<td>&#8211;</td>
+<td>If specified, the IP address of the client will be stored in
+the header of each event using the header name specified here.
+This allows for interceptors and channel selectors to customize
+routing logic based on the IP address of the client.
+Do not use the standard Syslog header names here (like _host_)
+because the event header will be overridden in that case.</td>
+</tr>
+<tr class="row-even"><td>clientHostnameHeader</td>
+<td>&#8211;</td>
+<td>If specified, the host name of the client will be stored in
+the header of each event using the header name specified here.
+This allows for interceptors and channel selectors to customize
+routing logic based on the host name of the client.
+Retrieving the host name may involve a name service reverse
+lookup which may affect the performance.
+Do not use the standard Syslog header names here (like _host_)
+because the event header will be overridden in that case.</td>
+</tr>
+<tr class="row-odd"><td>charset.default</td>
+<td>UTF-8</td>
+<td>Default character set used while parsing syslog events into strings.</td>
+</tr>
+<tr class="row-even"><td>charset.port.&lt;port&gt;</td>
+<td>&#8211;</td>
+<td>Character set is configurable on a per-port basis.</td>
+</tr>
+<tr class="row-odd"><td>batchSize</td>
+<td>100</td>
+<td>Maximum number of events to attempt to process per request loop. Using the default is usually fine.</td>
+</tr>
+<tr class="row-even"><td>readBufferSize</td>
+<td>1024</td>
+<td>Size of the internal Mina read buffer. Provided for performance tuning. Using the default is usually fine.</td>
+</tr>
+<tr class="row-odd"><td>numProcessors</td>
+<td>(auto-detected)</td>
+<td>Number of processors available on the system for use while processing messages. Default is to auto-detect # of CPUs using the Java Runtime API. Mina will spawn 2 request-processing threads per detected CPU, which is often reasonable.</td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating, multiplexing, or custom</td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>&#8211;</td>
+<td>Depends on the <tt class="docutils literal"><span class="pre">selector.type</span></tt> value</td>
+</tr>
+<tr class="row-even"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors.</td>
+</tr>
+<tr class="row-odd"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>ssl</td>
+<td>false</td>
+<td>Set this to true to enable SSL encryption. If SSL is enabled,
+you must also specify a &#8220;keystore&#8221; and a &#8220;keystore-password&#8221;,
+either through component level parameters (see below)
+or as global SSL parameters (see <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section).</td>
+</tr>
+<tr class="row-odd"><td>keystore</td>
+<td>&#8211;</td>
+<td>This is the path to a Java keystore file.
+If not specified here, then the global keystore will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-even"><td>keystore-password</td>
+<td>&#8211;</td>
+<td>The password for the Java keystore.
+If not specified here, then the global keystore password will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-odd"><td>keystore-type</td>
+<td>JKS</td>
+<td>The type of the Java keystore. This can be &#8220;JKS&#8221; or &#8220;PKCS12&#8221;.
+If not specified here, then the global keystore type will be used
+(if defined, otherwise the default is JKS).</td>
+</tr>
+<tr class="row-even"><td>exclude-protocols</td>
+<td>SSLv3</td>
+<td>Space-separated list of SSL/TLS protocols to exclude.
+SSLv3 will always be excluded in addition to the protocols specified.</td>
+</tr>
+<tr class="row-odd"><td>include-protocols</td>
+<td>&#8211;</td>
+<td>Space-separated list of SSL/TLS protocols to include.
+The enabled protocols will be the included protocols without the excluded protocols.
+If included-protocols is empty, it includes every supported protocol.</td>
+</tr>
+<tr class="row-even"><td>exclude-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to exclude.</td>
+</tr>
+<tr class="row-odd"><td>include-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to include.
+The enabled cipher suites will be the included cipher suites without the excluded cipher suites.
+If included-cipher-suites is empty, it includes every supported cipher suite.</td>
+</tr>
+</tbody>
+</table>
+<p>For example, a multiport syslog TCP source for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">multiport_syslogtcp</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.host</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.ports</span> <span class="o">=</span> <span class="s">10001 10002 10003</span>
+<span class="na">a1.sources.r1.portHeader</span> <span class="o">=</span> <span class="s">port</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="syslog-udp-source">
+<h5>Syslog UDP Source<a class="headerlink" href="#syslog-udp-source" title="Permalink to this headline">¶</a></h5>
+<table border="1" class="docutils">
+<colgroup>
+<col width="21%" />
+<col width="12%" />
+<col width="67%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">syslogudp</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>host</strong></td>
+<td>&#8211;</td>
+<td>Host name or IP address to bind to</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>Port # to bind to</td>
+</tr>
+<tr class="row-even"><td>keepFields</td>
+<td>false</td>
+<td>Setting this to true will preserve the Priority,
+Timestamp and Hostname in the body of the event.</td>
+</tr>
+<tr class="row-odd"><td>clientIPHeader</td>
+<td>&#8211;</td>
+<td>If specified, the IP address of the client will be stored in
+the header of each event using the header name specified here.
+This allows for interceptors and channel selectors to customize
+routing logic based on the IP address of the client.
+Do not use the standard Syslog header names here (like _host_)
+because the event header will be overridden in that case.</td>
+</tr>
+<tr class="row-even"><td>clientHostnameHeader</td>
+<td>&#8211;</td>
+<td>If specified, the host name of the client will be stored in
+the header of each event using the header name specified here.
+This allows for interceptors and channel selectors to customize
+routing logic based on the host name of the client.
+Retrieving the host name may involve a name service reverse
+lookup which may affect the performance.
+Do not use the standard Syslog header names here (like _host_)
+because the event header will be overridden in that case.</td>
+</tr>
+<tr class="row-odd"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-even"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-odd"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-even"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>For example, a syslog UDP source for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">syslogudp</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">5140</span>
+<span class="na">a1.sources.r1.host</span> <span class="o">=</span> <span class="s">localhost</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="http-source">
+<h4>HTTP Source<a class="headerlink" href="#http-source" title="Permalink to this headline">¶</a></h4>
+<p>A source which accepts Flume Events by HTTP POST and GET. GET should be used
+for experimentation only. HTTP requests are converted into flume events by
+a pluggable &#8220;handler&#8221; which must implement the HTTPSourceHandler interface.
+This handler takes a HttpServletRequest and returns a list of
+flume events. All events handled from one Http request are committed to the channel
+in one transaction, thus allowing for increased efficiency on channels like
+the file channel. If the handler throws an exception, this source will
+return an HTTP status of 400. If the channel is full, or the source is unable to
+append events to the channel, the source will return an HTTP 503 - Temporarily
+unavailable status.</p>
+<p>All events sent in one post request are considered to be one batch and
+inserted into the channel in one transaction.</p>
+<p>This source is based on Jetty 9.4 and offers the ability to set additional
+Jetty-specific parameters which will be passed directly to the Jetty components.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="13%" />
+<col width="27%" />
+<col width="60%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&nbsp;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">http</span></tt></td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>The port the source should bind to.</td>
+</tr>
+<tr class="row-even"><td>bind</td>
+<td>0.0.0.0</td>
+<td>The hostname or IP address to listen on</td>
+</tr>
+<tr class="row-odd"><td>handler</td>
+<td><tt class="docutils literal"><span class="pre">org.apache.flume.source.http.JSONHandler</span></tt></td>
+<td>The FQCN of the handler class.</td>
+</tr>
+<tr class="row-even"><td>handler.*</td>
+<td>&#8211;</td>
+<td>Config parameters for the handler</td>
+</tr>
+<tr class="row-odd"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-even"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-odd"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-even"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>ssl</td>
+<td>false</td>
+<td>Set this property to true to enable SSL. <em>HTTP Source does not support SSLv3.</em></td>
+<tr class="row-even"><td>exclude-protocols</td>
+<td>SSLv3</td>
+<td>Space-separated list of SSL/TLS protocols to exclude.
+SSLv3 will always be excluded in addition to the protocols specified.</td>
+</tr>
+<tr class="row-odd"><td>include-protocols</td>
+<td>&#8211;</td>
+<td>Space-separated list of SSL/TLS protocols to include.
+The enabled protocols will be the included protocols without the excluded protocols.
+If included-protocols is empty, it includes every supported protocol.</td>
+</tr>
+<tr class="row-even"><td>exclude-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to exclude.</td>
+</tr>
+<tr class="row-odd"><td>include-cipher-suites</td>
+<td>&#8211;</td>
+<td>Space-separated list of cipher suites to include.
+The enabled cipher suites will be the included cipher suites without the excluded cipher suites.</td>
+</tr>
+<tr class="row-even"><td>keystore</td>
+<td>&nbsp;</td>
+<td>Location of the keystore including keystore file name.
+If SSL is enabled but the keystore is not specified here,
+then the global keystore will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-odd"><td>keystore-password</td>
+<td>&nbsp;</td>
+<td>Keystore password.
+If SSL is enabled but the keystore password is not specified here,
+then the global keystore password will be used
+(if defined, otherwise configuration error).</td>
+</tr>
+<tr class="row-even"><td>keystore-type</td>
+<td>JKS</td>
+<td>Keystore type. This can be &#8220;JKS&#8221; or &#8220;PKCS12&#8221;.</td>
+</tr>
+<tr class="row-odd"><td>QueuedThreadPool.*</td>
+<td>&nbsp;</td>
+<td>Jetty specific settings to be set on org.eclipse.jetty.util.thread.QueuedThreadPool.
+N.B. QueuedThreadPool will only be used if at least one property of this class is set.</td>
+</tr>
+<tr class="row-even"><td>HttpConfiguration.*</td>
+<td>&nbsp;</td>
+<td>Jetty specific settings to be set on org.eclipse.jetty.server.HttpConfiguration</td>
+</tr>
+<tr class="row-odd"><td>SslContextFactory.*</td>
+<td>&nbsp;</td>
+<td>Jetty specific settings to be set on org.eclipse.jetty.util.ssl.SslContextFactory (only
+applicable when <em>ssl</em> is set to true).</td>
+</tr>
+<tr class="row-even"><td>ServerConnector.*</td>
+<td>&nbsp;</td>
+<td>Jetty specific settings to be set on org.eclipse.jetty.server.ServerConnector</td>
+</tr>
+</tbody>
+</table>
+<p>Deprecated Properties</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="13%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>keystorePassword</td>
+<td>&#8211;</td>
+<td>Use <em>keystore-password</em>. Deprecated value will be overwritten with the new one.</td>
+</tr>
+<tr class="row-odd"><td>excludeProtocols</td>
+<td>SSLv3</td>
+<td>Use <em>exclude-protocols</em>. Deprecated value will be overwritten with the new one.</td>
+</tr>
+<tr class="row-even"><td>enableSSL</td>
+<td>false</td>
+<td>Use <em>ssl</em>. Deprecated value will be overwritten with the new one.</td>
+</tr>
+</tbody>
+</table>
+<p>N.B. Jetty-specific settings are set using the setter-methods on the objects listed above. For full details see the Javadoc for these classes
+(<a class="reference external" href="http://www.eclipse.org/jetty/javadoc/9.4.6.v20170531/org/eclipse/jetty/util/thread/QueuedThreadPool.html">QueuedThreadPool</a>,
+<a class="reference external" href="http://www.eclipse.org/jetty/javadoc/9.4.6.v20170531/org/eclipse/jetty/server/HttpConfiguration.html">HttpConfiguration</a>,
+<a class="reference external" href="http://www.eclipse.org/jetty/javadoc/9.4.6.v20170531/org/eclipse/jetty/util/ssl/SslContextFactory.html">SslContextFactory</a> and
+<a class="reference external" href="http://www.eclipse.org/jetty/javadoc/9.4.6.v20170531/org/eclipse/jetty/server/ServerConnector.html">ServerConnector</a>).</p>
+<p>When using Jetty-specific settings, the named properties above will take precedence (for example excludeProtocols will take
+precedence over SslContextFactory.ExcludeProtocols). All properties should be initial lower case.</p>
+<p>An example http source for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">http</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">5140</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.handler</span> <span class="o">=</span> <span class="s">org.example.rest.RestHandler</span>
+<span class="na">a1.sources.r1.handler.nickname</span> <span class="o">=</span> <span class="s">random props</span>
+<span class="na">a1.sources.r1.HttpConfiguration.sendServerVersion</span> <span class="o">=</span> <span class="s">false</span>
+<span class="na">a1.sources.r1.ServerConnector.idleTimeout</span> <span class="o">=</span> <span class="s">300</span>
+</pre></div>
+</div>
+<div class="section" id="jsonhandler">
+<h5>JSONHandler<a class="headerlink" href="#jsonhandler" title="Permalink to this headline">¶</a></h5>
+<p>A handler is provided out of the box which can handle events represented in
+JSON format, and supports UTF-8, UTF-16 and UTF-32 character sets. The handler
+accepts an array of events (even if there is only one event, the event has to be
+sent in an array) and converts them to a Flume event based on the
+encoding specified in the request. If no encoding is specified, UTF-8 is assumed.
+The JSON handler supports UTF-8, UTF-16 and UTF-32.
+Events are represented as follows.</p>
+<div class="highlight-javascript"><div class="highlight"><pre><span class="p">[{</span>
+  <span class="s2">&quot;headers&quot;</span> <span class="o">:</span> <span class="p">{</span>
+             <span class="s2">&quot;timestamp&quot;</span> <span class="o">:</span> <span class="s2">&quot;434324343&quot;</span><span class="p">,</span>
+             <span class="s2">&quot;host&quot;</span> <span class="o">:</span> <span class="s2">&quot;random_host.example.com&quot;</span>
+             <span class="p">},</span>
+  <span class="s2">&quot;body&quot;</span> <span class="o">:</span> <span class="s2">&quot;random_body&quot;</span>
+  <span class="p">},</span>
+  <span class="p">{</span>
+  <span class="s2">&quot;headers&quot;</span> <span class="o">:</span> <span class="p">{</span>
+             <span class="s2">&quot;namenode&quot;</span> <span class="o">:</span> <span class="s2">&quot;namenode.example.com&quot;</span><span class="p">,</span>
+             <span class="s2">&quot;datanode&quot;</span> <span class="o">:</span> <span class="s2">&quot;random_datanode.example.com&quot;</span>
+             <span class="p">},</span>
+  <span class="s2">&quot;body&quot;</span> <span class="o">:</span> <span class="s2">&quot;really_random_body&quot;</span>
+  <span class="p">}]</span>
+</pre></div>
+</div>
+<p>To set the charset, the request must have content type specified as
+<tt class="docutils literal"><span class="pre">application/json;</span> <span class="pre">charset=UTF-8</span></tt> (replace UTF-8 with UTF-16 or UTF-32 as
+required).</p>
+<p>One way to create an event in the format expected by this handler is to
+use JSONEvent provided in the Flume SDK and use Google Gson to create the JSON
+string using the Gson#fromJson(Object, Type)
+method. The type token to pass as the 2nd argument of this method
+for list of events can be created by:</p>
+<div class="highlight-java"><div class="highlight"><pre><span class="n">Type</span> <span class="n">type</span> <span class="o">=</span> <span class="k">new</span> <span class="n">TypeToken</span><span class="o">&lt;</span><span class="n">List</span><span class="o">&lt;</span><span class="n">JSONEvent</span><span class="o">&gt;&gt;()</span> <span class="o">{}.</span><span class="na">getType</span><span class="o">();</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="blobhandler">
+<h5>BlobHandler<a class="headerlink" href="#blobhandler" title="Permalink to this headline">¶</a></h5>
+<p>By default HTTPSource splits JSON input into Flume events. As an alternative, BlobHandler is a handler for HTTPSource that returns an event that contains the request parameters as well as the Binary Large Object (BLOB) uploaded with this request. For example a PDF or JPG file. Note that this approach is not suitable for very large objects because it buffers up the entire BLOB in RAM.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="18%" />
+<col width="16%" />
+<col width="66%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>handler</strong></td>
+<td>&#8211;</td>
+<td>The FQCN of this class: <tt class="docutils literal"><span class="pre">org.apache.flume.sink.solr.morphline.BlobHandler</span></tt></td>
+</tr>
+<tr class="row-odd"><td>handler.maxBlobLength</td>
+<td>100000000</td>
+<td>The maximum number of bytes to read and buffer for a given request</td>
+</tr>
+</tbody>
+</table>
+</div>
+</div>
+<div class="section" id="stress-source">
+<h4>Stress Source<a class="headerlink" href="#stress-source" title="Permalink to this headline">¶</a></h4>
+<p>StressSource is an internal load-generating source implementation which is very useful for
+stress tests. It allows the user to configure the size of each Event payload, with empty headers.
+The user can configure the total number of events to be sent as well as the maximum number of
+successful events to be delivered.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="10%" />
+<col width="73%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.source.StressSource</span></tt></td>
+</tr>
+<tr class="row-odd"><td>size</td>
+<td>500</td>
+<td>Payload size of each Event. Unit:<strong>byte</strong></td>
+</tr>
+<tr class="row-even"><td>maxTotalEvents</td>
+<td>-1</td>
+<td>Maximum number of Events to be sent</td>
+</tr>
+<tr class="row-odd"><td>maxSuccessfulEvents</td>
+<td>-1</td>
+<td>Maximum number of Events successfully sent</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>1</td>
+<td>Number of Events to be sent in one batch</td>
+</tr>
+<tr class="row-odd"><td>maxEventsPerSecond</td>
+<td>0</td>
+<td>When set to an integer greater than zero, enforces a rate limiter onto the source.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named <strong>a1</strong>:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">stresssource-1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">memoryChannel-1</span>
+<span class="na">a1.sources.stresssource-1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.StressSource</span>
+<span class="na">a1.sources.stresssource-1.size</span> <span class="o">=</span> <span class="s">10240</span>
+<span class="na">a1.sources.stresssource-1.maxTotalEvents</span> <span class="o">=</span> <span class="s">1000000</span>
+<span class="na">a1.sources.stresssource-1.channels</span> <span class="o">=</span> <span class="s">memoryChannel-1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="legacy-sources">
+<h4>Legacy Sources<a class="headerlink" href="#legacy-sources" title="Permalink to this headline">¶</a></h4>
+<p>The legacy sources allow a Flume 1.x agent to receive events from Flume 0.9.4
+agents. It accepts events in the Flume 0.9.4 format, converts them to the Flume
+1.0 format, and stores them in the connected channel. The 0.9.4 event
+properties like timestamp, pri, host, nanos, etc get converted to 1.x event
+header attributes. The legacy source supports both Avro and Thrift RPC
+connections. To use this bridge between two Flume versions, you need to start a
+Flume 1.x agent with the avroLegacy or thriftLegacy source. The 0.9.4 agent
+should have the agent Sink pointing to the host/port of the 1.x agent.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">The reliability semantics of Flume 1.x are different from that of
+Flume 0.9.x. The E2E or DFO mode of a Flume 0.9.x agent will not be
+supported by the legacy source. The only supported 0.9.x mode is the
+best effort, though the reliability setting of the 1.x flow will be
+applicable to the events once they are saved into the Flume 1.x
+channel by the legacy source.</p>
+</div>
+<p>Required properties are in <strong>bold</strong>.</p>
+<div class="section" id="avro-legacy-source">
+<h5>Avro Legacy Source<a class="headerlink" href="#avro-legacy-source" title="Permalink to this headline">¶</a></h5>
+<table border="1" class="docutils">
+<colgroup>
+<col width="12%" />
+<col width="9%" />
+<col width="79%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.source.avroLegacy.AvroLegacySource</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>host</strong></td>
+<td>&#8211;</td>
+<td>The hostname or IP address to bind to</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>The port # to listen on</td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-even"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-odd"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.avroLegacy.AvroLegacySource</span>
+<span class="na">a1.sources.r1.host</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">6666</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="thrift-legacy-source">
+<h5>Thrift Legacy Source<a class="headerlink" href="#thrift-legacy-source" title="Permalink to this headline">¶</a></h5>
+<table border="1" class="docutils">
+<colgroup>
+<col width="12%" />
+<col width="9%" />
+<col width="79%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.source.thriftLegacy.ThriftLegacySource</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>host</strong></td>
+<td>&#8211;</td>
+<td>The hostname or IP address to bind to</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>The port # to listen on</td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>replicating or multiplexing</td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-even"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-odd"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.thriftLegacy.ThriftLegacySource</span>
+<span class="na">a1.sources.r1.host</span> <span class="o">=</span> <span class="s">0.0.0.0</span>
+<span class="na">a1.sources.r1.bind</span> <span class="o">=</span> <span class="s">6666</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="custom-source">
+<h4>Custom Source<a class="headerlink" href="#custom-source" title="Permalink to this headline">¶</a></h4>
+<p>A custom source is your own implementation of the Source interface. A custom
+source&#8217;s class and its dependencies must be included in the agent&#8217;s classpath
+when starting the Flume agent. The type of the custom source is its FQCN.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="15%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channels</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be your FQCN</td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>&nbsp;</td>
+<td><tt class="docutils literal"><span class="pre">replicating</span></tt> or <tt class="docutils literal"><span class="pre">multiplexing</span></tt></td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>replicating</td>
+<td>Depends on the selector.type value</td>
+</tr>
+<tr class="row-even"><td>interceptors</td>
+<td>&#8211;</td>
+<td>Space-separated list of interceptors</td>
+</tr>
+<tr class="row-odd"><td>interceptors.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">org.example.MySource</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="scribe-source">
+<h4>Scribe Source<a class="headerlink" href="#scribe-source" title="Permalink to this headline">¶</a></h4>
+<p>Scribe is another type of ingest system. To adopt an existing Scribe ingest system,
+Flume should use ScribeSource based on Thrift with a compatible transferring protocol.
+For deployment of Scribe please follow the guide from Facebook.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="10%" />
+<col width="73%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.source.scribe.ScribeSource</span></tt></td>
+</tr>
+<tr class="row-odd"><td>port</td>
+<td>1499</td>
+<td>Port that Scribe should connect to</td>
+</tr>
+<tr class="row-even"><td>maxReadBufferBytes</td>
+<td>16384000</td>
+<td>Thrift Default FrameBuffer Size</td>
+</tr>
+<tr class="row-odd"><td>workerThreads</td>
+<td>5</td>
+<td>Number of handling threads in Thrift</td>
+</tr>
+<tr class="row-even"><td>selector.type</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>selector.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.source.scribe.ScribeSource</span>
+<span class="na">a1.sources.r1.port</span> <span class="o">=</span> <span class="s">1463</span>
+<span class="na">a1.sources.r1.workerThreads</span> <span class="o">=</span> <span class="s">5</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="flume-sinks">
+<h3>Flume Sinks<a class="headerlink" href="#flume-sinks" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="hdfs-sink">
+<h4>HDFS Sink<a class="headerlink" href="#hdfs-sink" title="Permalink to this headline">¶</a></h4>
+<p>This sink writes events into the Hadoop Distributed File System (HDFS). It
+currently supports creating text and sequence files. It supports compression in
+both file types. The files can be rolled (close current file and create a new
+one) periodically based on the elapsed time or size of data or number of events.
+It also buckets/partitions data by attributes like timestamp or machine
+where the event originated. The HDFS directory path may contain formatting
+escape sequences that will be replaced by the HDFS sink to generate a
+directory/file name to store the events. Using this sink requires hadoop to be
+installed so that Flume can use the Hadoop jars to communicate with the HDFS
+cluster. Note that a version of Hadoop that supports the sync() call is
+required.</p>
+<p>The following are the escape sequences supported:</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="15%" />
+<col width="85%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Alias</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>%{host}</td>
+<td>Substitute value of event header named &#8220;host&#8221;. Arbitrary header names are supported.</td>
+</tr>
+<tr class="row-odd"><td>%t</td>
+<td>Unix time in milliseconds</td>
+</tr>
+<tr class="row-even"><td>%a</td>
+<td>locale&#8217;s short weekday name (Mon, Tue, ...)</td>
+</tr>
+<tr class="row-odd"><td>%A</td>
+<td>locale&#8217;s full weekday name (Monday, Tuesday, ...)</td>
+</tr>
+<tr class="row-even"><td>%b</td>
+<td>locale&#8217;s short month name (Jan, Feb, ...)</td>
+</tr>
+<tr class="row-odd"><td>%B</td>
+<td>locale&#8217;s long month name (January, February, ...)</td>
+</tr>
+<tr class="row-even"><td>%c</td>
+<td>locale&#8217;s date and time (Thu Mar 3 23:05:25 2005)</td>
+</tr>
+<tr class="row-odd"><td>%d</td>
+<td>day of month (01)</td>
+</tr>
+<tr class="row-even"><td>%e</td>
+<td>day of month without padding (1)</td>
+</tr>
+<tr class="row-odd"><td>%D</td>
+<td>date; same as %m/%d/%y</td>
+</tr>
+<tr class="row-even"><td>%H</td>
+<td>hour (00..23)</td>
+</tr>
+<tr class="row-odd"><td>%I</td>
+<td>hour (01..12)</td>
+</tr>
+<tr class="row-even"><td>%j</td>
+<td>day of year (001..366)</td>
+</tr>
+<tr class="row-odd"><td>%k</td>
+<td>hour ( 0..23)</td>
+</tr>
+<tr class="row-even"><td>%m</td>
+<td>month (01..12)</td>
+</tr>
+<tr class="row-odd"><td>%n</td>
+<td>month without padding (1..12)</td>
+</tr>
+<tr class="row-even"><td>%M</td>
+<td>minute (00..59)</td>
+</tr>
+<tr class="row-odd"><td>%p</td>
+<td>locale&#8217;s equivalent of am or pm</td>
+</tr>
+<tr class="row-even"><td>%s</td>
+<td>seconds since 1970-01-01 00:00:00 UTC</td>
+</tr>
+<tr class="row-odd"><td>%S</td>
+<td>second (00..59)</td>
+</tr>
+<tr class="row-even"><td>%y</td>
+<td>last two digits of year (00..99)</td>
+</tr>
+<tr class="row-odd"><td>%Y</td>
+<td>year (2010)</td>
+</tr>
+<tr class="row-even"><td>%z</td>
+<td>+hhmm numeric timezone (for example, -0400)</td>
+</tr>
+<tr class="row-odd"><td>%[localhost]</td>
+<td>Substitute the hostname of the host where the agent is running</td>
+</tr>
+<tr class="row-even"><td>%[IP]</td>
+<td>Substitute the IP address of the host where the agent is running</td>
+</tr>
+<tr class="row-odd"><td>%[FQDN]</td>
+<td>Substitute the canonical hostname of the host where the agent is running</td>
+</tr>
+</tbody>
+</table>
+<p>Note: The escape strings %[localhost], %[IP] and %[FQDN] all rely on Java&#8217;s ability to obtain the
+hostname, which may fail in some networking environments.</p>
+<p>The file in use will have the name mangled to include &#8220;.tmp&#8221; at the end. Once
+the file is closed, this extension is removed. This allows excluding partially
+complete files in the directory.
+Required properties are in <strong>bold</strong>.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">For all of the time related escape sequences, a header with the key
+&#8220;timestamp&#8221; must exist among the headers of the event (unless <tt class="docutils literal"><span class="pre">hdfs.useLocalTimeStamp</span></tt> is set to <tt class="docutils literal"><span class="pre">true</span></tt>). One way to add
+this automatically is to use the TimestampInterceptor.</p>
+</div>
+<table border="1" class="docutils">
+<colgroup>
+<col width="8%" />
+<col width="4%" />
+<col width="88%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">hdfs</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>hdfs.path</strong></td>
+<td>&#8211;</td>
+<td>HDFS directory path (eg hdfs://namenode/flume/webdata/)</td>
+</tr>
+<tr class="row-odd"><td>hdfs.filePrefix</td>
+<td>FlumeData</td>
+<td>Name prefixed to files created by Flume in hdfs directory</td>
+</tr>
+<tr class="row-even"><td>hdfs.fileSuffix</td>
+<td>&#8211;</td>
+<td>Suffix to append to file (eg <tt class="docutils literal"><span class="pre">.avro</span></tt> - <em>NOTE: period is not automatically added</em>)</td>
+</tr>
+<tr class="row-odd"><td>hdfs.inUsePrefix</td>
+<td>&#8211;</td>
+<td>Prefix that is used for temporary files that flume actively writes into</td>
+</tr>
+<tr class="row-even"><td>hdfs.inUseSuffix</td>
+<td><tt class="docutils literal"><span class="pre">.tmp</span></tt></td>
+<td>Suffix that is used for temporary files that flume actively writes into</td>
+</tr>
+<tr class="row-odd"><td>hdfs.emptyInUseSuffix</td>
+<td>false</td>
+<td>If <tt class="docutils literal"><span class="pre">false</span></tt> an <tt class="docutils literal"><span class="pre">hdfs.inUseSuffix</span></tt> is used while writing the output. After closing the output <tt class="docutils literal"><span class="pre">hdfs.inUseSuffix</span></tt> is removed from the output file name. If <tt class="docutils literal"><span class="pre">true</span></tt> the <tt class="docutils literal"><span class="pre">hdfs.inUseSuffix</span></tt> parameter is ignored  [...]
+</tr>
+<tr class="row-even"><td>hdfs.rollInterval</td>
+<td>30</td>
+<td>Number of seconds to wait before rolling current file
+(0 = never roll based on time interval)</td>
+</tr>
+<tr class="row-odd"><td>hdfs.rollSize</td>
+<td>1024</td>
+<td>File size to trigger roll, in bytes (0: never roll based on file size)</td>
+</tr>
+<tr class="row-even"><td>hdfs.rollCount</td>
+<td>10</td>
+<td>Number of events written to file before it is rolled
+(0 = never roll based on number of events)</td>
+</tr>
+<tr class="row-odd"><td>hdfs.idleTimeout</td>
+<td>0</td>
+<td>Timeout after which inactive files get closed
+(0 = disable automatic closing of idle files)</td>
+</tr>
+<tr class="row-even"><td>hdfs.batchSize</td>
+<td>100</td>
+<td>number of events written to file before it is flushed to HDFS</td>
+</tr>
+<tr class="row-odd"><td>hdfs.codeC</td>
+<td>&#8211;</td>
+<td>Compression codec. one of following : gzip, bzip2, lzo, lzop, snappy</td>
+</tr>
+<tr class="row-even"><td>hdfs.fileType</td>
+<td>SequenceFile</td>
+<td>File format: currently <tt class="docutils literal"><span class="pre">SequenceFile</span></tt>, <tt class="docutils literal"><span class="pre">DataStream</span></tt> or <tt class="docutils literal"><span class="pre">CompressedStream</span></tt>
+(1) DataStream will not compress the output file, so do not set codeC
+(2) CompressedStream requires setting hdfs.codeC to an available codec</td>
+</tr>
+<tr class="row-odd"><td>hdfs.maxOpenFiles</td>
+<td>5000</td>
+<td>Allow only this number of open files. If this number is exceeded, the oldest file is closed.</td>
+</tr>
+<tr class="row-even"><td>hdfs.minBlockReplicas</td>
+<td>&#8211;</td>
+<td>Specify minimum number of replicas per HDFS block. If not specified, it comes from the default Hadoop config in the classpath.</td>
+</tr>
+<tr class="row-odd"><td>hdfs.writeFormat</td>
+<td>Writable</td>
+<td>Format for sequence file records. One of <tt class="docutils literal"><span class="pre">Text</span></tt> or <tt class="docutils literal"><span class="pre">Writable</span></tt>. Set to <tt class="docutils literal"><span class="pre">Text</span></tt> before creating data files with Flume, otherwise those files cannot be read by either Apache Impala (incubating) or Apache Hive.</td>
+</tr>
+<tr class="row-even"><td>hdfs.threadsPoolSize</td>
+<td>10</td>
+<td>Number of threads per HDFS sink for HDFS IO ops (open, write, etc.)</td>
+</tr>
+<tr class="row-odd"><td>hdfs.rollTimerPoolSize</td>
+<td>1</td>
+<td>Number of threads per HDFS sink for scheduling timed file rolling</td>
+</tr>
+<tr class="row-even"><td>hdfs.kerberosPrincipal</td>
+<td>&#8211;</td>
+<td>Kerberos user principal for accessing secure HDFS</td>
+</tr>
+<tr class="row-odd"><td>hdfs.kerberosKeytab</td>
+<td>&#8211;</td>
+<td>Kerberos keytab for accessing secure HDFS</td>
+</tr>
+<tr class="row-even"><td>hdfs.proxyUser</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>hdfs.round</td>
+<td>false</td>
+<td>Should the timestamp be rounded down (if true, affects all time based escape sequences except %t)</td>
+</tr>
+<tr class="row-even"><td>hdfs.roundValue</td>
+<td>1</td>
+<td>Rounded down to the highest multiple of this (in the unit configured using <tt class="docutils literal"><span class="pre">hdfs.roundUnit</span></tt>), less than current time.</td>
+</tr>
+<tr class="row-odd"><td>hdfs.roundUnit</td>
+<td>second</td>
+<td>The unit of the round down value - <tt class="docutils literal"><span class="pre">second</span></tt>, <tt class="docutils literal"><span class="pre">minute</span></tt> or <tt class="docutils literal"><span class="pre">hour</span></tt>.</td>
+</tr>
+<tr class="row-even"><td>hdfs.timeZone</td>
+<td>Local Time</td>
+<td>Name of the timezone that should be used for resolving the directory path, e.g. America/Los_Angeles.</td>
+</tr>
+<tr class="row-odd"><td>hdfs.useLocalTimeStamp</td>
+<td>false</td>
+<td>Use the local time (instead of the timestamp from the event header) while replacing the escape sequences.</td>
+</tr>
+<tr class="row-even"><td>hdfs.closeTries</td>
+<td>0</td>
+<td>Number of times the sink must try renaming a file, after initiating a close attempt. If set to 1, this sink will not re-try a failed rename
+(due to, for example, NameNode or DataNode failure), and may leave the file in an open state with a .tmp extension.
+If set to 0, the sink will try to rename the file until the file is eventually renamed (there is no limit on the number of times it would try).
+The file may still remain open if the close call fails but the data will be intact and in this case, the file will be closed only after a Flume restart.</td>
+</tr>
+<tr class="row-odd"><td>hdfs.retryInterval</td>
+<td>180</td>
+<td>Time in seconds between consecutive attempts to close a file. Each close call costs multiple RPC round-trips to the Namenode,
+so setting this too low can cause a lot of load on the name node. If set to 0 or less, the sink will not
+attempt to close the file if the first attempt fails, and may leave the file open or with a &#8220;.tmp&#8221; extension.</p>
+</tr>
+<tr class="row-even"><td>serializer</td>
+<td><tt class="docutils literal"><span class="pre">TEXT</span></tt></td>
+<td>Other possible options include <tt class="docutils literal"><span class="pre">avro_event</span></tt> or the
+fully-qualified class name of an implementation of the
+<tt class="docutils literal"><span class="pre">EventSerializer.Builder</span></tt> interface.</td>
+</tr>
+<tr class="row-odd"><td>serializer.*</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Deprecated Properties</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="18%" />
+<col width="10%" />
+<col width="72%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>hdfs.callTimeout</td>
+<td>30000</td>
+<td>Number of milliseconds allowed for HDFS operations, such as open, write, flush, close.
+This number should be increased if many HDFS timeout operations are occurring.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">hdfs</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.hdfs.path</span> <span class="o">=</span> <span class="s">/flume/events/%Y-%m-%d/%H%M/%S</span>
+<span class="na">a1.sinks.k1.hdfs.filePrefix</span> <span class="o">=</span> <span class="s">events-</span>
+<span class="na">a1.sinks.k1.hdfs.round</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">a1.sinks.k1.hdfs.roundValue</span> <span class="o">=</span> <span class="s">10</span>
+<span class="na">a1.sinks.k1.hdfs.roundUnit</span> <span class="o">=</span> <span class="s">minute</span>
+</pre></div>
+</div>
+<p>The above configuration will round down the timestamp to the last 10th minute. For example, an event with
+timestamp 11:54:34 AM, June 12, 2012 will cause the hdfs path to become <tt class="docutils literal"><span class="pre">/flume/events/2012-06-12/1150/00</span></tt>.</p>
+</div>
+<div class="section" id="hive-sink">
+<h4>Hive Sink<a class="headerlink" href="#hive-sink" title="Permalink to this headline">¶</a></h4>
+<p>This sink streams events containing delimited text or JSON data directly into a Hive table or partition.
+Events are written using Hive transactions. As soon as a set of events are committed to Hive, they become
+immediately visible to Hive queries. Partitions to which flume will stream to can either be pre-created
+or, optionally, Flume can create them if they are missing. Fields from incoming event data are mapped to
+corresponding columns in the Hive table.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="15%" />
+<col width="8%" />
+<col width="77%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">hive</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>hive.metastore</strong></td>
+<td>&#8211;</td>
+<td>Hive metastore URI (eg thrift://a.b.com:9083 )</td>
+</tr>
+<tr class="row-odd"><td><strong>hive.database</strong></td>
+<td>&#8211;</td>
+<td>Hive database name</td>
+</tr>
+<tr class="row-even"><td><strong>hive.table</strong></td>
+<td>&#8211;</td>
+<td>Hive table name</td>
+</tr>
+<tr class="row-odd"><td>hive.partition</td>
+<td>&#8211;</td>
+<td>Comma separate list of partition values identifying the partition to write to. May contain escape
+sequences. E.g: If the table is partitioned by (continent: string, country :string, time : string)
+then &#8216;Asia,India,2014-02-26-01-21&#8217; will indicate continent=Asia,country=India,time=2014-02-26-01-21</td>
+</tr>
+<tr class="row-even"><td>hive.txnsPerBatchAsk</td>
+<td>100</td>
+<td>Hive grants a <em>batch of transactions</em> instead of single transactions to streaming clients like Flume.
+This setting configures the number of desired transactions per Transaction Batch. Data from all
+transactions in a single batch end up in a single file. Flume will write a maximum of batchSize events
+in each transaction in the batch. This setting in conjunction with batchSize provides control over the
+size of each file. Note that eventually Hive will transparently compact these files into larger files.</td>
+</tr>
+<tr class="row-odd"><td>heartBeatInterval</td>
+<td>240</td>
+<td>(In seconds) Interval between consecutive heartbeats sent to Hive to keep unused transactions from expiring.
+Set this value to 0 to disable heartbeats.</td>
+</tr>
+<tr class="row-even"><td>autoCreatePartitions</td>
+<td>true</td>
+<td>Flume will automatically create the necessary Hive partitions to stream to</td>
+</tr>
+<tr class="row-odd"><td>batchSize</td>
+<td>15000</td>
+<td>Max number of events written to Hive in a single Hive transaction</td>
+</tr>
+<tr class="row-even"><td>maxOpenConnections</td>
+<td>500</td>
+<td>Allow only this number of open connections. If this number is exceeded, the least recently used connection is closed.</td>
+</tr>
+<tr class="row-odd"><td>callTimeout</td>
+<td>10000</td>
+<td>(In milliseconds) Timeout for Hive &amp; HDFS I/O operations, such as openTxn, write, commit, abort.</td>
+</tr>
+<tr class="row-even"><td><strong>serializer</strong></td>
+<td>&nbsp;</td>
+<td>Serializer is responsible for parsing out fields from the event and mapping them to columns in the hive table.
+Choice of serializer depends upon the format of the data in the event. Supported serializers: DELIMITED and JSON</td>
+</tr>
+<tr class="row-odd"><td>roundUnit</td>
+<td>minute</td>
+<td>The unit of the round down value - <tt class="docutils literal"><span class="pre">second</span></tt>, <tt class="docutils literal"><span class="pre">minute</span></tt> or <tt class="docutils literal"><span class="pre">hour</span></tt>.</td>
+</tr>
+<tr class="row-even"><td>roundValue</td>
+<td>1</td>
+<td>Rounded down to the highest multiple of this (in the unit configured using hive.roundUnit), less than current time</td>
+</tr>
+<tr class="row-odd"><td>timeZone</td>
+<td>Local Time</td>
+<td>Name of the timezone that should be used for resolving the escape sequences in partition, e.g. America/Los_Angeles.</td>
+</tr>
+<tr class="row-even"><td>useLocalTimeStamp</td>
+<td>false</td>
+<td>Use the local time (instead of the timestamp from the event header) while replacing the escape sequences.</td>
+</tr>
+</tbody>
+</table>
+<p>Following serializers are provided for Hive sink:</p>
+<p><strong>JSON</strong>: Handles UTF8 encoded Json (strict syntax) events and requires no configuration. Object names
+in the JSON are mapped directly to columns with the same name in the Hive table.
+Internally uses org.apache.hive.hcatalog.data.JsonSerDe but is independent of the Serde of the Hive table.
+This serializer requires HCatalog to be installed.</p>
+<p><strong>DELIMITED</strong>: Handles simple delimited textual events.
+Internally uses LazySimpleSerde but is independent of the Serde of the Hive table.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="10%" />
+<col width="68%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>serializer.delimiter</td>
+<td>,</td>
+<td>(Type: string) The field delimiter in the incoming data. To use special
+characters, surround them with double quotes like &#8220;\t&#8221;</td>
+</tr>
+<tr class="row-odd"><td><strong>serializer.fieldnames</strong></td>
+<td>&#8211;</td>
+<td>The mapping from input fields to columns in hive table. Specified as a
+comma separated list (no spaces) of hive table columns names, identifying
+the input fields in order of their occurrence. To skip fields leave the
+column name unspecified. Eg. &#8216;time,,ip,message&#8217; indicates the 1st, 3rd
+and 4th fields in input map to time, ip and message columns in the hive table.</td>
+</tr>
+<tr class="row-even"><td>serializer.serdeSeparator</td>
+<td>Ctrl-A</td>
+<td>(Type: character) Customizes the separator used by underlying serde. There
+can be a gain in efficiency if the fields in serializer.fieldnames are in
+same order as table columns, the serializer.delimiter is same as the
+serializer.serdeSeparator and number of fields in serializer.fieldnames
+is less than or equal to number of table columns, as the fields in incoming
+event body do not need to be reordered to match order of table columns.
+Use single quotes for special characters like &#8216;\t&#8217;.
+Ensure input fields do not contain this character. NOTE: If serializer.delimiter
+is a single character, preferably set this to the same character</td>
+</tr>
+</tbody>
+</table>
+<p>The following are the escape sequences supported:</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="10%" />
+<col width="90%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Alias</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>%{host}</td>
+<td>Substitute value of event header named &#8220;host&#8221;. Arbitrary header names are supported.</td>
+</tr>
+<tr class="row-odd"><td>%t</td>
+<td>Unix time in milliseconds</td>
+</tr>
+<tr class="row-even"><td>%a</td>
+<td>locale&#8217;s short weekday name (Mon, Tue, ...)</td>
+</tr>
+<tr class="row-odd"><td>%A</td>
+<td>locale&#8217;s full weekday name (Monday, Tuesday, ...)</td>
+</tr>
+<tr class="row-even"><td>%b</td>
+<td>locale&#8217;s short month name (Jan, Feb, ...)</td>
+</tr>
+<tr class="row-odd"><td>%B</td>
+<td>locale&#8217;s long month name (January, February, ...)</td>
+</tr>
+<tr class="row-even"><td>%c</td>
+<td>locale&#8217;s date and time (Thu Mar 3 23:05:25 2005)</td>
+</tr>
+<tr class="row-odd"><td>%d</td>
+<td>day of month (01)</td>
+</tr>
+<tr class="row-even"><td>%D</td>
+<td>date; same as %m/%d/%y</td>
+</tr>
+<tr class="row-odd"><td>%H</td>
+<td>hour (00..23)</td>
+</tr>
+<tr class="row-even"><td>%I</td>
+<td>hour (01..12)</td>
+</tr>
+<tr class="row-odd"><td>%j</td>
+<td>day of year (001..366)</td>
+</tr>
+<tr class="row-even"><td>%k</td>
+<td>hour ( 0..23)</td>
+</tr>
+<tr class="row-odd"><td>%m</td>
+<td>month (01..12)</td>
+</tr>
+<tr class="row-even"><td>%M</td>
+<td>minute (00..59)</td>
+</tr>
+<tr class="row-odd"><td>%p</td>
+<td>locale&#8217;s equivalent of am or pm</td>
+</tr>
+<tr class="row-even"><td>%s</td>
+<td>seconds since 1970-01-01 00:00:00 UTC</td>
+</tr>
+<tr class="row-odd"><td>%S</td>
+<td>second (00..59)</td>
+</tr>
+<tr class="row-even"><td>%y</td>
+<td>last two digits of year (00..99)</td>
+</tr>
+<tr class="row-odd"><td>%Y</td>
+<td>year (2010)</td>
+</tr>
+<tr class="row-even"><td>%z</td>
+<td>+hhmm numeric timezone (for example, -0400)</td>
+</tr>
+</tbody>
+</table>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">For all of the time related escape sequences, a header with the key
+&#8220;timestamp&#8221; must exist among the headers of the event (unless <tt class="docutils literal"><span class="pre">useLocalTimeStamp</span></tt> is set to <tt class="docutils literal"><span class="pre">true</span></tt>). One way to add
+this automatically is to use the TimestampInterceptor.</p>
+</div>
+<p>Example Hive table :</p>
+<div class="highlight-properties"><pre>create table weblogs ( id int , msg string )
+    partitioned by (continent string, country string, time string)
+    clustered by (id) into 5 buckets
+    stored as orc;</pre>
+</div>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">memory</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">hive</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.hive.metastore</span> <span class="o">=</span> <span class="s">thrift://127.0.0.1:9083</span>
+<span class="na">a1.sinks.k1.hive.database</span> <span class="o">=</span> <span class="s">logsdb</span>
+<span class="na">a1.sinks.k1.hive.table</span> <span class="o">=</span> <span class="s">weblogs</span>
+<span class="na">a1.sinks.k1.hive.partition</span> <span class="o">=</span> <span class="s">asia,%{country},%Y-%m-%d-%H-%M</span>
+<span class="na">a1.sinks.k1.useLocalTimeStamp</span> <span class="o">=</span> <span class="s">false</span>
+<span class="na">a1.sinks.k1.round</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">a1.sinks.k1.roundValue</span> <span class="o">=</span> <span class="s">10</span>
+<span class="na">a1.sinks.k1.roundUnit</span> <span class="o">=</span> <span class="s">minute</span>
+<span class="na">a1.sinks.k1.serializer</span> <span class="o">=</span> <span class="s">DELIMITED</span>
+<span class="na">a1.sinks.k1.serializer.delimiter</span> <span class="o">=</span> <span class="s">&quot;\t&quot;</span>
+<span class="na">a1.sinks.k1.serializer.serdeSeparator</span> <span class="o">=</span> <span class="s">&#39;\t&#39;</span>
+<span class="na">a1.sinks.k1.serializer.fieldnames</span> <span class="o">=</span><span class="s">id,,msg</span>
+</pre></div>
+</div>
+<p>The above configuration will round down the timestamp to the last 10th minute. For example, an event with
+timestamp header set to 11:54:34 AM, June 12, 2012 and &#8216;country&#8217; header set to &#8216;india&#8217; will evaluate to the
+partition (continent=&#8217;asia&#8217;,country=&#8217;india&#8217;,time=&#8216;2012-06-12-11-50&#8217;). The serializer is configured to
+accept tab separated input containing three fields and to skip the second field.</p>
+</div>
+<div class="section" id="logger-sink">
+<h4>Logger Sink<a class="headerlink" href="#logger-sink" title="Permalink to this headline">¶</a></h4>
+<p>Logs event at INFO level. Typically useful for testing/debugging purpose. Required properties are
+in <strong>bold</strong>. This sink is the only exception which doesn&#8217;t require the extra configuration
+explained in the <a class="reference internal" href="#logging-raw-data">Logging raw data</a> section.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="10%" />
+<col width="70%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">logger</span></tt></td>
+</tr>
+<tr class="row-even"><td>maxBytesToLog</td>
+<td>16</td>
+<td>Maximum number of bytes of the Event body to log</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">logger</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="avro-sink">
+<h4>Avro Sink<a class="headerlink" href="#avro-sink" title="Permalink to this headline">¶</a></h4>
+<p>This sink forms one half of Flume&#8217;s tiered collection support. Flume events
+sent to this sink are turned into Avro events and sent to the configured
+hostname / port pair. The events are taken from the configured Channel in
+batches of the configured batch size.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="5%" />
+<col width="10%" />
+<col width="84%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">avro</span></tt>.</td>
+</tr>
+<tr class="row-even"><td><strong>hostname</strong></td>
+<td>&#8211;</td>
+<td>The hostname or IP address of the remote Avro Source to connect to.</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>The port # of the remote Avro Source to connect to.</td>
+</tr>
+<tr class="row-even"><td>batch-size</td>
+<td>100</td>
+<td>Number of events to batch together for send.</td>
+</tr>
+<tr class="row-odd"><td>connect-timeout</td>
+<td>20000</td>
+<td>Amount of time (ms) to allow for the first (handshake) request.</td>
+</tr>
+<tr class="row-even"><td>request-timeout</td>
+<td>20000</td>
+<td>Amount of time (ms) to allow for requests after the first.</td>
+</tr>
+<tr class="row-odd"><td>reset-connection-interval</td>
+<td>none</td>
+<td>Amount of time (s) before the connection to the next hop is reset. This will force the Avro Sink to reconnect to the next hop. This will allow the sink to connect to hosts behind a hardware load-balancer when new hosts are added without having to restart the agent.</td>
+</tr>
+<tr class="row-even"><td>compression-type</td>
+<td>none</td>
+<td>This can be &#8220;none&#8221; or &#8220;deflate&#8221;.  The compression-type must match the compression-type of matching AvroSource</td>
+</tr>
+<tr class="row-odd"><td>compression-level</td>
+<td>6</td>
+<td>The level of compression to compress event. 0 = no compression and 1-9 is compression.  The higher the number the more compression</td>
+</tr>
+<tr class="row-even"><td>ssl</td>
+<td>false</td>
+<td>Set to true to enable SSL for this AvroSink. When configuring SSL, you can optionally set a &#8220;truststore&#8221;, &#8220;truststore-password&#8221;, &#8220;truststore-type&#8221;, and specify whether to &#8220;trust-all-certs&#8221;.</td>
+</tr>
+<tr class="row-odd"><td>trust-all-certs</td>
+<td>false</td>
+<td>If this is set to true, SSL server certificates for remote servers (Avro Sources) will not be checked. This should NOT be used in production because it makes it easier for an attacker to execute a man-in-the-middle attack and &#8220;listen in&#8221; on the encrypted connection.</td>
+</tr>
+<tr class="row-even"><td>truststore</td>
+<td>&#8211;</td>
+<td>The path to a custom Java truststore file. Flume uses the certificate authority information in this file to determine whether the remote Avro Source&#8217;s SSL authentication credentials should be trusted. If not specified, then the global keystore will be used. If the global keystore not specified either, then the default Java JSSE certificate authority files (typically &#8220;jssecacerts&#8221; or &#8220;cacerts&#8221; in the Oracle JRE) will be used.</td>
+</tr>
+<tr class="row-odd"><td>truststore-password</td>
+<td>&#8211;</td>
+<td>The password for the truststore. If not specified, then the global keystore password will be used (if defined).</td>
+</tr>
+<tr class="row-even"><td>truststore-type</td>
+<td>JKS</td>
+<td>The type of the Java truststore. This can be &#8220;JKS&#8221; or other supported Java truststore type. If not specified, then the global keystore type will be used (if defined, otherwise the default is JKS).</td>
+</tr>
+<tr class="row-odd"><td>exclude-protocols</td>
+<td>SSLv3</td>
+<td>Space-separated list of SSL/TLS protocols to exclude. SSLv3 will always be excluded in addition to the protocols specified.</td>
+</tr>
+<tr class="row-even"><td>maxIoWorkers</td>
+<td>2 * the number of available processors in the machine</td>
+<td>The maximum number of I/O worker threads. This is configured on the NettyAvroRpcClient NioClientSocketChannelFactory.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">avro</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.hostname</span> <span class="o">=</span> <span class="s">10.10.10.10</span>
+<span class="na">a1.sinks.k1.port</span> <span class="o">=</span> <span class="s">4545</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="thrift-sink">
+<h4>Thrift Sink<a class="headerlink" href="#thrift-sink" title="Permalink to this headline">¶</a></h4>
+<p>This sink forms one half of Flume&#8217;s tiered collection support. Flume events
+sent to this sink are turned into Thrift events and sent to the configured
+hostname / port pair. The events are taken from the configured Channel in
+batches of the configured batch size.</p>
+<p>Thrift sink can be configured to start in secure mode by enabling kerberos authentication.
+To communicate with a Thrift source started in secure mode, the Thrift sink should also
+operate in secure mode. client-principal and client-keytab are the properties used by the
+Thrift sink to authenticate to the kerberos KDC. The server-principal represents the
+principal of the Thrift source this sink is configured to connect to in secure mode.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="6%" />
+<col width="2%" />
+<col width="93%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">thrift</span></tt>.</td>
+</tr>
+<tr class="row-even"><td><strong>hostname</strong></td>
+<td>&#8211;</td>
+<td>The hostname or IP address to connect to.</td>
+</tr>
+<tr class="row-odd"><td><strong>port</strong></td>
+<td>&#8211;</td>
+<td>The port # to connect to.</td>
+</tr>
+<tr class="row-even"><td>batch-size</td>
+<td>100</td>
+<td>Number of events to batch together for send.</td>
+</tr>
+<tr class="row-odd"><td>connect-timeout</td>
+<td>20000</td>
+<td>Amount of time (ms) to allow for the first (handshake) request.</td>
+</tr>
+<tr class="row-even"><td>request-timeout</td>
+<td>20000</td>
+<td>Amount of time (ms) to allow for requests after the first.</td>
+</tr>
+<tr class="row-odd"><td>connection-reset-interval</td>
+<td>none</td>
+<td>Amount of time (s) before the connection to the next hop is reset. This will force the Thrift Sink to reconnect to the next hop. This will allow the sink to connect to hosts behind a hardware load-balancer when new hosts are added without having to restart the agent.</td>
+</tr>
+<tr class="row-even"><td>ssl</td>
+<td>false</td>
+<td>Set to true to enable SSL for this ThriftSink. When configuring SSL, you can optionally set a &#8220;truststore&#8221;, &#8220;truststore-password&#8221; and &#8220;truststore-type&#8221;</td>
+</tr>
+<tr class="row-odd"><td>truststore</td>
+<td>&#8211;</td>
+<td>The path to a custom Java truststore file. Flume uses the certificate authority information in this file to determine whether the remote Thrift Source&#8217;s SSL authentication credentials should be trusted. If not specified, then the global keystore will be used. If the global keystore not specified either, then the default Java JSSE certificate authority files (typically &#8220;jssecacerts&#8221; or &#8220;cacerts&#8221; in the Oracle JRE) will be used.</td>
+</tr>
+<tr class="row-even"><td>truststore-password</td>
+<td>&#8211;</td>
+<td>The password for the truststore. If not specified, then the global keystore password will be used (if defined).</td>
+</tr>
+<tr class="row-odd"><td>truststore-type</td>
+<td>JKS</td>
+<td>The type of the Java truststore. This can be &#8220;JKS&#8221; or other supported Java truststore type. If not specified, then the global keystore type will be used (if defined, otherwise the default is JKS).</td>
+</tr>
+<tr class="row-even"><td>exclude-protocols</td>
+<td>SSLv3</td>
+<td>Space-separated list of SSL/TLS protocols to exclude</td>
+</tr>
+<tr class="row-odd"><td>kerberos</td>
+<td>false</td>
+<td>Set to true to enable kerberos authentication. In kerberos mode, client-principal, client-keytab and server-principal are required for successful authentication and communication to a kerberos enabled Thrift Source.</td>
+</tr>
+<tr class="row-even"><td>client-principal</td>
+<td>—-</td>
+<td>The kerberos principal used by the Thrift Sink to authenticate to the kerberos KDC.</td>
+</tr>
+<tr class="row-odd"><td>client-keytab</td>
+<td>—-</td>
+<td>The keytab location used by the Thrift Sink in combination with the client-principal to authenticate to the kerberos KDC.</td>
+</tr>
+<tr class="row-even"><td>server-principal</td>
+<td>&#8211;</td>
+<td>The kerberos principal of the Thrift Source to which the Thrift Sink is configured to connect.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">thrift</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.hostname</span> <span class="o">=</span> <span class="s">10.10.10.10</span>
+<span class="na">a1.sinks.k1.port</span> <span class="o">=</span> <span class="s">4545</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="irc-sink">
+<h4>IRC Sink<a class="headerlink" href="#irc-sink" title="Permalink to this headline">¶</a></h4>
+<p>The IRC sink takes messages from attached channel and relays those to
+configured IRC destinations.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="19%" />
+<col width="9%" />
+<col width="72%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">irc</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>hostname</strong></td>
+<td>&#8211;</td>
+<td>The hostname or IP address to connect to</td>
+</tr>
+<tr class="row-odd"><td>port</td>
+<td>6667</td>
+<td>The port number of remote host to connect</td>
+</tr>
+<tr class="row-even"><td><strong>nick</strong></td>
+<td>&#8211;</td>
+<td>Nick name</td>
+</tr>
+<tr class="row-odd"><td>user</td>
+<td>&#8211;</td>
+<td>User name</td>
+</tr>
+<tr class="row-even"><td>password</td>
+<td>&#8211;</td>
+<td>User password</td>
+</tr>
+<tr class="row-odd"><td><strong>chan</strong></td>
+<td>&#8211;</td>
+<td>channel</td>
+</tr>
+<tr class="row-even"><td>name</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>splitlines</td>
+<td>&#8211;</td>
+<td>(boolean)</td>
+</tr>
+<tr class="row-even"><td>splitchars</td>
+<td>n</td>
+<td>line separator (if you were to enter the default value
+into the config file, then you would need to escape the
+backslash, like this: &#8220;\n&#8221;)</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">irc</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.hostname</span> <span class="o">=</span> <span class="s">irc.yourdomain.com</span>
+<span class="na">a1.sinks.k1.nick</span> <span class="o">=</span> <span class="s">flume</span>
+<span class="na">a1.sinks.k1.chan</span> <span class="o">=</span> <span class="s">#flume</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="file-roll-sink">
+<h4>File Roll Sink<a class="headerlink" href="#file-roll-sink" title="Permalink to this headline">¶</a></h4>
+<p>Stores events on the local filesystem.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="5%" />
+<col width="78%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">file_roll</span></tt>.</td>
+</tr>
+<tr class="row-even"><td><strong>sink.directory</strong></td>
+<td>&#8211;</td>
+<td>The directory where files will be stored</td>
+</tr>
+<tr class="row-odd"><td>sink.pathManager</td>
+<td>DEFAULT</td>
+<td>The PathManager implementation to use.</td>
+</tr>
+<tr class="row-even"><td>sink.pathManager.extension</td>
+<td>&#8211;</td>
+<td>The file extension if the default PathManager is used.</td>
+</tr>
+<tr class="row-odd"><td>sink.pathManager.prefix</td>
+<td>&#8211;</td>
+<td>A character string to add to the beginning of the file name if the default PathManager is used</td>
+</tr>
+<tr class="row-even"><td>sink.rollInterval</td>
+<td>30</td>
+<td>Roll the file every 30 seconds. Specifying 0 will disable rolling and cause all events to be written to a single file.</td>
+</tr>
+<tr class="row-odd"><td>sink.serializer</td>
+<td>TEXT</td>
+<td>Other possible options include <tt class="docutils literal"><span class="pre">avro_event</span></tt> or the FQCN of an implementation of EventSerializer.Builder interface.</td>
+</tr>
+<tr class="row-even"><td>sink.batchSize</td>
+<td>100</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">file_roll</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.sink.directory</span> <span class="o">=</span> <span class="s">/var/log/flume</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="null-sink">
+<h4>Null Sink<a class="headerlink" href="#null-sink" title="Permalink to this headline">¶</a></h4>
+<p>Discards all events it receives from the channel.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="11%" />
+<col width="70%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">null</span></tt>.</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>100</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">null</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="hbasesinks">
+<h4>HBaseSinks<a class="headerlink" href="#hbasesinks" title="Permalink to this headline">¶</a></h4>
+<div class="section" id="hbasesink">
+<h5>HBaseSink<a class="headerlink" href="#hbasesink" title="Permalink to this headline">¶</a></h5>
+<p>This sink writes data to HBase. The Hbase configuration is picked up from the first
+hbase-site.xml encountered in the classpath. A class implementing HbaseEventSerializer
+which is specified by the configuration is used to convert the events into
+HBase puts and/or increments. These puts and increments are then written
+to HBase. This sink provides the same consistency guarantees as HBase,
+which is currently row-wise atomicity. In the event of Hbase failing to
+write certain events, the sink will replay all events in that transaction.</p>
+<p>The HBaseSink supports writing data to secure HBase. To write to secure HBase, the user
+the agent is running as must have write permissions to the table the sink is configured
+to write to. The principal and keytab to use to authenticate against the KDC can be specified
+in the configuration. The hbase-site.xml in the Flume agent&#8217;s classpath
+must have authentication set to <tt class="docutils literal"><span class="pre">kerberos</span></tt> (For details on how to do this, please refer to
+HBase documentation).</p>
+<p>For convenience, two serializers are provided with Flume. The
+SimpleHbaseEventSerializer (org.apache.flume.sink.hbase.SimpleHbaseEventSerializer)
+writes the event body
+as-is to HBase, and optionally increments a column in Hbase. This is primarily
+an example implementation. The RegexHbaseEventSerializer
+(org.apache.flume.sink.hbase.RegexHbaseEventSerializer) breaks the event body
+based on the given regex and writes each part into different columns.</p>
+<p>The type is the FQCN: org.apache.flume.sink.hbase.HBaseSink.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="10%" />
+<col width="31%" />
+<col width="59%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">hbase</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>table</strong></td>
+<td>&#8211;</td>
+<td>The name of the table in Hbase to write to.</td>
+</tr>
+<tr class="row-odd"><td><strong>columnFamily</strong></td>
+<td>&#8211;</td>
+<td>The column family in Hbase to write to.</td>
+</tr>
+<tr class="row-even"><td>zookeeperQuorum</td>
+<td>&#8211;</td>
+<td>The quorum spec. This is the value for the property <tt class="docutils literal"><span class="pre">hbase.zookeeper.quorum</span></tt> in hbase-site.xml</td>
+</tr>
+<tr class="row-odd"><td>znodeParent</td>
+<td>/hbase</td>
+<td>The base path for the znode for the -ROOT- region. Value of <tt class="docutils literal"><span class="pre">zookeeper.znode.parent</span></tt> in hbase-site.xml</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>100</td>
+<td>Number of events to be written per txn.</td>
+</tr>
+<tr class="row-odd"><td>coalesceIncrements</td>
+<td>false</td>
+<td>Should the sink coalesce multiple increments to a cell per batch. This might give
+better performance if there are multiple increments to a limited number of cells.</td>
+</tr>
+<tr class="row-even"><td>serializer</td>
+<td>org.apache.flume.sink.hbase.SimpleHbaseEventSerializer</td>
+<td>Default increment column = &#8220;iCol&#8221;, payload column = &#8220;pCol&#8221;.</td>
+</tr>
+<tr class="row-odd"><td>serializer.*</td>
+<td>&#8211;</td>
+<td>Properties to be passed to the serializer.</td>
+</tr>
+<tr class="row-even"><td>kerberosPrincipal</td>
+<td>&#8211;</td>
+<td>Kerberos user principal for accessing secure HBase</td>
+</tr>
+<tr class="row-odd"><td>kerberosKeytab</td>
+<td>&#8211;</td>
+<td>Kerberos keytab for accessing secure HBase</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">hbase</span>
+<span class="na">a1.sinks.k1.table</span> <span class="o">=</span> <span class="s">foo_table</span>
+<span class="na">a1.sinks.k1.columnFamily</span> <span class="o">=</span> <span class="s">bar_cf</span>
+<span class="na">a1.sinks.k1.serializer</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.hbase.RegexHbaseEventSerializer</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="hbase2sink">
+<h5>HBase2Sink<a class="headerlink" href="#hbase2sink" title="Permalink to this headline">¶</a></h5>
+<p>HBase2Sink is the equivalent of HBaseSink for HBase version 2.
+The provided functionality and the configuration parameters are the same as in case of HBaseSink (except the hbase2 tag in the sink type and the package/class names).</p>
+<p>The type is the FQCN: org.apache.flume.sink.hbase2.HBase2Sink.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="10%" />
+<col width="31%" />
+<col width="58%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">hbase2</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>table</strong></td>
+<td>&#8211;</td>
+<td>The name of the table in HBase to write to.</td>
+</tr>
+<tr class="row-odd"><td><strong>columnFamily</strong></td>
+<td>&#8211;</td>
+<td>The column family in HBase to write to.</td>
+</tr>
+<tr class="row-even"><td>zookeeperQuorum</td>
+<td>&#8211;</td>
+<td>The quorum spec. This is the value for the property <tt class="docutils literal"><span class="pre">hbase.zookeeper.quorum</span></tt> in hbase-site.xml</td>
+</tr>
+<tr class="row-odd"><td>znodeParent</td>
+<td>/hbase</td>
+<td>The base path for the znode for the -ROOT- region. Value of <tt class="docutils literal"><span class="pre">zookeeper.znode.parent</span></tt> in hbase-site.xml</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>100</td>
+<td>Number of events to be written per txn.</td>
+</tr>
+<tr class="row-odd"><td>coalesceIncrements</td>
+<td>false</td>
+<td>Should the sink coalesce multiple increments to a cell per batch. This might give
+better performance if there are multiple increments to a limited number of cells.</td>
+</tr>
+<tr class="row-even"><td>serializer</td>
+<td>org.apache.flume.sink.hbase2.SimpleHBase2EventSerializer</td>
+<td>Default increment column = &#8220;iCol&#8221;, payload column = &#8220;pCol&#8221;.</td>
+</tr>
+<tr class="row-odd"><td>serializer.*</td>
+<td>&#8211;</td>
+<td>Properties to be passed to the serializer.</td>
+</tr>
+<tr class="row-even"><td>kerberosPrincipal</td>
+<td>&#8211;</td>
+<td>Kerberos user principal for accessing secure HBase</td>
+</tr>
+<tr class="row-odd"><td>kerberosKeytab</td>
+<td>&#8211;</td>
+<td>Kerberos keytab for accessing secure HBase</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">hbase2</span>
+<span class="na">a1.sinks.k1.table</span> <span class="o">=</span> <span class="s">foo_table</span>
+<span class="na">a1.sinks.k1.columnFamily</span> <span class="o">=</span> <span class="s">bar_cf</span>
+<span class="na">a1.sinks.k1.serializer</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.hbase2.RegexHBase2EventSerializer</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="asynchbasesink">
+<h5>AsyncHBaseSink<a class="headerlink" href="#asynchbasesink" title="Permalink to this headline">¶</a></h5>
+<p>This sink writes data to HBase using an asynchronous model. A class implementing
+AsyncHbaseEventSerializer which is specified by the configuration is used to convert the events into
+HBase puts and/or increments. These puts and increments are then written
+to HBase. This sink uses the <a class="reference external" href="https://github.com/OpenTSDB/asynchbase">Asynchbase API</a> to write to
+HBase. This sink provides the same consistency guarantees as HBase,
+which is currently row-wise atomicity. In the event of Hbase failing to
+write certain events, the sink will replay all events in that transaction.
+AsyncHBaseSink can only be used with HBase 1.x. The async client library used by AsyncHBaseSink is not available for HBase 2.
+The type is the FQCN: org.apache.flume.sink.hbase.AsyncHBaseSink.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="9%" />
+<col width="29%" />
+<col width="61%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">asynchbase</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>table</strong></td>
+<td>&#8211;</td>
+<td>The name of the table in Hbase to write to.</td>
+</tr>
+<tr class="row-odd"><td>zookeeperQuorum</td>
+<td>&#8211;</td>
+<td>The quorum spec. This is the value for the property <tt class="docutils literal"><span class="pre">hbase.zookeeper.quorum</span></tt> in hbase-site.xml</td>
+</tr>
+<tr class="row-even"><td>znodeParent</td>
+<td>/hbase</td>
+<td>The base path for the znode for the -ROOT- region. Value of <tt class="docutils literal"><span class="pre">zookeeper.znode.parent</span></tt> in hbase-site.xml</td>
+</tr>
+<tr class="row-odd"><td><strong>columnFamily</strong></td>
+<td>&#8211;</td>
+<td>The column family in Hbase to write to.</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>100</td>
+<td>Number of events to be written per txn.</td>
+</tr>
+<tr class="row-odd"><td>coalesceIncrements</td>
+<td>false</td>
+<td>Should the sink coalesce multiple increments to a cell per batch. This might give
+better performance if there are multiple increments to a limited number of cells.</td>
+</tr>
+<tr class="row-even"><td>timeout</td>
+<td>60000</td>
+<td>The length of time (in milliseconds) the sink waits for acks from hbase for
+all events in a transaction.</td>
+</tr>
+<tr class="row-odd"><td>serializer</td>
+<td>org.apache.flume.sink.hbase.SimpleAsyncHbaseEventSerializer</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>serializer.*</td>
+<td>&#8211;</td>
+<td>Properties to be passed to the serializer.</td>
+</tr>
+<tr class="row-odd"><td>async.*</td>
+<td>&#8211;</td>
+<td>Properties to be passed to asyncHbase library.
+These properties have precedence over the old <tt class="docutils literal"><span class="pre">zookeeperQuorum</span></tt> and <tt class="docutils literal"><span class="pre">znodeParent</span></tt> values.
+You can find the list of the available properties at
+<a class="reference external" href="http://opentsdb.github.io/asynchbase/docs/build/html/configuration.html#properties">the documentation page of AsyncHBase</a>.</td>
+</tr>
+</tbody>
+</table>
+<p>Note that this sink takes the Zookeeper Quorum and parent znode information in
+the configuration. Zookeeper Quorum and parent node configuration may be
+specified in the flume configuration file. Alternatively, these configuration
+values are taken from the first hbase-site.xml file in the classpath.</p>
+<p>If these are not provided in the configuration, then the sink
+will read this information from the first hbase-site.xml file in the classpath.</p>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">asynchbase</span>
+<span class="na">a1.sinks.k1.table</span> <span class="o">=</span> <span class="s">foo_table</span>
+<span class="na">a1.sinks.k1.columnFamily</span> <span class="o">=</span> <span class="s">bar_cf</span>
+<span class="na">a1.sinks.k1.serializer</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.hbase.SimpleAsyncHbaseEventSerializer</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="morphlinesolrsink">
+<h4>MorphlineSolrSink<a class="headerlink" href="#morphlinesolrsink" title="Permalink to this headline">¶</a></h4>
+<p>This sink extracts data from Flume events, transforms it, and loads it in near-real-time into Apache Solr servers, which in turn serve queries to end users or search applications.</p>
+<p>This sink is well suited for use cases that stream raw data into HDFS (via the HdfsSink) and simultaneously extract, transform and load the same data into Solr (via MorphlineSolrSink). In particular, this sink can process arbitrary heterogeneous raw data from disparate data sources and turn it into a data model that is useful to Search applications.</p>
+<p>The ETL functionality is customizable using a <a class="reference external" href="http://cloudera.github.io/cdk/docs/current/cdk-morphlines/index.html">morphline configuration file</a> that defines a chain of transformation commands that pipe event records from one command to another.</p>
+<p>Morphlines can be seen as an evolution of Unix pipelines where the data model is generalized to work with streams of generic records, including arbitrary binary payloads. A morphline command is a bit like a Flume Interceptor. Morphlines can be embedded into Hadoop components such as Flume.</p>
+<p>Commands to parse and transform a set of standard data formats such as log files, Avro, CSV, Text, HTML, XML, PDF, Word, Excel, etc. are provided out of the box, and additional custom commands and parsers for additional data formats can be added as morphline plugins. Any kind of data format can be indexed and any Solr documents for any kind of Solr schema can be generated, and any custom ETL logic can be registered and executed.</p>
+<p>Morphlines manipulate continuous streams of records. The data model can be described as follows: A record is a set of named fields where each field has an ordered list of one or more values. A value can be any Java Object. That is, a record is essentially a hash table where each hash table entry contains a String key and a list of Java Objects as values. (The implementation uses Guava&#8217;s <tt class="docutils literal"><span class="pre">ArrayListMultimap</span></tt>, which is a <tt  [...]
+<p>This sink fills the body of the Flume event into the <tt class="docutils literal"><span class="pre">_attachment_body</span></tt> field of the morphline record, as well as copies the headers of the Flume event into record fields of the same name. The commands can then act on this data.</p>
+<p>Routing to a SolrCloud cluster is supported to improve scalability. Indexing load can be spread across a large number of MorphlineSolrSinks for improved scalability. Indexing load can be replicated across multiple MorphlineSolrSinks for high availability, for example using Flume features such as Load balancing Sink Processor. MorphlineInterceptor can also help to implement dynamic routing to multiple Solr collections (e.g. for multi-tenancy).</p>
+<p>The morphline and solr jars required for your environment must be placed in the lib directory of the Apache Flume installation.</p>
+<p>The type is the FQCN: org.apache.flume.sink.solr.morphline.MorphlineSolrSink</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="7%" />
+<col width="16%" />
+<col width="77%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.sink.solr.morphline.MorphlineSolrSink</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>morphlineFile</strong></td>
+<td>&#8211;</td>
+<td>The relative or absolute path on the local file system to the morphline configuration file. Example: <tt class="docutils literal"><span class="pre">/etc/flume-ng/conf/morphline.conf</span></tt></td>
+</tr>
+<tr class="row-odd"><td>morphlineId</td>
+<td>null</td>
+<td>Optional name used to identify a morphline if there are multiple morphlines in a morphline config file</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>1000</td>
+<td>The maximum number of events to take per flume transaction.</td>
+</tr>
+<tr class="row-odd"><td>batchDurationMillis</td>
+<td>1000</td>
+<td>The maximum duration per flume transaction (ms). The transaction commits after this duration or when batchSize is exceeded, whichever comes first.</td>
+</tr>
+<tr class="row-even"><td>handlerClass</td>
+<td>org.apache.flume.sink.solr.morphline.MorphlineHandlerImpl</td>
+<td>The FQCN of a class implementing org.apache.flume.sink.solr.morphline.MorphlineHandler</td>
+</tr>
+<tr class="row-odd"><td>isProductionMode</td>
+<td>false</td>
+<td>This flag should be enabled for mission critical, large-scale online production systems that need to make progress without downtime when unrecoverable exceptions occur. Corrupt or malformed parser input data, parser bugs, and errors related to unknown Solr schema fields produce unrecoverable exceptions.</td>
+</tr>
+<tr class="row-even"><td>recoverableExceptionClasses</td>
+<td>org.apache.solr.client.solrj.SolrServerException</td>
+<td>Comma separated list of recoverable exceptions that tend to be transient, in which case the corresponding task can be retried. Examples include network connection errors, timeouts, etc. When the production mode flag is set to true, the recoverable exceptions configured using this parameter will not be ignored and hence will lead to retries.</td>
+</tr>
+<tr class="row-odd"><td>isIgnoringRecoverableExceptions</td>
+<td>false</td>
+<td>This flag should be enabled, if an unrecoverable exception is accidentally misclassified as recoverable. This enables the sink to make progress and avoid retrying an event forever.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.solr.morphline.MorphlineSolrSink</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.morphlineFile</span> <span class="o">=</span> <span class="s">/etc/flume-ng/conf/morphline.conf</span>
+<span class="c"># a1.sinks.k1.morphlineId = morphline1</span>
+<span class="c"># a1.sinks.k1.batchSize = 1000</span>
+<span class="c"># a1.sinks.k1.batchDurationMillis = 1000</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="kite-dataset-sink">
+<h4>Kite Dataset Sink<a class="headerlink" href="#kite-dataset-sink" title="Permalink to this headline">¶</a></h4>
+<p>Experimental sink that writes events to a <a class="reference external" href="http://kitesdk.org/docs/current/guide/">Kite Dataset</a>.
+This sink will deserialize the body of each incoming event and store the
+resulting record in a Kite Dataset. It determines target Dataset by loading a
+dataset by URI.</p>
+<p>The only supported serialization is avro, and the record schema must be passed
+in the event headers, using either <tt class="docutils literal"><span class="pre">flume.avro.schema.literal</span></tt> with the JSON
+schema representation or <tt class="docutils literal"><span class="pre">flume.avro.schema.url</span></tt> with a URL where the schema
+may be found (<tt class="docutils literal"><span class="pre">hdfs:/...</span></tt> URIs are supported). This is compatible with the
+Log4jAppender flume client and the spooling directory source&#8217;s Avro
+deserializer using <tt class="docutils literal"><span class="pre">deserializer.schemaType</span> <span class="pre">=</span> <span class="pre">LITERAL</span></tt>.</p>
+<p>Note 1: The <tt class="docutils literal"><span class="pre">flume.avro.schema.hash</span></tt> header is <strong>not supported</strong>.
+Note 2: In some cases, file rolling may occur slightly after the roll interval
+has been exceeded. However, this delay will not exceed 5 seconds. In most
+cases, the delay is negligible.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="28%" />
+<col width="7%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>Must be org.apache.flume.sink.kite.DatasetSink</td>
+</tr>
+<tr class="row-even"><td><strong>kite.dataset.uri</strong></td>
+<td>&#8211;</td>
+<td>URI of the dataset to open</td>
+</tr>
+<tr class="row-odd"><td>kite.repo.uri</td>
+<td>&#8211;</td>
+<td>URI of the repository to open
+(deprecated; use kite.dataset.uri instead)</td>
+</tr>
+<tr class="row-even"><td>kite.dataset.namespace</td>
+<td>&#8211;</td>
+<td>Namespace of the Dataset where records will be written
+(deprecated; use kite.dataset.uri instead)</td>
+</tr>
+<tr class="row-odd"><td>kite.dataset.name</td>
+<td>&#8211;</td>
+<td>Name of the Dataset where records will be written
+(deprecated; use kite.dataset.uri instead)</td>
+</tr>
+<tr class="row-even"><td>kite.batchSize</td>
+<td>100</td>
+<td>Number of records to process in each batch</td>
+</tr>
+<tr class="row-odd"><td>kite.rollInterval</td>
+<td>30</td>
+<td>Maximum wait time (seconds) before data files are released</td>
+</tr>
+<tr class="row-even"><td>kite.flushable.commitOnBatch</td>
+<td>true</td>
+<td>If <tt class="docutils literal"><span class="pre">true</span></tt>, the Flume transaction will be commited and the
+writer will be flushed on each batch of <tt class="docutils literal"><span class="pre">kite.batchSize</span></tt>
+records. This setting only applies to flushable datasets. When
+<tt class="docutils literal"><span class="pre">true</span></tt>, it&#8217;s possible for temp files with commited data to be
+left in the dataset directory. These files need to be recovered
+by hand for the data to be visible to DatasetReaders.</td>
+</tr>
+<tr class="row-odd"><td>kite.syncable.syncOnBatch</td>
+<td>true</td>
+<td>Controls whether the sink will also sync data when committing
+the transaction. This setting only applies to syncable datasets.
+Syncing guarantees that data will be written on stable storage
+on the remote system while flushing only guarantees that data
+has left Flume&#8217;s client buffers. When the
+<tt class="docutils literal"><span class="pre">kite.flushable.commitOnBatch</span></tt> property is set to <tt class="docutils literal"><span class="pre">false</span></tt>,
+this property must also be set to <tt class="docutils literal"><span class="pre">false</span></tt>.</td>
+</tr>
+<tr class="row-even"><td>kite.entityParser</td>
+<td>avro</td>
+<td>Parser that turns Flume <tt class="docutils literal"><span class="pre">Events</span></tt> into Kite entities.
+Valid values are <tt class="docutils literal"><span class="pre">avro</span></tt> and the fully-qualified class name
+of an implementation of the <tt class="docutils literal"><span class="pre">EntityParser.Builder</span></tt> interface.</td>
+</tr>
+<tr class="row-odd"><td>kite.failurePolicy</td>
+<td>retry</td>
+<td>Policy that handles non-recoverable errors such as a missing
+<tt class="docutils literal"><span class="pre">Schema</span></tt> in the <tt class="docutils literal"><span class="pre">Event</span></tt> header. The default value, <tt class="docutils literal"><span class="pre">retry</span></tt>,
+will fail the current batch and try again which matches the old
+behavior. Other valid values are <tt class="docutils literal"><span class="pre">save</span></tt>, which will write the
+raw <tt class="docutils literal"><span class="pre">Event</span></tt> to the <tt class="docutils literal"><span class="pre">kite.error.dataset.uri</span></tt> dataset, and the
+fully-qualified class name of an implementation of the
+<tt class="docutils literal"><span class="pre">FailurePolicy.Builder</span></tt> interface.</td>
+</tr>
+<tr class="row-even"><td>kite.error.dataset.uri</td>
+<td>&#8211;</td>
+<td>URI of the dataset where failed events are saved when
+<tt class="docutils literal"><span class="pre">kite.failurePolicy</span></tt> is set to <tt class="docutils literal"><span class="pre">save</span></tt>. <strong>Required</strong> when
+the <tt class="docutils literal"><span class="pre">kite.failurePolicy</span></tt> is set to <tt class="docutils literal"><span class="pre">save</span></tt>.</td>
+</tr>
+<tr class="row-odd"><td>auth.kerberosPrincipal</td>
+<td>&#8211;</td>
+<td>Kerberos user principal for secure authentication to HDFS</td>
+</tr>
+<tr class="row-even"><td>auth.kerberosKeytab</td>
+<td>&#8211;</td>
+<td>Kerberos keytab location (local FS) for the principal</td>
+</tr>
+<tr class="row-odd"><td>auth.proxyUser</td>
+<td>&#8211;</td>
+<td>The effective user for HDFS actions, if different from
+the kerberos principal</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="kafka-sink">
+<h4>Kafka Sink<a class="headerlink" href="#kafka-sink" title="Permalink to this headline">¶</a></h4>
+<p>This is a Flume Sink implementation that can publish data to a
+<a class="reference external" href="http://kafka.apache.org/">Kafka</a> topic. One of the objectives is to integrate Flume
+with Kafka so that pull based processing systems can process the data coming
+through various Flume sources.</p>
+<p>This currently supports Kafka server releases 0.10.1.0 or higher. Testing was done up to 2.0.1, which was the highest available version at the time of the release.</p>
+<p>Required properties are marked in bold font.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="10%" />
+<col width="73%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>Must be set to <tt class="docutils literal"><span class="pre">org.apache.flume.sink.kafka.KafkaSink</span></tt></td>
+</tr>
+<tr class="row-odd"><td><strong>kafka.bootstrap.servers</strong></td>
+<td>&#8211;</td>
+<td>List of brokers Kafka-Sink will connect to, to get the list of topic partitions
+This can be a partial list of brokers, but we recommend at least two for HA.
+The format is comma separated list of hostname:port</td>
+</tr>
+<tr class="row-even"><td>kafka.topic</td>
+<td>default-flume-topic</td>
+<td>The topic in Kafka to which the messages will be published. If this parameter is configured,
+messages will be published to this topic.
+If the event header contains a &#8220;topic&#8221; field, the event will be published to that topic
+overriding the topic configured here.
+Arbitrary header substitution is supported, eg. %{header} is replaced with value of event header named &#8220;header&#8221;.
+(If using the substitution, it is recommended to set &#8220;auto.create.topics.enable&#8221; property of Kafka broker to true.)</td>
+</tr>
+<tr class="row-odd"><td>flumeBatchSize</td>
+<td>100</td>
+<td>How many messages to process in one batch. Larger batches improve throughput while adding latency.</td>
+</tr>
+<tr class="row-even"><td>kafka.producer.acks</td>
+<td>1</td>
+<td>How many replicas must acknowledge a message before it is considered successfully written.
+Accepted values are 0 (Never wait for acknowledgement), 1 (wait for leader only), -1 (wait for all replicas)
+Set this to -1 to avoid data loss in some cases of leader failure.</td>
+</tr>
+<tr class="row-odd"><td>useFlumeEventFormat</td>
+<td>false</td>
+<td>By default events are put as bytes onto the Kafka topic directly from the event body. Set to
+true to store events as the Flume Avro binary format. Used in conjunction with the same property
+on the KafkaSource or with the parseAsFlumeEvent property on the Kafka Channel this will preserve
+any Flume headers for the producing side.</td>
+</tr>
+<tr class="row-even"><td>defaultPartitionId</td>
+<td>&#8211;</td>
+<td>Specifies a Kafka partition ID (integer) for all events in this channel to be sent to, unless
+overridden by <tt class="docutils literal"><span class="pre">partitionIdHeader</span></tt>. By default, if this property is not set, events will be
+distributed by the Kafka Producer&#8217;s partitioner - including by <tt class="docutils literal"><span class="pre">key</span></tt> if specified (or by a
+partitioner specified by <tt class="docutils literal"><span class="pre">kafka.partitioner.class</span></tt>).</td>
+</tr>
+<tr class="row-odd"><td>partitionIdHeader</td>
+<td>&#8211;</td>
+<td>When set, the sink will take the value of the field named using the value of this property
+from the event header and send the message to the specified partition of the topic. If the
+value represents an invalid partition, an EventDeliveryException will be thrown. If the header value
+is present then this setting overrides <tt class="docutils literal"><span class="pre">defaultPartitionId</span></tt>.</td>
+</tr>
+<tr class="row-even"><td>allowTopicOverride</td>
+<td>true</td>
+<td>When set, the sink will allow a message to be produced into a topic specified by the <tt class="docutils literal"><span class="pre">topicHeader</span></tt> property (if provided).</td>
+</tr>
+<tr class="row-odd"><td>topicHeader</td>
+<td>topic</td>
+<td>When set in conjunction with <tt class="docutils literal"><span class="pre">allowTopicOverride</span></tt> will produce a message into the value of the header named using the value of this property.
+Care should be taken when using in conjunction with the Kafka Source <tt class="docutils literal"><span class="pre">topicHeader</span></tt> property to avoid creating a loopback.</td>
+</tr>
+<tr class="row-even"><td>kafka.producer.security.protocol</td>
+<td>PLAINTEXT</td>
+<td>Set to SASL_PLAINTEXT, SASL_SSL or SSL if writing to Kafka using some level of security. See below for additional info on secure setup.</td>
+</tr>
+<tr class="row-odd"><td><em>more producer security props</em></td>
+<td>&nbsp;</td>
+<td>If using SASL_PLAINTEXT, SASL_SSL or SSL refer to <a class="reference external" href="http://kafka.apache.org/documentation.html#security">Kafka security</a> for additional
+properties that need to be set on producer.</td>
+</tr>
+<tr class="row-even"><td>Other Kafka Producer Properties</td>
+<td>&#8211;</td>
+<td>These properties are used to configure the Kafka Producer. Any producer property supported
+by Kafka can be used. The only requirement is to prepend the property name with the prefix
+<tt class="docutils literal"><span class="pre">kafka.producer</span></tt>.
+For example: kafka.producer.linger.ms</td>
+</tr>
+</tbody>
+</table>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">Kafka Sink uses the <tt class="docutils literal"><span class="pre">topic</span></tt> and <tt class="docutils literal"><span class="pre">key</span></tt> properties from the FlumeEvent headers to send events to Kafka.
+If <tt class="docutils literal"><span class="pre">topic</span></tt> exists in the headers, the event will be sent to that specific topic, overriding the topic configured for the Sink.
+If <tt class="docutils literal"><span class="pre">key</span></tt> exists in the headers, the key will be used by Kafka to partition the data between the topic partitions. Events with the same key
+will be sent to the same partition. If the key is null, events will be sent to random partitions.</p>
+</div>
+<p>The Kafka sink also provides defaults for the key.serializer(org.apache.kafka.common.serialization.StringSerializer)
+and value.serializer(org.apache.kafka.common.serialization.ByteArraySerializer). Modification of these parameters is not recommended.</p>
+<p>Deprecated Properties</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="13%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>brokerList</td>
+<td>&#8211;</td>
+<td>Use kafka.bootstrap.servers</td>
+</tr>
+<tr class="row-odd"><td>topic</td>
+<td>default-flume-topic</td>
+<td>Use kafka.topic</td>
+</tr>
+<tr class="row-even"><td>batchSize</td>
+<td>100</td>
+<td>Use kafka.flumeBatchSize</td>
+</tr>
+<tr class="row-odd"><td>requiredAcks</td>
+<td>1</td>
+<td>Use kafka.producer.acks</td>
+</tr>
+</tbody>
+</table>
+<p>An example configuration of a Kafka sink is given below. Properties starting
+with the prefix <tt class="docutils literal"><span class="pre">kafka.producer</span></tt> are passed to the Kafka producer. The properties that are passed when creating the Kafka
+producer are not limited to the properties given in this example.
+Also it is possible to include your custom properties here and access them inside
+the preprocessor through the Flume Context object passed in as a method
+argument.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.kafka.KafkaSink</span>
+<span class="na">a1.sinks.k1.kafka.topic</span> <span class="o">=</span> <span class="s">mytopic</span>
+<span class="na">a1.sinks.k1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">localhost:9092</span>
+<span class="na">a1.sinks.k1.kafka.flumeBatchSize</span> <span class="o">=</span> <span class="s">20</span>
+<span class="na">a1.sinks.k1.kafka.producer.acks</span> <span class="o">=</span> <span class="s">1</span>
+<span class="na">a1.sinks.k1.kafka.producer.linger.ms</span> <span class="o">=</span> <span class="s">1</span>
+<span class="na">a1.sinks.k1.kafka.producer.compression.type</span> <span class="o">=</span> <span class="s">snappy</span>
+</pre></div>
+</div>
+<p><strong>Security and Kafka Sink:</strong></p>
+<p>Secure authentication as well as data encryption is supported on the communication channel between Flume and Kafka.
+For secure authentication SASL/GSSAPI (Kerberos V5) or SSL (even though the parameter is named SSL, the actual protocol is a TLS implementation) can be used from Kafka version 0.9.0.</p>
+<p>As of now data encryption is solely provided by SSL/TLS.</p>
+<p>Setting <tt class="docutils literal"><span class="pre">kafka.producer.security.protocol</span></tt> to any of the following value means:</p>
+<ul class="simple">
+<li><strong>SASL_PLAINTEXT</strong> - Kerberos or plaintext authentication with no data encryption</li>
+<li><strong>SASL_SSL</strong> - Kerberos or plaintext authentication with data encryption</li>
+<li><strong>SSL</strong> - TLS based encryption with optional authentication.</li>
+</ul>
+<div class="admonition warning">
+<p class="first admonition-title">Warning</p>
+<p class="last">There is a performance degradation when SSL is enabled,
+the magnitude of which depends on the CPU type and the JVM implementation.
+Reference: <a class="reference external" href="http://kafka.apache.org/documentation#security_overview">Kafka security overview</a>
+and the jira for tracking this issue:
+<a class="reference external" href="https://issues.apache.org/jira/browse/KAFKA-2561">KAFKA-2561</a></p>
+</div>
+<p><strong>TLS and Kafka Sink:</strong></p>
+<p>Please read the steps described in <a class="reference external" href="http://kafka.apache.org/documentation#security_configclients">Configuring Kafka Clients SSL</a>
+to learn about additional configuration settings for fine tuning for example any of the following:
+security provider, cipher suites, enabled protocols, truststore or keystore types.</p>
+<p>Example configuration with server side authentication and data encryption.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks.sink1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.kafka.KafkaSink</span>
+<span class="na">a1.sinks.sink1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.sinks.sink1.kafka.topic</span> <span class="o">=</span> <span class="s">mytopic</span>
+<span class="na">a1.sinks.sink1.kafka.producer.security.protocol</span> <span class="o">=</span> <span class="s">SSL</span>
+<span class="c"># optional, the global truststore can be used alternatively</span>
+<span class="na">a1.sinks.sink1.kafka.producer.ssl.truststore.location</span> <span class="o">=</span> <span class="s">/path/to/truststore.jks</span>
+<span class="na">a1.sinks.sink1.kafka.producer.ssl.truststore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the truststore&gt;</span>
+</pre></div>
+</div>
+<p>Specifying the truststore is optional here; the global truststore can be used instead.
+For more details about the global SSL setup, see the <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section.</p>
+<p>Note: By default the property <tt class="docutils literal"><span class="pre">ssl.endpoint.identification.algorithm</span></tt>
+is not defined, so hostname verification is not performed.
+In order to enable hostname verification, set the following properties</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks.sink1.kafka.producer.ssl.endpoint.identification.algorithm</span> <span class="o">=</span> <span class="s">HTTPS</span>
+</pre></div>
+</div>
+<p>Once enabled, clients will verify the server&#8217;s fully qualified domain name (FQDN)
+against one of the following two fields:</p>
+<ol class="arabic simple">
+<li>Common Name (CN) <a class="reference external" href="https://tools.ietf.org/html/rfc6125#section-2.3">https://tools.ietf.org/html/rfc6125#section-2.3</a></li>
+<li>Subject Alternative Name (SAN) <a class="reference external" href="https://tools.ietf.org/html/rfc5280#section-4.2.1.6">https://tools.ietf.org/html/rfc5280#section-4.2.1.6</a></li>
+</ol>
+<p>If client side authentication is also required then additionally the following needs to be added to Flume agent
+configuration or the global SSL setup can be used (see <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section).
+Each Flume agent has to have its client certificate which has to be trusted by Kafka brokers either
+individually or by their signature chain. Common example is to sign each client certificate by a single Root CA
+which in turn is trusted by Kafka brokers.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># optional, the global keystore can be used alternatively</span>
+<span class="na">a1.sinks.sink1.kafka.producer.ssl.keystore.location</span> <span class="o">=</span> <span class="s">/path/to/client.keystore.jks</span>
+<span class="na">a1.sinks.sink1.kafka.producer.ssl.keystore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the keystore&gt;</span>
+</pre></div>
+</div>
+<p>If keystore and key use different password protection then <tt class="docutils literal"><span class="pre">ssl.key.password</span></tt> property will
+provide the required additional secret for producer keystore:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks.sink1.kafka.producer.ssl.key.password</span> <span class="o">=</span> <span class="s">&lt;password to access the key&gt;</span>
+</pre></div>
+</div>
+<p><strong>Kerberos and Kafka Sink:</strong></p>
+<p>To use Kafka sink with a Kafka cluster secured with Kerberos, set the <tt class="docutils literal"><span class="pre">producer.security.protocol</span></tt> property noted above for producer.
+The Kerberos keytab and principal to be used with Kafka brokers is specified in a JAAS file&#8217;s &#8220;KafkaClient&#8221; section. &#8220;Client&#8221; section describes the Zookeeper connection if needed.
+See <a class="reference external" href="http://kafka.apache.org/documentation.html#security_sasl_clientconfig">Kafka doc</a>
+for information on the JAAS file contents. The location of this JAAS file and optionally the system wide kerberos configuration can be specified via JAVA_OPTS in flume-env.sh:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">JAVA_OPTS</span><span class="o">=</span><span class="s">&quot;$JAVA_OPTS -Djava.security.krb5.conf=/path/to/krb5.conf&quot;</span>
+<span class="na">JAVA_OPTS</span><span class="o">=</span><span class="s">&quot;$JAVA_OPTS -Djava.security.auth.login.config=/path/to/flume_jaas.conf&quot;</span>
+</pre></div>
+</div>
+<p>Example secure configuration using SASL_PLAINTEXT:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks.sink1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.kafka.KafkaSink</span>
+<span class="na">a1.sinks.sink1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.sinks.sink1.kafka.topic</span> <span class="o">=</span> <span class="s">mytopic</span>
+<span class="na">a1.sinks.sink1.kafka.producer.security.protocol</span> <span class="o">=</span> <span class="s">SASL_PLAINTEXT</span>
+<span class="na">a1.sinks.sink1.kafka.producer.sasl.mechanism</span> <span class="o">=</span> <span class="s">GSSAPI</span>
+<span class="na">a1.sinks.sink1.kafka.producer.sasl.kerberos.service.name</span> <span class="o">=</span> <span class="s">kafka</span>
+</pre></div>
+</div>
+<p>Example secure configuration using SASL_SSL:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks.sink1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.kafka.KafkaSink</span>
+<span class="na">a1.sinks.sink1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.sinks.sink1.kafka.topic</span> <span class="o">=</span> <span class="s">mytopic</span>
+<span class="na">a1.sinks.sink1.kafka.producer.security.protocol</span> <span class="o">=</span> <span class="s">SASL_SSL</span>
+<span class="na">a1.sinks.sink1.kafka.producer.sasl.mechanism</span> <span class="o">=</span> <span class="s">GSSAPI</span>
+<span class="na">a1.sinks.sink1.kafka.producer.sasl.kerberos.service.name</span> <span class="o">=</span> <span class="s">kafka</span>
+<span class="c"># optional, the global truststore can be used alternatively</span>
+<span class="na">a1.sinks.sink1.kafka.producer.ssl.truststore.location</span> <span class="o">=</span> <span class="s">/path/to/truststore.jks</span>
+<span class="na">a1.sinks.sink1.kafka.producer.ssl.truststore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the truststore&gt;</span>
+</pre></div>
+</div>
+<p>Sample JAAS file. For reference of its content please see client config sections of the desired authentication mechanism (GSSAPI/PLAIN)
+in Kafka documentation of <a class="reference external" href="http://kafka.apache.org/documentation#security_sasl_clientconfig">SASL configuration</a>.
+Unlike the Kafka Source or Kafka Channel a &#8220;Client&#8221; section is not required, unless it is needed by other connecting components. Also please make sure
+that the operating system user of the Flume processes has read privileges on the jaas and keytab files.</p>
+<div class="highlight-javascript"><div class="highlight"><pre><span class="nx">KafkaClient</span> <span class="p">{</span>
+  <span class="nx">com</span><span class="p">.</span><span class="nb">sun</span><span class="p">.</span><span class="nx">security</span><span class="p">.</span><span class="nx">auth</span><span class="p">.</span><span class="nx">module</span><span class="p">.</span><span class="nx">Krb5LoginModule</span> <span class="nx">required</span>
+  <span class="nx">useKeyTab</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">storeKey</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">keyTab</span><span class="o">=</span><span class="s2">&quot;/path/to/keytabs/flume.keytab&quot;</span>
+  <span class="nx">principal</span><span class="o">=</span><span class="s2">&quot;flume/flumehost1.example.com@YOURKERBEROSREALM&quot;</span><span class="p">;</span>
+<span class="p">};</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="http-sink">
+<h4>HTTP Sink<a class="headerlink" href="#http-sink" title="Permalink to this headline">¶</a></h4>
+<p>Behaviour of this sink is that it will take events from the channel, and
+send those events to a remote service using an HTTP POST request. The event
+content is sent as the POST body.</p>
+<p>Error handling behaviour of this sink depends on the HTTP response returned
+by the target server. The sink backoff/ready status is configurable, as is the
+transaction commit/rollback result and whether the event contributes to the
+successful event drain count.</p>
+<p>Any malformed HTTP response returned by the server where the status code is
+not readable will result in a backoff signal and the event is not consumed
+from the channel.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="18%" />
+<col width="12%" />
+<col width="70%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">http</span></tt>.</td>
+</tr>
+<tr class="row-even"><td><strong>endpoint</strong></td>
+<td>&#8211;</td>
+<td>The fully qualified URL endpoint to POST to</td>
+</tr>
+<tr class="row-odd"><td>connectTimeout</td>
+<td>5000</td>
+<td>The socket connection timeout in milliseconds</td>
+</tr>
+<tr class="row-even"><td>requestTimeout</td>
+<td>5000</td>
+<td>The maximum request processing time in milliseconds</td>
+</tr>
+<tr class="row-odd"><td>contentTypeHeader</td>
+<td>text/plain</td>
+<td>The HTTP Content-Type header</td>
+</tr>
+<tr class="row-even"><td>acceptHeader</td>
+<td>text/plain</td>
+<td>The HTTP Accept header value</td>
+</tr>
+<tr class="row-odd"><td>defaultBackoff</td>
+<td>true</td>
+<td>Whether to backoff by default on receiving all HTTP status codes</td>
+</tr>
+<tr class="row-even"><td>defaultRollback</td>
+<td>true</td>
+<td>Whether to rollback by default on receiving all HTTP status codes</td>
+</tr>
+<tr class="row-odd"><td>defaultIncrementMetrics</td>
+<td>false</td>
+<td>Whether to increment metrics by default on receiving all HTTP status codes</td>
+</tr>
+<tr class="row-even"><td>backoff.CODE</td>
+<td>&#8211;</td>
+<td>Configures a specific backoff for an individual (i.e. 200) code or a group (i.e. 2XX) code</td>
+</tr>
+<tr class="row-odd"><td>rollback.CODE</td>
+<td>&#8211;</td>
+<td>Configures a specific rollback for an individual (i.e. 200) code or a group (i.e. 2XX) code</td>
+</tr>
+<tr class="row-even"><td>incrementMetrics.CODE</td>
+<td>&#8211;</td>
+<td>Configures a specific metrics increment for an individual (i.e. 200) code or a group (i.e. 2XX) code</td>
+</tr>
+</tbody>
+</table>
+<p>Note that the most specific HTTP status code match is used for the backoff,
+rollback and incrementMetrics configuration options. If there are configuration
+values for both 2XX and 200 status codes, then 200 HTTP codes will use the 200
+value, and all other HTTP codes in the 201-299 range will use the 2XX value.</p>
+<p>Any empty or null events are consumed without any request being made to the
+HTTP endpoint.</p>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">http</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.endpoint</span> <span class="o">=</span> <span class="s">http://localhost:8080/someuri</span>
+<span class="na">a1.sinks.k1.connectTimeout</span> <span class="o">=</span> <span class="s">2000</span>
+<span class="na">a1.sinks.k1.requestTimeout</span> <span class="o">=</span> <span class="s">2000</span>
+<span class="na">a1.sinks.k1.acceptHeader</span> <span class="o">=</span> <span class="s">application/json</span>
+<span class="na">a1.sinks.k1.contentTypeHeader</span> <span class="o">=</span> <span class="s">application/json</span>
+<span class="na">a1.sinks.k1.defaultBackoff</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">a1.sinks.k1.defaultRollback</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">a1.sinks.k1.defaultIncrementMetrics</span> <span class="o">=</span> <span class="s">false</span>
+<span class="na">a1.sinks.k1.backoff.4XX</span> <span class="o">=</span> <span class="s">false</span>
+<span class="na">a1.sinks.k1.rollback.4XX</span> <span class="o">=</span> <span class="s">false</span>
+<span class="na">a1.sinks.k1.incrementMetrics.4XX</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">a1.sinks.k1.backoff.200</span> <span class="o">=</span> <span class="s">false</span>
+<span class="na">a1.sinks.k1.rollback.200</span> <span class="o">=</span> <span class="s">false</span>
+<span class="na">a1.sinks.k1.incrementMetrics.200</span> <span class="o">=</span> <span class="s">true</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="custom-sink">
+<h4>Custom Sink<a class="headerlink" href="#custom-sink" title="Permalink to this headline">¶</a></h4>
+<p>A custom sink is your own implementation of the Sink interface. A custom
+sink&#8217;s class and its dependencies must be included in the agent&#8217;s classpath
+when starting the Flume agent. The type of the custom sink is its FQCN.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="11%" />
+<col width="70%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>channel</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be your FQCN</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">org.example.MySink</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="flume-channels">
+<h3>Flume Channels<a class="headerlink" href="#flume-channels" title="Permalink to this headline">¶</a></h3>
+<p>Channels are the repositories where the events are staged on an agent.
+Sources add the events and Sinks remove them.</p>
+<div class="section" id="memory-channel">
+<h4>Memory Channel<a class="headerlink" href="#memory-channel" title="Permalink to this headline">¶</a></h4>
+<p>The events are stored in an in-memory queue with configurable max size. It&#8217;s
+ideal for flows that need higher throughput and are prepared to lose the staged
+data in the event of agent failures.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="13%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">memory</span></tt></td>
+</tr>
+<tr class="row-odd"><td>capacity</td>
+<td>100</td>
+<td>The maximum number of events stored in the channel</td>
+</tr>
+<tr class="row-even"><td>transactionCapacity</td>
+<td>100</td>
+<td>The maximum number of events the channel will take from a source or give to a
+sink per transaction</td>
+</tr>
+<tr class="row-odd"><td>keep-alive</td>
+<td>3</td>
+<td>Timeout in seconds for adding or removing an event</td>
+</tr>
+<tr class="row-even"><td>byteCapacityBufferPercentage</td>
+<td>20</td>
+<td>Defines the percent of buffer between byteCapacity and the estimated total size
+of all events in the channel, to account for data in headers. See below.</td>
+</tr>
+<tr class="row-odd"><td>byteCapacity</td>
+<td>see description</td>
+<td>Maximum total <strong>bytes</strong> of memory allowed as a sum of all events in this channel.
+The implementation only counts the Event <tt class="docutils literal"><span class="pre">body</span></tt>, which is the reason for
+providing the <tt class="docutils literal"><span class="pre">byteCapacityBufferPercentage</span></tt> configuration parameter as well.
+Defaults to a computed value equal to 80% of the maximum memory available to
+the JVM (i.e. 80% of the -Xmx value passed on the command line).
+Note that if you have multiple memory channels on a single JVM, and they happen
+to hold the same physical events (i.e. if you are using a replicating channel
+selector from a single source) then those event sizes may be double-counted for
+channel byteCapacity purposes.
+Setting this value to <tt class="docutils literal"><span class="pre">0</span></tt> will cause this value to fall back to a hard
+internal limit of about 200 GB.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">memory</span>
+<span class="na">a1.channels.c1.capacity</span> <span class="o">=</span> <span class="s">10000</span>
+<span class="na">a1.channels.c1.transactionCapacity</span> <span class="o">=</span> <span class="s">10000</span>
+<span class="na">a1.channels.c1.byteCapacityBufferPercentage</span> <span class="o">=</span> <span class="s">20</span>
+<span class="na">a1.channels.c1.byteCapacity</span> <span class="o">=</span> <span class="s">800000</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="jdbc-channel">
+<h4>JDBC Channel<a class="headerlink" href="#jdbc-channel" title="Permalink to this headline">¶</a></h4>
+<p>The events are stored in a persistent storage that&#8217;s backed by a database.
+The JDBC channel currently supports embedded Derby. This is a durable channel
+that&#8217;s ideal for flows where recoverability is important.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="23%" />
+<col width="32%" />
+<col width="44%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">jdbc</span></tt></td>
+</tr>
+<tr class="row-odd"><td>db.type</td>
+<td>DERBY</td>
+<td>Database vendor, needs to be DERBY.</td>
+</tr>
+<tr class="row-even"><td>driver.class</td>
+<td>org.apache.derby.jdbc.EmbeddedDriver</td>
+<td>Class for vendor&#8217;s JDBC driver</td>
+</tr>
+<tr class="row-odd"><td>driver.url</td>
+<td>(constructed from other properties)</td>
+<td>JDBC connection URL</td>
+</tr>
+<tr class="row-even"><td>db.username</td>
+<td>&#8220;sa&#8221;</td>
+<td>User id for db connection</td>
+</tr>
+<tr class="row-odd"><td>db.password</td>
+<td>&#8211;</td>
+<td>password for db connection</td>
+</tr>
+<tr class="row-even"><td>connection.properties.file</td>
+<td>&#8211;</td>
+<td>JDBC Connection property file path</td>
+</tr>
+<tr class="row-odd"><td>create.schema</td>
+<td>true</td>
+<td>If true, then creates db schema if not there</td>
+</tr>
+<tr class="row-even"><td>create.index</td>
+<td>true</td>
+<td>Create indexes to speed up lookups</td>
+</tr>
+<tr class="row-odd"><td>create.foreignkey</td>
+<td>true</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>transaction.isolation</td>
+<td>&#8220;READ_COMMITTED&#8221;</td>
+<td>Isolation level for db session READ_UNCOMMITTED,
+READ_COMMITTED, SERIALIZABLE, REPEATABLE_READ</td>
+</tr>
+<tr class="row-odd"><td>maximum.connections</td>
+<td>10</td>
+<td>Max connections allowed to db</td>
+</tr>
+<tr class="row-even"><td>maximum.capacity</td>
+<td>0 (unlimited)</td>
+<td>Max number of events in the channel</td>
+</tr>
+<tr class="row-odd"><td>sysprop.*</td>
+<td>&nbsp;</td>
+<td>DB Vendor specific properties</td>
+</tr>
+<tr class="row-even"><td>sysprop.user.home</td>
+<td>&nbsp;</td>
+<td>Home path to store embedded Derby database</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">jdbc</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="kafka-channel">
+<h4>Kafka Channel<a class="headerlink" href="#kafka-channel" title="Permalink to this headline">¶</a></h4>
+<p>The events are stored in a Kafka cluster (must be installed separately). Kafka provides high availability and
+replication, so in case an agent or a kafka broker crashes, the events are immediately available to other sinks</p>
+<p>The Kafka channel can be used for multiple scenarios:</p>
+<ol class="arabic simple">
+<li>With Flume source and sink - it provides a reliable and highly available channel for events</li>
+<li>With Flume source and interceptor but no sink - it allows writing Flume events into a Kafka topic, for use by other apps</li>
+<li>With Flume sink, but no source - it is a low-latency, fault tolerant way to send events from Kafka to Flume sinks such as HDFS, HBase or Solr</li>
+</ol>
+<p>This currently supports Kafka server releases 0.10.1.0 or higher. Testing was done up to 2.0.1, which was the highest available version at the time of the release.</p>
+<p>The configuration parameters are organized as such:</p>
+<ol class="arabic simple">
+<li>Configuration values related to the channel generically are applied at the channel config level, eg: a1.channel.k1.type =</li>
+<li>Configuration values related to Kafka or how the Channel operates are prefixed with &#8220;kafka.&#8221;, (these are analogous to CommonClient Configs) eg: a1.channels.k1.kafka.topic and a1.channels.k1.kafka.bootstrap.servers. This is not dissimilar to how the hdfs sink operates</li>
+<li>Properties specific to the producer/consumer are prefixed by kafka.producer or kafka.consumer</li>
+<li>Where possible, the Kafka parameter names are used, eg: bootstrap.servers and acks</li>
+</ol>
+<p>This version of flume is backwards-compatible with previous versions, however deprecated properties are indicated in the table below and a warning message
+is logged on startup when they are present in the configuration file.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="19%" />
+<col width="13%" />
+<col width="68%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.channel.kafka.KafkaChannel</span></tt></td>
+</tr>
+<tr class="row-odd"><td><strong>kafka.bootstrap.servers</strong></td>
+<td>&#8211;</td>
+<td>List of brokers in the Kafka cluster used by the channel
+This can be a partial list of brokers, but we recommend at least two for HA.
+The format is comma separated list of hostname:port</td>
+</tr>
+<tr class="row-even"><td>kafka.topic</td>
+<td>flume-channel</td>
+<td>Kafka topic which the channel will use</td>
+</tr>
+<tr class="row-odd"><td>kafka.consumer.group.id</td>
+<td>flume</td>
+<td>Consumer group ID the channel uses to register with Kafka.
+Multiple channels must use the same topic and group to ensure that when one agent fails another can get the data
+Note that having non-channel consumers with the same ID can lead to data loss.</td>
+</tr>
+<tr class="row-even"><td>parseAsFlumeEvent</td>
+<td>true</td>
+<td>Expecting Avro datums with FlumeEvent schema in the channel.
+This should be true if Flume source is writing to the channel and false if other producers are
+writing into the topic that the channel is using. Flume source messages to Kafka can be parsed outside of Flume by using
+org.apache.flume.source.avro.AvroFlumeEvent provided by the flume-ng-sdk artifact</td>
+</tr>
+<tr class="row-odd"><td>pollTimeout</td>
+<td>500</td>
+<td>The amount of time (in milliseconds) to wait in the &#8220;poll()&#8221; call of the consumer.
+<a class="reference external" href="https://kafka.apache.org/090/javadoc/org/apache/kafka/clients/consumer/KafkaConsumer.html#poll(long">https://kafka.apache.org/090/javadoc/org/apache/kafka/clients/consumer/KafkaConsumer.html#poll(long</a>)</td>
+</tr>
+<tr class="row-even"><td>defaultPartitionId</td>
+<td>&#8211;</td>
+<td>Specifies a Kafka partition ID (integer) for all events in this channel to be sent to, unless
+overridden by <tt class="docutils literal"><span class="pre">partitionIdHeader</span></tt>. By default, if this property is not set, events will be
+distributed by the Kafka Producer&#8217;s partitioner - including by <tt class="docutils literal"><span class="pre">key</span></tt> if specified (or by a
+partitioner specified by <tt class="docutils literal"><span class="pre">kafka.partitioner.class</span></tt>).</td>
+</tr>
+<tr class="row-odd"><td>partitionIdHeader</td>
+<td>&#8211;</td>
+<td>When set, the producer will take the value of the field named using the value of this property
+from the event header and send the message to the specified partition of the topic. If the
+value represents an invalid partition the event will not be accepted into the channel. If the header value
+is present then this setting overrides <tt class="docutils literal"><span class="pre">defaultPartitionId</span></tt>.</td>
+</tr>
+<tr class="row-even"><td>kafka.consumer.auto.offset.reset</td>
+<td>latest</td>
+<td>What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server
+(e.g. because that data has been deleted):
+earliest: automatically reset the offset to the earliest offset
+latest: automatically reset the offset to the latest offset
+none: throw exception to the consumer if no previous offset is found for the consumer&#8217;s group
+anything else: throw exception to the consumer.</td>
+</tr>
+<tr class="row-odd"><td>kafka.producer.security.protocol</td>
+<td>PLAINTEXT</td>
+<td>Set to SASL_PLAINTEXT, SASL_SSL or SSL if writing to Kafka using some level of security. See below for additional info on secure setup.</td>
+</tr>
+<tr class="row-even"><td>kafka.consumer.security.protocol</td>
+<td>PLAINTEXT</td>
+<td>Same as kafka.producer.security.protocol but for reading/consuming from Kafka.</td>
+</tr>
+<tr class="row-odd"><td><em>more producer/consumer security props</em></td>
+<td>&nbsp;</td>
+<td>If using SASL_PLAINTEXT, SASL_SSL or SSL refer to <a class="reference external" href="http://kafka.apache.org/documentation.html#security">Kafka security</a> for additional
+properties that need to be set on producer/consumer.</td>
+</tr>
+</tbody>
+</table>
+<p>Deprecated Properties</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="18%" />
+<col width="14%" />
+<col width="68%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>brokerList</td>
+<td>&#8211;</td>
+<td>List of brokers in the Kafka cluster used by the channel
+This can be a partial list of brokers, but we recommend at least two for HA.
+The format is comma separated list of hostname:port</td>
+</tr>
+<tr class="row-odd"><td>topic</td>
+<td>flume-channel</td>
+<td>Use kafka.topic</td>
+</tr>
+<tr class="row-even"><td>groupId</td>
+<td>flume</td>
+<td>Use kafka.consumer.group.id</td>
+</tr>
+<tr class="row-odd"><td>readSmallestOffset</td>
+<td>false</td>
+<td>Use kafka.consumer.auto.offset.reset</td>
+</tr>
+<tr class="row-even"><td>migrateZookeeperOffsets</td>
+<td>true</td>
+<td>When no Kafka stored offset is found, look up the offsets in Zookeeper and commit them to Kafka.
+This should be true to support seamless Kafka client migration from older versions of Flume. Once migrated this can be set
+to false, though that should generally not be required. If no Zookeeper offset is found the kafka.consumer.auto.offset.reset
+configuration defines how offsets are handled.</td>
+</tr>
+</tbody>
+</table>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">Due to the way the channel is load balanced, there may be duplicate events when the agent first starts up</p>
+</div>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.channel1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.channel.kafka.KafkaChannel</span>
+<span class="na">a1.channels.channel1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9092,kafka-2:9092,kafka-3:9092</span>
+<span class="na">a1.channels.channel1.kafka.topic</span> <span class="o">=</span> <span class="s">channel1</span>
+<span class="na">a1.channels.channel1.kafka.consumer.group.id</span> <span class="o">=</span> <span class="s">flume-consumer</span>
+</pre></div>
+</div>
+<p><strong>Security and Kafka Channel:</strong></p>
+<p>Secure authentication as well as data encryption is supported on the communication channel between Flume and Kafka.
+For secure authentication SASL/GSSAPI (Kerberos V5) or SSL (even though the parameter is named SSL, the actual protocol is a TLS implementation) can be used from Kafka version 0.9.0.</p>
+<p>As of now data encryption is solely provided by SSL/TLS.</p>
+<p>Setting <tt class="docutils literal"><span class="pre">kafka.producer|consumer.security.protocol</span></tt> to any of the following value means:</p>
+<ul class="simple">
+<li><strong>SASL_PLAINTEXT</strong> - Kerberos or plaintext authentication with no data encryption</li>
+<li><strong>SASL_SSL</strong> - Kerberos or plaintext authentication with data encryption</li>
+<li><strong>SSL</strong> - TLS based encryption with optional authentication.</li>
+</ul>
+<div class="admonition warning">
+<p class="first admonition-title">Warning</p>
+<p class="last">There is a performance degradation when SSL is enabled,
+the magnitude of which depends on the CPU type and the JVM implementation.
+Reference: <a class="reference external" href="http://kafka.apache.org/documentation#security_overview">Kafka security overview</a>
+and the jira for tracking this issue:
+<a class="reference external" href="https://issues.apache.org/jira/browse/KAFKA-2561">KAFKA-2561</a></p>
+</div>
+<p><strong>TLS and Kafka Channel:</strong></p>
+<p>Please read the steps described in <a class="reference external" href="http://kafka.apache.org/documentation#security_configclients">Configuring Kafka Clients SSL</a>
+to learn about additional configuration settings for fine tuning for example any of the following:
+security provider, cipher suites, enabled protocols, truststore or keystore types.</p>
+<p>Example configuration with server side authentication and data encryption.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.channel1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.channel.kafka.KafkaChannel</span>
+<span class="na">a1.channels.channel1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.channels.channel1.kafka.topic</span> <span class="o">=</span> <span class="s">channel1</span>
+<span class="na">a1.channels.channel1.kafka.consumer.group.id</span> <span class="o">=</span> <span class="s">flume-consumer</span>
+<span class="na">a1.channels.channel1.kafka.producer.security.protocol</span> <span class="o">=</span> <span class="s">SSL</span>
+<span class="c"># optional, the global truststore can be used alternatively</span>
+<span class="na">a1.channels.channel1.kafka.producer.ssl.truststore.location</span> <span class="o">=</span> <span class="s">/path/to/truststore.jks</span>
+<span class="na">a1.channels.channel1.kafka.producer.ssl.truststore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the truststore&gt;</span>
+<span class="na">a1.channels.channel1.kafka.consumer.security.protocol</span> <span class="o">=</span> <span class="s">SSL</span>
+<span class="c"># optional, the global truststore can be used alternatively</span>
+<span class="na">a1.channels.channel1.kafka.consumer.ssl.truststore.location</span> <span class="o">=</span> <span class="s">/path/to/truststore.jks</span>
+<span class="na">a1.channels.channel1.kafka.consumer.ssl.truststore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the truststore&gt;</span>
+</pre></div>
+</div>
+<p>Specifying the truststore is optional here, the global truststore can be used instead.
+For more details about the global SSL setup, see the <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section.</p>
+<p>Note: By default the property <tt class="docutils literal"><span class="pre">ssl.endpoint.identification.algorithm</span></tt>
+is not defined, so hostname verification is not performed.
+In order to enable hostname verification, set the following properties</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.channel1.kafka.producer.ssl.endpoint.identification.algorithm</span> <span class="o">=</span> <span class="s">HTTPS</span>
+<span class="na">a1.channels.channel1.kafka.consumer.ssl.endpoint.identification.algorithm</span> <span class="o">=</span> <span class="s">HTTPS</span>
+</pre></div>
+</div>
+<p>Once enabled, clients will verify the server&#8217;s fully qualified domain name (FQDN)
+against one of the following two fields:</p>
+<ol class="arabic simple">
+<li>Common Name (CN) <a class="reference external" href="https://tools.ietf.org/html/rfc6125#section-2.3">https://tools.ietf.org/html/rfc6125#section-2.3</a></li>
+<li>Subject Alternative Name (SAN) <a class="reference external" href="https://tools.ietf.org/html/rfc5280#section-4.2.1.6">https://tools.ietf.org/html/rfc5280#section-4.2.1.6</a></li>
+</ol>
+<p>If client side authentication is also required then additionally the following needs to be added to Flume agent
+configuration or the global SSL setup can be used (see <a class="reference internal" href="#ssl-tls-support">SSL/TLS support</a> section).
+Each Flume agent has to have its client certificate which has to be trusted by Kafka brokers either
+individually or by their signature chain. Common example is to sign each client certificate by a single Root CA
+which in turn is trusted by Kafka brokers.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c"># optional, the global keystore can be used alternatively</span>
+<span class="na">a1.channels.channel1.kafka.producer.ssl.keystore.location</span> <span class="o">=</span> <span class="s">/path/to/client.keystore.jks</span>
+<span class="na">a1.channels.channel1.kafka.producer.ssl.keystore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the keystore&gt;</span>
+<span class="c"># optional, the global keystore can be used alternatively</span>
+<span class="na">a1.channels.channel1.kafka.consumer.ssl.keystore.location</span> <span class="o">=</span> <span class="s">/path/to/client.keystore.jks</span>
+<span class="na">a1.channels.channel1.kafka.consumer.ssl.keystore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the keystore&gt;</span>
+</pre></div>
+</div>
+<p>If keystore and key use different password protection then <tt class="docutils literal"><span class="pre">ssl.key.password</span></tt> property will
+provide the required additional secret for both consumer and producer keystores:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.channel1.kafka.producer.ssl.key.password</span> <span class="o">=</span> <span class="s">&lt;password to access the key&gt;</span>
+<span class="na">a1.channels.channel1.kafka.consumer.ssl.key.password</span> <span class="o">=</span> <span class="s">&lt;password to access the key&gt;</span>
+</pre></div>
+</div>
+<p><strong>Kerberos and Kafka Channel:</strong></p>
+<p>To use Kafka channel with a Kafka cluster secured with Kerberos, set the <tt class="docutils literal"><span class="pre">producer/consumer.security.protocol</span></tt> properties noted above for producer and/or consumer.
+The Kerberos keytab and principal to be used with Kafka brokers is specified in a JAAS file&#8217;s &#8220;KafkaClient&#8221; section. &#8220;Client&#8221; section describes the Zookeeper connection if needed.
+See <a class="reference external" href="http://kafka.apache.org/documentation.html#security_sasl_clientconfig">Kafka doc</a>
+for information on the JAAS file contents. The location of this JAAS file and optionally the system wide kerberos configuration can be specified via JAVA_OPTS in flume-env.sh:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">JAVA_OPTS</span><span class="o">=</span><span class="s">&quot;$JAVA_OPTS -Djava.security.krb5.conf=/path/to/krb5.conf&quot;</span>
+<span class="na">JAVA_OPTS</span><span class="o">=</span><span class="s">&quot;$JAVA_OPTS -Djava.security.auth.login.config=/path/to/flume_jaas.conf&quot;</span>
+</pre></div>
+</div>
+<p>Example secure configuration using SASL_PLAINTEXT:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.channel1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.channel.kafka.KafkaChannel</span>
+<span class="na">a1.channels.channel1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.channels.channel1.kafka.topic</span> <span class="o">=</span> <span class="s">channel1</span>
+<span class="na">a1.channels.channel1.kafka.consumer.group.id</span> <span class="o">=</span> <span class="s">flume-consumer</span>
+<span class="na">a1.channels.channel1.kafka.producer.security.protocol</span> <span class="o">=</span> <span class="s">SASL_PLAINTEXT</span>
+<span class="na">a1.channels.channel1.kafka.producer.sasl.mechanism</span> <span class="o">=</span> <span class="s">GSSAPI</span>
+<span class="na">a1.channels.channel1.kafka.producer.sasl.kerberos.service.name</span> <span class="o">=</span> <span class="s">kafka</span>
+<span class="na">a1.channels.channel1.kafka.consumer.security.protocol</span> <span class="o">=</span> <span class="s">SASL_PLAINTEXT</span>
+<span class="na">a1.channels.channel1.kafka.consumer.sasl.mechanism</span> <span class="o">=</span> <span class="s">GSSAPI</span>
+<span class="na">a1.channels.channel1.kafka.consumer.sasl.kerberos.service.name</span> <span class="o">=</span> <span class="s">kafka</span>
+</pre></div>
+</div>
+<p>Example secure configuration using SASL_SSL:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.channel1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.channel.kafka.KafkaChannel</span>
+<span class="na">a1.channels.channel1.kafka.bootstrap.servers</span> <span class="o">=</span> <span class="s">kafka-1:9093,kafka-2:9093,kafka-3:9093</span>
+<span class="na">a1.channels.channel1.kafka.topic</span> <span class="o">=</span> <span class="s">channel1</span>
+<span class="na">a1.channels.channel1.kafka.consumer.group.id</span> <span class="o">=</span> <span class="s">flume-consumer</span>
+<span class="na">a1.channels.channel1.kafka.producer.security.protocol</span> <span class="o">=</span> <span class="s">SASL_SSL</span>
+<span class="na">a1.channels.channel1.kafka.producer.sasl.mechanism</span> <span class="o">=</span> <span class="s">GSSAPI</span>
+<span class="na">a1.channels.channel1.kafka.producer.sasl.kerberos.service.name</span> <span class="o">=</span> <span class="s">kafka</span>
+<span class="c"># optional, the global truststore can be used alternatively</span>
+<span class="na">a1.channels.channel1.kafka.producer.ssl.truststore.location</span> <span class="o">=</span> <span class="s">/path/to/truststore.jks</span>
+<span class="na">a1.channels.channel1.kafka.producer.ssl.truststore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the truststore&gt;</span>
+<span class="na">a1.channels.channel1.kafka.consumer.security.protocol</span> <span class="o">=</span> <span class="s">SASL_SSL</span>
+<span class="na">a1.channels.channel1.kafka.consumer.sasl.mechanism</span> <span class="o">=</span> <span class="s">GSSAPI</span>
+<span class="na">a1.channels.channel1.kafka.consumer.sasl.kerberos.service.name</span> <span class="o">=</span> <span class="s">kafka</span>
+<span class="c"># optional, the global truststore can be used alternatively</span>
+<span class="na">a1.channels.channel1.kafka.consumer.ssl.truststore.location</span> <span class="o">=</span> <span class="s">/path/to/truststore.jks</span>
+<span class="na">a1.channels.channel1.kafka.consumer.ssl.truststore.password</span> <span class="o">=</span> <span class="s">&lt;password to access the truststore&gt;</span>
+</pre></div>
+</div>
+<p>Sample JAAS file. For reference of its content please see client config sections of the desired authentication mechanism (GSSAPI/PLAIN)
+in Kafka documentation of <a class="reference external" href="http://kafka.apache.org/documentation#security_sasl_clientconfig">SASL configuration</a>.
+Since the Kafka Source may also connect to Zookeeper for offset migration, the &#8220;Client&#8221; section was also added to this example.
+This won&#8217;t be needed unless you require offset migration, or you require this section for other secure components.
+Also please make sure that the operating system user of the Flume processes has read privileges on the jaas and keytab files.</p>
+<div class="highlight-javascript"><div class="highlight"><pre><span class="nx">Client</span> <span class="p">{</span>
+  <span class="nx">com</span><span class="p">.</span><span class="nb">sun</span><span class="p">.</span><span class="nx">security</span><span class="p">.</span><span class="nx">auth</span><span class="p">.</span><span class="nx">module</span><span class="p">.</span><span class="nx">Krb5LoginModule</span> <span class="nx">required</span>
+  <span class="nx">useKeyTab</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">storeKey</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">keyTab</span><span class="o">=</span><span class="s2">&quot;/path/to/keytabs/flume.keytab&quot;</span>
+  <span class="nx">principal</span><span class="o">=</span><span class="s2">&quot;flume/flumehost1.example.com@YOURKERBEROSREALM&quot;</span><span class="p">;</span>
+<span class="p">};</span>
+
+<span class="nx">KafkaClient</span> <span class="p">{</span>
+  <span class="nx">com</span><span class="p">.</span><span class="nb">sun</span><span class="p">.</span><span class="nx">security</span><span class="p">.</span><span class="nx">auth</span><span class="p">.</span><span class="nx">module</span><span class="p">.</span><span class="nx">Krb5LoginModule</span> <span class="nx">required</span>
+  <span class="nx">useKeyTab</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">storeKey</span><span class="o">=</span><span class="kc">true</span>
+  <span class="nx">keyTab</span><span class="o">=</span><span class="s2">&quot;/path/to/keytabs/flume.keytab&quot;</span>
+  <span class="nx">principal</span><span class="o">=</span><span class="s2">&quot;flume/flumehost1.example.com@YOURKERBEROSREALM&quot;</span><span class="p">;</span>
+<span class="p">};</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="file-channel">
+<h4>File Channel<a class="headerlink" href="#file-channel" title="Permalink to this headline">¶</a></h4>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="13%" />
+<col width="67%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">file</span></tt>.</td>
+</tr>
+<tr class="row-odd"><td>checkpointDir</td>
+<td>~/.flume/file-channel/checkpoint</td>
+<td>The directory where checkpoint file will be stored</td>
+</tr>
+<tr class="row-even"><td>useDualCheckpoints</td>
+<td>false</td>
+<td>Backup the checkpoint. If this is set to <tt class="docutils literal"><span class="pre">true</span></tt>, <tt class="docutils literal"><span class="pre">backupCheckpointDir</span></tt> <strong>must</strong> be set</td>
+</tr>
+<tr class="row-odd"><td>backupCheckpointDir</td>
+<td>&#8211;</td>
+<td>The directory where the checkpoint is backed up to. This directory <strong>must not</strong> be the same as the data directories or the checkpoint directory</td>
+</tr>
+<tr class="row-even"><td>dataDirs</td>
+<td>~/.flume/file-channel/data</td>
+<td>Comma separated list of directories for storing log files. Using multiple directories on separate disks can improve file channel performance</td>
+</tr>
+<tr class="row-odd"><td>transactionCapacity</td>
+<td>10000</td>
+<td>The maximum size of transaction supported by the channel</td>
+</tr>
+<tr class="row-even"><td>checkpointInterval</td>
+<td>30000</td>
+<td>Amount of time (in millis) between checkpoints</td>
+</tr>
+<tr class="row-odd"><td>maxFileSize</td>
+<td>2146435071</td>
+<td>Max size (in bytes) of a single log file</td>
+</tr>
+<tr class="row-even"><td>minimumRequiredSpace</td>
+<td>524288000</td>
+<td>Minimum Required free space (in bytes). To avoid data corruption, File Channel stops accepting take/put requests when free space drops below this value</td>
+</tr>
+<tr class="row-odd"><td>capacity</td>
+<td>1000000</td>
+<td>Maximum capacity of the channel</td>
+</tr>
+<tr class="row-even"><td>keep-alive</td>
+<td>3</td>
+<td>Amount of time (in sec) to wait for a put operation</td>
+</tr>
+<tr class="row-odd"><td>use-log-replay-v1</td>
+<td>false</td>
+<td>Expert: Use old replay logic</td>
+</tr>
+<tr class="row-even"><td>use-fast-replay</td>
+<td>false</td>
+<td>Expert: Replay without using queue</td>
+</tr>
+<tr class="row-odd"><td>checkpointOnClose</td>
+<td>true</td>
+<td>Controls if a checkpoint is created when the channel is closed. Creating a checkpoint on close speeds up subsequent startup of the file channel by avoiding replay.</td>
+</tr>
+<tr class="row-even"><td>encryption.activeKey</td>
+<td>&#8211;</td>
+<td>Key name used to encrypt new data</td>
+</tr>
+<tr class="row-odd"><td>encryption.cipherProvider</td>
+<td>&#8211;</td>
+<td>Cipher provider type, supported types: AESCTRNOPADDING</td>
+</tr>
+<tr class="row-even"><td>encryption.keyProvider</td>
+<td>&#8211;</td>
+<td>Key provider type, supported types: JCEKSFILE</td>
+</tr>
+<tr class="row-odd"><td>encryption.keyProvider.keyStoreFile</td>
+<td>&#8211;</td>
+<td>Path to the keystore file</td>
+</tr>
+<tr class="row-even"><td>encryption.keyProvider.keyStorePasswordFile</td>
+<td>&#8211;</td>
+<td>Path to the keystore password file</td>
+</tr>
+<tr class="row-odd"><td>encryption.keyProvider.keys</td>
+<td>&#8211;</td>
+<td>List of all keys (e.g. history of the activeKey setting)</td>
+</tr>
+<tr class="row-even"><td>encryption.keyProvider.keys.*.passwordFile</td>
+<td>&#8211;</td>
+<td>Path to the optional key password file</td>
+</tr>
+</tbody>
+</table>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">By default the File Channel uses paths for checkpoint and data
+directories that are within the user home as specified above.
+As a result if you have more than one File Channel instances
+active within the agent, only one will be able to lock the
+directories and cause the other channel initialization to fail.
+It is therefore necessary that you provide explicit paths to
+all the configured channels, preferably on different disks.
+Furthermore, as file channel will sync to disk after every commit,
+coupling it with a sink/source that batches events together may
+be necessary to provide good performance where multiple disks are
+not available for checkpoint and data directories.</p>
+</div>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">file</span>
+<span class="na">a1.channels.c1.checkpointDir</span> <span class="o">=</span> <span class="s">/mnt/flume/checkpoint</span>
+<span class="na">a1.channels.c1.dataDirs</span> <span class="o">=</span> <span class="s">/mnt/flume/data</span>
+</pre></div>
+</div>
+<p><strong>Encryption</strong></p>
+<p>Below are a few sample configurations:</p>
+<p>Generating a key with a password separate from the key store password:</p>
+<div class="highlight-bash"><div class="highlight"><pre>keytool -genseckey -alias key-0 -keypass keyPassword -keyalg AES <span class="se">\</span>
+  -keysize 128 -validity 9000 -keystore test.keystore <span class="se">\</span>
+  -storetype jceks -storepass keyStorePassword
+</pre></div>
+</div>
+<p>Generating a key with the password the same as the key store password:</p>
+<div class="highlight-bash"><div class="highlight"><pre>keytool -genseckey -alias key-1 -keyalg AES -keysize 128 -validity 9000 <span class="se">\</span>
+  -keystore src/test/resources/test.keystore -storetype jceks <span class="se">\</span>
+  -storepass keyStorePassword
+</pre></div>
+</div>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.c1.encryption.activeKey</span> <span class="o">=</span> <span class="s">key-0</span>
+<span class="na">a1.channels.c1.encryption.cipherProvider</span> <span class="o">=</span> <span class="s">AESCTRNOPADDING</span>
+<span class="na">a1.channels.c1.encryption.keyProvider</span> <span class="o">=</span> <span class="s">JCEKSFILE</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keyStoreFile</span> <span class="o">=</span> <span class="s">/path/to/my.keystore</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keyStorePasswordFile</span> <span class="o">=</span> <span class="s">/path/to/my.keystore.password</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keys</span> <span class="o">=</span> <span class="s">key-0</span>
+</pre></div>
+</div>
+<p>Let&#8217;s say you have aged key-0 out and new files should be encrypted with key-1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.c1.encryption.activeKey</span> <span class="o">=</span> <span class="s">key-1</span>
+<span class="na">a1.channels.c1.encryption.cipherProvider</span> <span class="o">=</span> <span class="s">AESCTRNOPADDING</span>
+<span class="na">a1.channels.c1.encryption.keyProvider</span> <span class="o">=</span> <span class="s">JCEKSFILE</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keyStoreFile</span> <span class="o">=</span> <span class="s">/path/to/my.keystore</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keyStorePasswordFile</span> <span class="o">=</span> <span class="s">/path/to/my.keystore.password</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keys</span> <span class="o">=</span> <span class="s">key-0 key-1</span>
+</pre></div>
+</div>
+<p>The same scenario as above, however key-0 has its own password:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels.c1.encryption.activeKey</span> <span class="o">=</span> <span class="s">key-1</span>
+<span class="na">a1.channels.c1.encryption.cipherProvider</span> <span class="o">=</span> <span class="s">AESCTRNOPADDING</span>
+<span class="na">a1.channels.c1.encryption.keyProvider</span> <span class="o">=</span> <span class="s">JCEKSFILE</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keyStoreFile</span> <span class="o">=</span> <span class="s">/path/to/my.keystore</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keyStorePasswordFile</span> <span class="o">=</span> <span class="s">/path/to/my.keystore.password</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keys</span> <span class="o">=</span> <span class="s">key-0 key-1</span>
+<span class="na">a1.channels.c1.encryption.keyProvider.keys.key-0.passwordFile</span> <span class="o">=</span> <span class="s">/path/to/key-0.password</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="spillable-memory-channel">
+<h4>Spillable Memory Channel<a class="headerlink" href="#spillable-memory-channel" title="Permalink to this headline">¶</a></h4>
+<p>The events are stored in an in-memory queue and on disk. The in-memory queue serves as the primary store and the disk as overflow.
+The disk store is managed using an embedded File channel. When the in-memory queue is full, additional incoming events are stored in
+the file channel. This channel is ideal for flows that need high throughput of memory channel during normal operation, but at the
+same time need the larger capacity of the file channel for better tolerance of intermittent sink side outages or drop in drain rates.
+The throughput will reduce approximately to file channel speeds during such abnormal situations. In case of an agent crash or restart,
+only the events stored on disk are recovered when the agent comes online. <strong>This channel is currently experimental and
+not recommended for use in production.</strong></p>
+<p>Required properties are in <strong>bold</strong>. Please refer to file channel for additional required properties.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="18%" />
+<col width="10%" />
+<col width="72%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">SPILLABLEMEMORY</span></tt></td>
+</tr>
+<tr class="row-odd"><td>memoryCapacity</td>
+<td>10000</td>
+<td>Maximum number of events stored in memory queue. To disable use of in-memory queue, set this to zero.</td>
+</tr>
+<tr class="row-even"><td>overflowCapacity</td>
+<td>100000000</td>
+<td>Maximum number of events stored in overflow disk (i.e File channel). To disable use of overflow, set this to zero.</td>
+</tr>
+<tr class="row-odd"><td>overflowTimeout</td>
+<td>3</td>
+<td>The number of seconds to wait before enabling disk overflow when memory fills up.</td>
+</tr>
+<tr class="row-even"><td>byteCapacityBufferPercentage</td>
+<td>20</td>
+<td>Defines the percent of buffer between byteCapacity and the estimated total size
+of all events in the channel, to account for data in headers. See below.</td>
+</tr>
+<tr class="row-odd"><td>byteCapacity</td>
+<td>see description</td>
+<td>Maximum <strong>bytes</strong> of memory allowed as a sum of all events in the memory queue.
+The implementation only counts the Event <tt class="docutils literal"><span class="pre">body</span></tt>, which is the reason for
+providing the <tt class="docutils literal"><span class="pre">byteCapacityBufferPercentage</span></tt> configuration parameter as well.
+Defaults to a computed value equal to 80% of the maximum memory available to
+the JVM (i.e. 80% of the -Xmx value passed on the command line).
+Note that if you have multiple memory channels on a single JVM, and they happen
+to hold the same physical events (i.e. if you are using a replicating channel
+selector from a single source) then those event sizes may be double-counted for
+channel byteCapacity purposes.
+Setting this value to <tt class="docutils literal"><span class="pre">0</span></tt> will cause this value to fall back to a hard
+internal limit of about 200 GB.</td>
+</tr>
+<tr class="row-even"><td>avgEventSize</td>
+<td>500</td>
+<td>Estimated average size of events, in bytes, going into the channel</td>
+</tr>
+<tr class="row-odd"><td>&lt;file channel properties&gt;</td>
+<td>see file channel</td>
+<td>Any file channel property with the exception of &#8216;keep-alive&#8217; and &#8216;capacity&#8217; can be used.
+The keep-alive of file channel is managed by Spillable Memory Channel. Use &#8216;overflowCapacity&#8217;
+to set the File channel&#8217;s capacity.</td>
+</tr>
+</tbody>
+</table>
+<p>In-memory queue is considered full if either memoryCapacity or byteCapacity limit is reached.</p>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">SPILLABLEMEMORY</span>
+<span class="na">a1.channels.c1.memoryCapacity</span> <span class="o">=</span> <span class="s">10000</span>
+<span class="na">a1.channels.c1.overflowCapacity</span> <span class="o">=</span> <span class="s">1000000</span>
+<span class="na">a1.channels.c1.byteCapacity</span> <span class="o">=</span> <span class="s">800000</span>
+<span class="na">a1.channels.c1.checkpointDir</span> <span class="o">=</span> <span class="s">/mnt/flume/checkpoint</span>
+<span class="na">a1.channels.c1.dataDirs</span> <span class="o">=</span> <span class="s">/mnt/flume/data</span>
+</pre></div>
+</div>
+<p>To disable the use of the in-memory queue and function like a file channel:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">SPILLABLEMEMORY</span>
+<span class="na">a1.channels.c1.memoryCapacity</span> <span class="o">=</span> <span class="s">0</span>
+<span class="na">a1.channels.c1.overflowCapacity</span> <span class="o">=</span> <span class="s">1000000</span>
+<span class="na">a1.channels.c1.checkpointDir</span> <span class="o">=</span> <span class="s">/mnt/flume/checkpoint</span>
+<span class="na">a1.channels.c1.dataDirs</span> <span class="o">=</span> <span class="s">/mnt/flume/data</span>
+</pre></div>
+</div>
+<p>To disable the use of overflow disk and function purely as an in-memory channel:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">SPILLABLEMEMORY</span>
+<span class="na">a1.channels.c1.memoryCapacity</span> <span class="o">=</span> <span class="s">100000</span>
+<span class="na">a1.channels.c1.overflowCapacity</span> <span class="o">=</span> <span class="s">0</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="pseudo-transaction-channel">
+<h4>Pseudo Transaction Channel<a class="headerlink" href="#pseudo-transaction-channel" title="Permalink to this headline">¶</a></h4>
+<div class="admonition warning">
+<p class="first admonition-title">Warning</p>
+<p class="last">The Pseudo Transaction Channel is only for unit testing purposes
+and is NOT meant for production use.</p>
+</div>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="12%" />
+<col width="6%" />
+<col width="81%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">org.apache.flume.channel.PseudoTxnMemoryChannel</span></tt></td>
+</tr>
+<tr class="row-odd"><td>capacity</td>
+<td>50</td>
+<td>The max number of events stored in the channel</td>
+</tr>
+<tr class="row-even"><td>keep-alive</td>
+<td>3</td>
+<td>Timeout in seconds for adding or removing an event</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="custom-channel">
+<h4>Custom Channel<a class="headerlink" href="#custom-channel" title="Permalink to this headline">¶</a></h4>
+<p>A custom channel is your own implementation of the Channel interface. A
+custom channel&#8217;s class and its dependencies must be included in the agent&#8217;s
+classpath when starting the Flume agent. The type of the custom channel is
+its FQCN.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="15%" />
+<col width="8%" />
+<col width="76%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, needs to be a FQCN</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.channels.c1.type</span> <span class="o">=</span> <span class="s">org.example.MyChannel</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="flume-channel-selectors">
+<h3>Flume Channel Selectors<a class="headerlink" href="#flume-channel-selectors" title="Permalink to this headline">¶</a></h3>
+<p>If the type is not specified, then defaults to &#8220;replicating&#8221;.</p>
+<div class="section" id="replicating-channel-selector-default">
+<h4>Replicating Channel Selector (default)<a class="headerlink" href="#replicating-channel-selector-default" title="Permalink to this headline">¶</a></h4>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="14%" />
+<col width="64%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">replicating</span></tt></td>
+</tr>
+<tr class="row-odd"><td>selector.optional</td>
+<td>&#8211;</td>
+<td>Set of channels to be marked as <tt class="docutils literal"><span class="pre">optional</span></tt></td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1 and its source called r1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1 c2 c3</span>
+<span class="na">a1.sources.r1.selector.type</span> <span class="o">=</span> <span class="s">replicating</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1 c2 c3</span>
+<span class="na">a1.sources.r1.selector.optional</span> <span class="o">=</span> <span class="s">c3</span>
+</pre></div>
+</div>
+<p>In the above configuration, c3 is an optional channel. Failure to write to c3 is
+simply ignored. Since c1 and c2 are not marked optional, failure to write to
+those channels will cause the transaction to fail.</p>
+</div>
+<div class="section" id="load-balancing-channel-selector">
+<h4>Load Balancing Channel Selector<a class="headerlink" href="#load-balancing-channel-selector" title="Permalink to this headline">¶</a></h4>
+<p>Load balancing channel selector provides the ability to load-balance flow over multiple channels. This
+effectively allows the incoming data to be processed on multiple threads. It maintains an indexed list of active channels on which the load must be distributed. Implementation supports distributing load using either via round_robin or random selection mechanisms. The choice of selection mechanism defaults to round_robin type, but can be overridden via configuration.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="20%" />
+<col width="63%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">load_balancing</span></tt></td>
+</tr>
+<tr class="row-odd"><td>selector.policy</td>
+<td><tt class="docutils literal"><span class="pre">round_robin</span></tt></td>
+<td>Selection mechanism. Must be either <tt class="docutils literal"><span class="pre">round_robin</span></tt> or <tt class="docutils literal"><span class="pre">random</span></tt>.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1 and its source called r1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1 c2 c3 c4</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span> <span class="s">c1 c2 c3 c4</span>
+<span class="na">a1.sources.r1.selector.type</span> <span class="o">=</span> <span class="s">load_balancing</span>
+<span class="na">a1.sources.r1.selector.policy</span> <span class="o">=</span> <span class="s">round_robin</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="multiplexing-channel-selector">
+<h4>Multiplexing Channel Selector<a class="headerlink" href="#multiplexing-channel-selector" title="Permalink to this headline">¶</a></h4>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="23%" />
+<col width="58%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>selector.type</td>
+<td>replicating</td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">multiplexing</span></tt></td>
+</tr>
+<tr class="row-odd"><td>selector.header</td>
+<td>flume.selector.header</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>selector.default</td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>selector.mapping.*</td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1 and its source called r1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1 c2 c3 c4</span>
+<span class="na">a1.sources.r1.selector.type</span> <span class="o">=</span> <span class="s">multiplexing</span>
+<span class="na">a1.sources.r1.selector.header</span> <span class="o">=</span> <span class="s">state</span>
+<span class="na">a1.sources.r1.selector.mapping.CZ</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.selector.mapping.US</span> <span class="o">=</span> <span class="s">c2 c3</span>
+<span class="na">a1.sources.r1.selector.default</span> <span class="o">=</span> <span class="s">c4</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="custom-channel-selector">
+<h4>Custom Channel Selector<a class="headerlink" href="#custom-channel-selector" title="Permalink to this headline">¶</a></h4>
+<p>A custom channel selector is your own implementation of the ChannelSelector
+interface. A custom channel selector&#8217;s class and its dependencies must be
+included in the agent&#8217;s classpath when starting the Flume agent. The type of
+the custom channel selector is its FQCN.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="11%" />
+<col width="70%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>selector.type</td>
+<td>&#8211;</td>
+<td>The component type name, needs to be your FQCN</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1 and its source called r1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.selector.type</span> <span class="o">=</span> <span class="s">org.example.MyChannelSelector</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="flume-sink-processors">
+<h3>Flume Sink Processors<a class="headerlink" href="#flume-sink-processors" title="Permalink to this headline">¶</a></h3>
+<p>Sink groups allow users to group multiple sinks into one entity.
+Sink processors can be used to provide load balancing capabilities over all
+sinks inside the group or to achieve fail over from one sink to another in
+case of temporal failure.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="10%" />
+<col width="73%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>sinks</strong></td>
+<td>&#8211;</td>
+<td>Space-separated list of sinks that are participating in the group</td>
+</tr>
+<tr class="row-odd"><td><strong>processor.type</strong></td>
+<td><tt class="docutils literal"><span class="pre">default</span></tt></td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">default</span></tt>, <tt class="docutils literal"><span class="pre">failover</span></tt> or <tt class="docutils literal"><span class="pre">load_balance</span></tt></td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinkgroups</span> <span class="o">=</span> <span class="s">g1</span>
+<span class="na">a1.sinkgroups.g1.sinks</span> <span class="o">=</span> <span class="s">k1 k2</span>
+<span class="na">a1.sinkgroups.g1.processor.type</span> <span class="o">=</span> <span class="s">load_balance</span>
+</pre></div>
+</div>
+<div class="section" id="default-sink-processor">
+<h4>Default Sink Processor<a class="headerlink" href="#default-sink-processor" title="Permalink to this headline">¶</a></h4>
+<p>Default sink processor accepts only a single sink. User is not forced
+to create processor (sink group) for single sinks. Instead user can follow
+the source - channel - sink pattern that was explained above in this user
+guide.</p>
+</div>
+<div class="section" id="failover-sink-processor">
+<h4>Failover Sink Processor<a class="headerlink" href="#failover-sink-processor" title="Permalink to this headline">¶</a></h4>
+<p>Failover Sink Processor maintains a prioritized list of sinks, guaranteeing
+that so long as one is available events will be processed (delivered).</p>
+<p>The failover mechanism works by relegating failed sinks to a pool where
+they are assigned a cool down period, increasing with sequential failures
+before they are retried. Once a sink successfully sends an event, it is
+restored to the live pool. The Sinks have a priority associated with them,
+larger the number, higher the priority. If a Sink fails while sending an Event,
+the next Sink with the highest priority shall be tried next for sending Events.
+For example, a sink with priority 100 is activated before the Sink with priority
+80. If no priority is specified, the priority is determined based on the order in which
+the Sinks are specified in configuration.</p>
+<p>To configure, set a sink group&#8217;s processor to <tt class="docutils literal"><span class="pre">failover</span></tt> and set
+priorities for all individual sinks. All specified priorities must
+be unique. Furthermore, upper limit to failover time can be set
+(in milliseconds) using <tt class="docutils literal"><span class="pre">maxpenalty</span></tt> property.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="23%" />
+<col width="8%" />
+<col width="70%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>sinks</strong></td>
+<td>&#8211;</td>
+<td>Space-separated list of sinks that are participating in the group</td>
+</tr>
+<tr class="row-odd"><td><strong>processor.type</strong></td>
+<td><tt class="docutils literal"><span class="pre">default</span></tt></td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">failover</span></tt></td>
+</tr>
+<tr class="row-even"><td><strong>processor.priority.&lt;sinkName&gt;</strong></td>
+<td>&#8211;</td>
+<td>Priority value.  &lt;sinkName&gt; must be one of the sink instances associated with the current sink group
+A higher priority value Sink gets activated earlier. A larger absolute value indicates higher priority</td>
+</tr>
+<tr class="row-odd"><td>processor.maxpenalty</td>
+<td>30000</td>
+<td>The maximum backoff period for the failed Sink (in millis)</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinkgroups</span> <span class="o">=</span> <span class="s">g1</span>
+<span class="na">a1.sinkgroups.g1.sinks</span> <span class="o">=</span> <span class="s">k1 k2</span>
+<span class="na">a1.sinkgroups.g1.processor.type</span> <span class="o">=</span> <span class="s">failover</span>
+<span class="na">a1.sinkgroups.g1.processor.priority.k1</span> <span class="o">=</span> <span class="s">5</span>
+<span class="na">a1.sinkgroups.g1.processor.priority.k2</span> <span class="o">=</span> <span class="s">10</span>
+<span class="na">a1.sinkgroups.g1.processor.maxpenalty</span> <span class="o">=</span> <span class="s">10000</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="load-balancing-sink-processor">
+<h4>Load balancing Sink Processor<a class="headerlink" href="#load-balancing-sink-processor" title="Permalink to this headline">¶</a></h4>
+<p>Load balancing sink processor provides the ability to load-balance flow over
+multiple sinks. It maintains an indexed list of active sinks on which the
+load must be distributed. Implementation supports distributing load using
+either via <tt class="docutils literal"><span class="pre">round_robin</span></tt> or <tt class="docutils literal"><span class="pre">random</span></tt> selection mechanisms.
+The choice of selection mechanism defaults to <tt class="docutils literal"><span class="pre">round_robin</span></tt> type,
+but can be overridden via configuration. Custom selection mechanisms are
+supported via custom classes that inherits from <tt class="docutils literal"><span class="pre">AbstractSinkSelector</span></tt>.</p>
+<p>When invoked, this selector picks the next sink using its configured selection
+mechanism and invokes it. For <tt class="docutils literal"><span class="pre">round_robin</span></tt> and <tt class="docutils literal"><span class="pre">random</span></tt>, if the selected sink
+fails to deliver the event, the processor picks the next available sink via
+its configured selection mechanism. This implementation does not blacklist
+the failing sink and instead continues to optimistically attempt every
+available sink. If all sinks invocations result in failure, the selector
+propagates the failure to the sink runner.</p>
+<p>If <tt class="docutils literal"><span class="pre">backoff</span></tt> is enabled, the sink processor will blacklist
+sinks that fail, removing them from selection for a given timeout. When the
+timeout ends, if the sink is still unresponsive timeout is increased
+exponentially to avoid potentially getting stuck in long waits on unresponsive
+sinks. With this disabled, in round-robin all the failed sinks load will be
+passed to the next sink in line and thus not evenly balanced.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="25%" />
+<col width="13%" />
+<col width="63%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>processor.sinks</strong></td>
+<td>&#8211;</td>
+<td>Space-separated list of sinks that are participating in the group</td>
+</tr>
+<tr class="row-odd"><td><strong>processor.type</strong></td>
+<td><tt class="docutils literal"><span class="pre">default</span></tt></td>
+<td>The component type name, needs to be <tt class="docutils literal"><span class="pre">load_balance</span></tt></td>
+</tr>
+<tr class="row-even"><td>processor.backoff</td>
+<td>false</td>
+<td>Should failed sinks be backed off exponentially.</td>
+</tr>
+<tr class="row-odd"><td>processor.selector</td>
+<td><tt class="docutils literal"><span class="pre">round_robin</span></tt></td>
+<td>Selection mechanism. Must be either <tt class="docutils literal"><span class="pre">round_robin</span></tt>, <tt class="docutils literal"><span class="pre">random</span></tt>
+or FQCN of custom class that inherits from <tt class="docutils literal"><span class="pre">AbstractSinkSelector</span></tt></td>
+</tr>
+<tr class="row-even"><td>processor.selector.maxTimeOut</td>
+<td>30000</td>
+<td>Used by backoff selectors to limit exponential backoff (in milliseconds)</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinkgroups</span> <span class="o">=</span> <span class="s">g1</span>
+<span class="na">a1.sinkgroups.g1.sinks</span> <span class="o">=</span> <span class="s">k1 k2</span>
+<span class="na">a1.sinkgroups.g1.processor.type</span> <span class="o">=</span> <span class="s">load_balance</span>
+<span class="na">a1.sinkgroups.g1.processor.backoff</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">a1.sinkgroups.g1.processor.selector</span> <span class="o">=</span> <span class="s">random</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="custom-sink-processor">
+<h4>Custom Sink Processor<a class="headerlink" href="#custom-sink-processor" title="Permalink to this headline">¶</a></h4>
+<p>Custom sink processors are not supported at the moment.</p>
+</div>
+</div>
+<div class="section" id="event-serializers">
+<h3>Event Serializers<a class="headerlink" href="#event-serializers" title="Permalink to this headline">¶</a></h3>
+<p>The <tt class="docutils literal"><span class="pre">file_roll</span></tt> sink and the <tt class="docutils literal"><span class="pre">hdfs</span></tt> sink both support the
+<tt class="docutils literal"><span class="pre">EventSerializer</span></tt> interface. Details of the EventSerializers that ship with
+Flume are provided below.</p>
+<div class="section" id="body-text-serializer">
+<h4>Body Text Serializer<a class="headerlink" href="#body-text-serializer" title="Permalink to this headline">¶</a></h4>
+<p>Alias: <tt class="docutils literal"><span class="pre">text</span></tt>. This serializer writes the body of the event to an output
+stream without any transformation or modification. The event headers are
+ignored. Configuration options are as follows:</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="14%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>appendNewline</td>
+<td>true</td>
+<td>Whether a newline will be appended to each event at write time. The default
+of true assumes that events do not contain newlines, for legacy reasons.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">file_roll</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.sink.directory</span> <span class="o">=</span> <span class="s">/var/log/flume</span>
+<span class="na">a1.sinks.k1.sink.serializer</span> <span class="o">=</span> <span class="s">text</span>
+<span class="na">a1.sinks.k1.sink.serializer.appendNewline</span> <span class="o">=</span> <span class="s">false</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="flume-event-avro-event-serializer">
+<h4>&#8220;Flume Event&#8221; Avro Event Serializer<a class="headerlink" href="#flume-event-avro-event-serializer" title="Permalink to this headline">¶</a></h4>
+<p>Alias: <tt class="docutils literal"><span class="pre">avro_event</span></tt>.</p>
+<p>This serializer writes Flume events into an Avro container file. The schema used is the same schema used for
+Flume events in the Avro RPC mechanism.</p>
+<p>This serializer inherits from the <tt class="docutils literal"><span class="pre">AbstractAvroEventSerializer</span></tt> class.</p>
+<p>Configuration options are as follows:</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="14%" />
+<col width="64%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>syncIntervalBytes</td>
+<td>2048000</td>
+<td>Avro sync interval, in approximate bytes.</td>
+</tr>
+<tr class="row-odd"><td>compressionCodec</td>
+<td>null</td>
+<td>Avro compression codec. For supported codecs, see Avro&#8217;s CodecFactory docs.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">hdfs</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.hdfs.path</span> <span class="o">=</span> <span class="s">/flume/events/%Y-%m-%d/%H%M/%S</span>
+<span class="na">a1.sinks.k1.serializer</span> <span class="o">=</span> <span class="s">avro_event</span>
+<span class="na">a1.sinks.k1.serializer.compressionCodec</span> <span class="o">=</span> <span class="s">snappy</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="avro-event-serializer">
+<h4>Avro Event Serializer<a class="headerlink" href="#avro-event-serializer" title="Permalink to this headline">¶</a></h4>
+<p>Alias: This serializer does not have an alias, and must be specified using the fully-qualified class name.</p>
+<p>This serializes Flume events into an Avro container file like the &#8220;Flume Event&#8221; Avro Event Serializer, however the
+record schema is configurable. The record schema may be specified either as a Flume configuration property or passed in an event header.</p>
+<p>To pass the record schema as part of the Flume configuration, use the property <tt class="docutils literal"><span class="pre">schemaURL</span></tt> as listed below.</p>
+<p>To pass the record schema in an event header, specify either the event header <tt class="docutils literal"><span class="pre">flume.avro.schema.literal</span></tt>
+containing a JSON-format representation of the schema or <tt class="docutils literal"><span class="pre">flume.avro.schema.url</span></tt> with a URL where
+the schema may be found (<tt class="docutils literal"><span class="pre">hdfs:/...</span></tt> URIs are supported).</p>
+<p>This serializer inherits from the <tt class="docutils literal"><span class="pre">AbstractAvroEventSerializer</span></tt> class.</p>
+<p>Configuration options are as follows:</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="14%" />
+<col width="64%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>syncIntervalBytes</td>
+<td>2048000</td>
+<td>Avro sync interval, in approximate bytes.</td>
+</tr>
+<tr class="row-odd"><td>compressionCodec</td>
+<td>null</td>
+<td>Avro compression codec. For supported codecs, see Avro&#8217;s CodecFactory docs.</td>
+</tr>
+<tr class="row-even"><td>schemaURL</td>
+<td>null</td>
+<td>Avro schema URL. Schemas specified in the header override this option.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">hdfs</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.hdfs.path</span> <span class="o">=</span> <span class="s">/flume/events/%y-%m-%d/%H%M/%S</span>
+<span class="na">a1.sinks.k1.serializer</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.hdfs.AvroEventSerializer$Builder</span>
+<span class="na">a1.sinks.k1.serializer.compressionCodec</span> <span class="o">=</span> <span class="s">snappy</span>
+<span class="na">a1.sinks.k1.serializer.schemaURL</span> <span class="o">=</span> <span class="s">hdfs://namenode/path/to/schema.avsc</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="flume-interceptors">
+<h3>Flume Interceptors<a class="headerlink" href="#flume-interceptors" title="Permalink to this headline">¶</a></h3>
+<p>Flume has the capability to modify/drop events in-flight. This is done with the help of interceptors. Interceptors
+are classes that implement <tt class="docutils literal"><span class="pre">org.apache.flume.interceptor.Interceptor</span></tt> interface. An interceptor can
+modify or even drop events based on any criteria chosen by the developer of the interceptor. Flume supports
+chaining of interceptors. This is made possible by specifying the list of interceptor builder class names
+in the configuration. Interceptors are specified as a whitespace separated list in the source configuration.
+The order in which the interceptors are specified is the order in which they are invoked.
+The list of events returned by one interceptor is passed to the next interceptor in the chain. Interceptors
+can modify or drop events. If an interceptor needs to drop events, it just does not return that event in
+the list that it returns. If it is to drop all events, then it simply returns an empty list. Interceptors
+are named components, here is an example of how they are created through configuration:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.interceptors</span> <span class="o">=</span> <span class="s">i1 i2</span>
+<span class="na">a1.sources.r1.interceptors.i1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.interceptor.HostInterceptor$Builder</span>
+<span class="na">a1.sources.r1.interceptors.i1.preserveExisting</span> <span class="o">=</span> <span class="s">false</span>
+<span class="na">a1.sources.r1.interceptors.i1.hostHeader</span> <span class="o">=</span> <span class="s">hostname</span>
+<span class="na">a1.sources.r1.interceptors.i2.type</span> <span class="o">=</span> <span class="s">org.apache.flume.interceptor.TimestampInterceptor$Builder</span>
+<span class="na">a1.sinks.k1.filePrefix</span> <span class="o">=</span> <span class="s">FlumeData.%{CollectorHost}.%Y-%m-%d</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+</pre></div>
+</div>
+<p>Note that the interceptor builders are passed to the type config parameter. The interceptors are themselves
+configurable and can be passed configuration values just like they are passed to any other configurable component.
+In the above example, events are passed to the HostInterceptor first and the events returned by the HostInterceptor
+are then passed along to the TimestampInterceptor. You can specify either the fully qualified class name (FQCN)
+or the alias <tt class="docutils literal"><span class="pre">timestamp</span></tt>. If you have multiple collectors writing to the same HDFS path, then you could also use
+the HostInterceptor.</p>
+<div class="section" id="timestamp-interceptor">
+<h4>Timestamp Interceptor<a class="headerlink" href="#timestamp-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>This interceptor inserts into the event headers the time in millis at which it processes the event. This interceptor
+inserts a header with key <tt class="docutils literal"><span class="pre">timestamp</span></tt> (or as specified by the <tt class="docutils literal"><span class="pre">header</span></tt> property) whose value is the relevant timestamp.
+This interceptor can preserve an existing timestamp if it is already present in the event headers.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="16%" />
+<col width="9%" />
+<col width="74%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, has to be <tt class="docutils literal"><span class="pre">timestamp</span></tt> or the FQCN</td>
+</tr>
+<tr class="row-odd"><td>headerName</td>
+<td>timestamp</td>
+<td>The name of the header in which to place the generated timestamp.</td>
+</tr>
+<tr class="row-even"><td>preserveExisting</td>
+<td>false</td>
+<td>If the timestamp already exists, should it be preserved - true or false</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span>  <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">seq</span>
+<span class="na">a1.sources.r1.interceptors</span> <span class="o">=</span> <span class="s">i1</span>
+<span class="na">a1.sources.r1.interceptors.i1.type</span> <span class="o">=</span> <span class="s">timestamp</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="host-interceptor">
+<h4>Host Interceptor<a class="headerlink" href="#host-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>This interceptor inserts the hostname or IP address of the host that this agent is running on. It inserts a header
+with key <tt class="docutils literal"><span class="pre">host</span></tt> or a configured key whose value is the hostname or IP address of the host, based on configuration.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="7%" />
+<col width="76%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, has to be <tt class="docutils literal"><span class="pre">host</span></tt></td>
+</tr>
+<tr class="row-odd"><td>preserveExisting</td>
+<td>false</td>
+<td>If the host header already exists, should it be preserved - true or false</td>
+</tr>
+<tr class="row-even"><td>useIP</td>
+<td>true</td>
+<td>Use the IP Address if true, else use hostname.</td>
+</tr>
+<tr class="row-odd"><td>hostHeader</td>
+<td>host</td>
+<td>The header key to be used.</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.interceptors</span> <span class="o">=</span> <span class="s">i1</span>
+<span class="na">a1.sources.r1.interceptors.i1.type</span> <span class="o">=</span> <span class="s">host</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="static-interceptor">
+<h4>Static Interceptor<a class="headerlink" href="#static-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>The Static interceptor allows the user to append a static header with a static value to all events.</p>
+<p>The current implementation does not allow specifying multiple headers at one time. Instead, the user might chain
+multiple static interceptors each defining one static header.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="16%" />
+<col width="7%" />
+<col width="77%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name, has to be <tt class="docutils literal"><span class="pre">static</span></tt></td>
+</tr>
+<tr class="row-odd"><td>preserveExisting</td>
+<td>true</td>
+<td>If configured header already exists, should it be preserved - true or false</td>
+</tr>
+<tr class="row-even"><td>key</td>
+<td>key</td>
+<td>Name of header that should be created</td>
+</tr>
+<tr class="row-odd"><td>value</td>
+<td>value</td>
+<td>Static value that should be created</td>
+</tr>
+</tbody>
+</table>
+<p>Example for agent named a1:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span>  <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">seq</span>
+<span class="na">a1.sources.r1.interceptors</span> <span class="o">=</span> <span class="s">i1</span>
+<span class="na">a1.sources.r1.interceptors.i1.type</span> <span class="o">=</span> <span class="s">static</span>
+<span class="na">a1.sources.r1.interceptors.i1.key</span> <span class="o">=</span> <span class="s">datacenter</span>
+<span class="na">a1.sources.r1.interceptors.i1.value</span> <span class="o">=</span> <span class="s">NEW_YORK</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="remove-header-interceptor">
+<h4>Remove Header Interceptor<a class="headerlink" href="#remove-header-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>This interceptor manipulates Flume event headers, by removing one or many headers. It can remove a statically defined header, headers based on a regular expression or headers in a list. If none of these is defined, or if no header matches the criteria, the Flume events are not modified.</p>
+<p>Note that if only one header needs to be removed, specifying it by name provides performance benefits over the other 2 methods.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="11%" />
+<col width="6%" />
+<col width="84%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">remove_header</span></tt></td>
+</tr>
+<tr class="row-odd"><td>withName</td>
+<td>&#8211;</td>
+<td>Name of the header to remove</td>
+</tr>
+<tr class="row-even"><td>fromList</td>
+<td>&#8211;</td>
+<td>List of headers to remove, separated with the separator specified by <tt class="docutils literal"><span class="pre">fromListSeparator</span></tt></td>
+</tr>
+<tr class="row-odd"><td>fromListSeparator</td>
+<td>\s*,\s*</td>
+<td>Regular expression used to separate multiple header names in the list specified by <tt class="docutils literal"><span class="pre">fromList</span></tt>. Default is a comma surrounded by any number of whitespace characters</td>
+</tr>
+<tr class="row-even"><td>matching</td>
+<td>&#8211;</td>
+<td>All the headers whose names match this regular expression are removed</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="uuid-interceptor">
+<h4>UUID Interceptor<a class="headerlink" href="#uuid-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>This interceptor sets a universally unique identifier on all events that are intercepted. An example UUID is <tt class="docutils literal"><span class="pre">b5755073-77a9-43c1-8fad-b7a586fc1b97</span></tt>, which represents a 128-bit value.</p>
+<p>Consider using UUIDInterceptor to automatically assign a UUID to an event if no application level unique key for the event is available. It can be important to assign UUIDs to events as soon as they enter the Flume network; that is, in the first Flume Source of the flow. This enables subsequent deduplication of events in the face of replication and redelivery in a Flume network that is designed for high availability and high performance. If an application level key is available, this  [...]
+<table border="1" class="docutils">
+<colgroup>
+<col width="13%" />
+<col width="6%" />
+<col width="81%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">org.apache.flume.sink.solr.morphline.UUIDInterceptor$Builder</span></tt></td>
+</tr>
+<tr class="row-odd"><td>headerName</td>
+<td>id</td>
+<td>The name of the Flume header to modify</td>
+</tr>
+<tr class="row-even"><td>preserveExisting</td>
+<td>true</td>
+<td>If the UUID header already exists, should it be preserved - true or false</td>
+</tr>
+<tr class="row-odd"><td>prefix</td>
+<td>&#8220;&#8221;</td>
+<td>The prefix string constant to prepend to each generated UUID</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="morphline-interceptor">
+<h4>Morphline Interceptor<a class="headerlink" href="#morphline-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>This interceptor filters the events through a <a class="reference external" href="http://cloudera.github.io/cdk/docs/current/cdk-morphlines/index.html">morphline configuration file</a> that defines a chain of transformation commands that pipe records from one command to another.
+For example the morphline can ignore certain events or alter or insert certain event headers via regular expression based pattern matching, or it can auto-detect and set a MIME type via Apache Tika on events that are intercepted. For example, this kind of packet sniffing can be used for content based dynamic routing in a Flume topology.
+MorphlineInterceptor can also help to implement dynamic routing to multiple Apache Solr collections (e.g. for multi-tenancy).</p>
+<p>Currently, there is a restriction in that the morphline of an interceptor must not generate more than one output record for each input event. This interceptor is not intended for heavy duty ETL processing - if you need this consider moving ETL processing from the Flume Source to a Flume Sink, e.g. to a MorphlineSolrSink.</p>
+<p>Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="10%" />
+<col width="4%" />
+<col width="85%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">org.apache.flume.sink.solr.morphline.MorphlineInterceptor$Builder</span></tt></td>
+</tr>
+<tr class="row-odd"><td><strong>morphlineFile</strong></td>
+<td>&#8211;</td>
+<td>The relative or absolute path on the local file system to the morphline configuration file. Example: <tt class="docutils literal"><span class="pre">/etc/flume-ng/conf/morphline.conf</span></tt></td>
+</tr>
+<tr class="row-even"><td>morphlineId</td>
+<td>null</td>
+<td>Optional name used to identify a morphline if there are multiple morphlines in a morphline config file</td>
+</tr>
+</tbody>
+</table>
+<p>Sample flume.conf file:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.avroSrc.interceptors</span> <span class="o">=</span> <span class="s">morphlineinterceptor</span>
+<span class="na">a1.sources.avroSrc.interceptors.morphlineinterceptor.type</span> <span class="o">=</span> <span class="s">org.apache.flume.sink.solr.morphline.MorphlineInterceptor$Builder</span>
+<span class="na">a1.sources.avroSrc.interceptors.morphlineinterceptor.morphlineFile</span> <span class="o">=</span> <span class="s">/etc/flume-ng/conf/morphline.conf</span>
+<span class="na">a1.sources.avroSrc.interceptors.morphlineinterceptor.morphlineId</span> <span class="o">=</span> <span class="s">morphline1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="search-and-replace-interceptor">
+<h4>Search and Replace Interceptor<a class="headerlink" href="#search-and-replace-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>This interceptor provides simple string-based search-and-replace functionality
+based on Java regular expressions. Backtracking / group capture is also available.
+This interceptor uses the same rules as in the Java Matcher.replaceAll() method.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="7%" />
+<col width="76%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">search_replace</span></tt></td>
+</tr>
+<tr class="row-odd"><td>searchPattern</td>
+<td>&#8211;</td>
+<td>The pattern to search for and replace.</td>
+</tr>
+<tr class="row-even"><td>replaceString</td>
+<td>&#8211;</td>
+<td>The replacement string.</td>
+</tr>
+<tr class="row-odd"><td>charset</td>
+<td>UTF-8</td>
+<td>The charset of the event body. Assumed by default to be UTF-8.</td>
+</tr>
+</tbody>
+</table>
+<p>Example configuration:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.avroSrc.interceptors</span> <span class="o">=</span> <span class="s">search-replace</span>
+<span class="na">a1.sources.avroSrc.interceptors.search-replace.type</span> <span class="o">=</span> <span class="s">search_replace</span>
+
+<span class="c"># Remove leading alphanumeric characters in an event body.</span>
+<span class="na">a1.sources.avroSrc.interceptors.search-replace.searchPattern</span> <span class="o">=</span> <span class="s">^[A-Za-z0-9_]+</span>
+<span class="na">a1.sources.avroSrc.interceptors.search-replace.replaceString</span> <span class="o">=</span>
+</pre></div>
+</div>
+<p>Another example:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.avroSrc.interceptors</span> <span class="o">=</span> <span class="s">search-replace</span>
+<span class="na">a1.sources.avroSrc.interceptors.search-replace.type</span> <span class="o">=</span> <span class="s">search_replace</span>
+
+<span class="c"># Use grouping operators to reorder and munge words on a line.</span>
+<span class="na">a1.sources.avroSrc.interceptors.search-replace.searchPattern</span> <span class="o">=</span> <span class="s">The quick brown ([a-z]+) jumped over the lazy ([a-z]+)</span>
+<span class="na">a1.sources.avroSrc.interceptors.search-replace.replaceString</span> <span class="o">=</span> <span class="s">The hungry $2 ate the careless $1</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="regex-filtering-interceptor">
+<h4>Regex Filtering Interceptor<a class="headerlink" href="#regex-filtering-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>This interceptor filters events selectively by interpreting the event body as text and matching the text against a configured regular expression.
+The supplied regular expression can be used to include events or exclude events.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%" />
+<col width="7%" />
+<col width="76%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">regex_filter</span></tt></td>
+</tr>
+<tr class="row-odd"><td>regex</td>
+<td>&#8221;.*&#8221;</td>
+<td>Regular expression for matching against events</td>
+</tr>
+<tr class="row-even"><td>excludeEvents</td>
+<td>false</td>
+<td>If true, regex determines events to exclude, otherwise regex determines
+events to include.</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="regex-extractor-interceptor">
+<h4>Regex Extractor Interceptor<a class="headerlink" href="#regex-extractor-interceptor" title="Permalink to this headline">¶</a></h4>
+<p>This interceptor extracts regex match groups using a specified regular expression and appends the match groups as headers on the event.
+It also supports pluggable serializers for formatting the match groups before adding them as event headers.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="21%" />
+<col width="6%" />
+<col width="73%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">regex_extractor</span></tt></td>
+</tr>
+<tr class="row-odd"><td><strong>regex</strong></td>
+<td>&#8211;</td>
+<td>Regular expression for matching against events</td>
+</tr>
+<tr class="row-even"><td><strong>serializers</strong></td>
+<td>&#8211;</td>
+<td>Space-separated list of serializers for mapping matches to header names and serializing their
+values. (See example below)
+Flume provides built-in support for the following serializers:
+<tt class="docutils literal"><span class="pre">org.apache.flume.interceptor.RegexExtractorInterceptorPassThroughSerializer</span></tt>
+<tt class="docutils literal"><span class="pre">org.apache.flume.interceptor.RegexExtractorInterceptorMillisSerializer</span></tt></td>
+</tr>
+<tr class="row-odd"><td>serializers.&lt;s1&gt;.type</td>
+<td>default</td>
+<td>Must be <tt class="docutils literal"><span class="pre">default</span></tt> (org.apache.flume.interceptor.RegexExtractorInterceptorPassThroughSerializer),
+<tt class="docutils literal"><span class="pre">org.apache.flume.interceptor.RegexExtractorInterceptorMillisSerializer</span></tt>,
+or the FQCN of a custom class that implements <tt class="docutils literal"><span class="pre">org.apache.flume.interceptor.RegexExtractorInterceptorSerializer</span></tt></td>
+</tr>
+<tr class="row-even"><td>serializers.&lt;s1&gt;.<strong>name</strong></td>
+<td>&#8211;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>serializers.*</td>
+<td>&#8211;</td>
+<td>Serializer-specific properties</td>
+</tr>
+</tbody>
+</table>
+<p>The serializers are used to map the matches to a header name and a formatted header value; by default, you only need to specify
+the header name and the default <tt class="docutils literal"><span class="pre">org.apache.flume.interceptor.RegexExtractorInterceptorPassThroughSerializer</span></tt> will be used.
+This serializer simply maps the matches to the specified header name and passes the value through as it was extracted by the regex.
+You can plug custom serializer implementations into the extractor using the fully qualified class name (FQCN) to format the matches
+in any way you like.</p>
+</div>
+<div class="section" id="example-1">
+<h4>Example 1:<a class="headerlink" href="#example-1" title="Permalink to this headline">¶</a></h4>
+<p>If the Flume event body contained <tt class="docutils literal"><span class="pre">1:2:3.4foobar5</span></tt> and the following configuration was used</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.r1.interceptors.i1.regex</span> <span class="o">=</span> <span class="s">(\\d):(\\d):(\\d)</span>
+<span class="na">a1.sources.r1.interceptors.i1.serializers</span> <span class="o">=</span> <span class="s">s1 s2 s3</span>
+<span class="na">a1.sources.r1.interceptors.i1.serializers.s1.name</span> <span class="o">=</span> <span class="s">one</span>
+<span class="na">a1.sources.r1.interceptors.i1.serializers.s2.name</span> <span class="o">=</span> <span class="s">two</span>
+<span class="na">a1.sources.r1.interceptors.i1.serializers.s3.name</span> <span class="o">=</span> <span class="s">three</span>
+</pre></div>
+</div>
+<p>The extracted event will contain the same body but the following headers will have been added <tt class="docutils literal"><span class="pre">one=&gt;1,</span> <span class="pre">two=&gt;2,</span> <span class="pre">three=&gt;3</span></tt></p>
+</div>
+<div class="section" id="example-2">
+<h4>Example 2:<a class="headerlink" href="#example-2" title="Permalink to this headline">¶</a></h4>
+<p>If the Flume event body contained <tt class="docutils literal"><span class="pre">2012-10-18</span> <span class="pre">18:47:57,614</span> <span class="pre">some</span> <span class="pre">log</span> <span class="pre">line</span></tt> and the following configuration was used</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources.r1.interceptors.i1.regex</span> <span class="o">=</span> <span class="s">^(?:\\n)?(\\d\\d\\d\\d-\\d\\d-\\d\\d\\s\\d\\d:\\d\\d)</span>
+<span class="na">a1.sources.r1.interceptors.i1.serializers</span> <span class="o">=</span> <span class="s">s1</span>
+<span class="na">a1.sources.r1.interceptors.i1.serializers.s1.type</span> <span class="o">=</span> <span class="s">org.apache.flume.interceptor.RegexExtractorInterceptorMillisSerializer</span>
+<span class="na">a1.sources.r1.interceptors.i1.serializers.s1.name</span> <span class="o">=</span> <span class="s">timestamp</span>
+<span class="na">a1.sources.r1.interceptors.i1.serializers.s1.pattern</span> <span class="o">=</span> <span class="s">yyyy-MM-dd HH:mm</span>
+</pre></div>
+</div>
+<p>the extracted event will contain the same body but the following headers will have been added <tt class="docutils literal"><span class="pre">timestamp=&gt;1350611220000</span></tt></p>
+</div>
+</div>
+<div class="section" id="flume-properties">
+<h3>Flume Properties<a class="headerlink" href="#flume-properties" title="Permalink to this headline">¶</a></h3>
+<table border="1" class="docutils">
+<colgroup>
+<col width="25%" />
+<col width="7%" />
+<col width="68%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>flume.called.from.service</td>
+<td>&#8211;</td>
+<td>If this property is specified then the Flume agent will continue
+polling for the config file even if the config file is not found
+at the expected location. Otherwise, the Flume agent will terminate
+if the config doesn&#8217;t exist at the expected location. No property
+value is needed when setting this property (eg, just specifying
+-Dflume.called.from.service is enough)</td>
+</tr>
+</tbody>
+</table>
+<div class="section" id="property-flume-called-from-service">
+<h4>Property: flume.called.from.service<a class="headerlink" href="#property-flume-called-from-service" title="Permalink to this headline">¶</a></h4>
+<p>Flume periodically polls, every 30 seconds, for changes to the specified
+config file. A Flume agent loads a new configuration from the config file if
+either an existing file is polled for the first time, or if an existing
+file&#8217;s modification date has changed since the last time it was polled.
+Renaming or moving a file does not change its modification time. When a
+Flume agent polls a non-existent file then one of two things happens: 1.
+When the agent polls a non-existent config file for the first time, then the
+agent behaves according to the flume.called.from.service property. If the
+property is set, then the agent will continue polling (always at the same
+period &#8211; every 30 seconds). If the property is not set, then the agent
+immediately terminates. ...OR... 2. When the agent polls a non-existent
+config file and this is not the first time the file is polled, then the
+agent makes no config changes for this polling period. The agent continues
+polling rather than terminating.</p>
+</div>
+</div>
+</div>
+<div class="section" id="configuration-filters">
+<h2>Configuration Filters<a class="headerlink" href="#configuration-filters" title="Permalink to this headline">¶</a></h2>
+<p>Flume provides a tool for injecting sensitive or generated data into the configuration
+in the form of configuration filters. A configuration key can be set as the value of configuration properties
+and it will be replaced by the configuration filter with the value it represents.</p>
+<div class="section" id="common-usage-of-config-filters">
+<h3>Common usage of config filters<a class="headerlink" href="#common-usage-of-config-filters" title="Permalink to this headline">¶</a></h3>
+<p>The format is similar to the Java Expression Language, however
+it is currently not a fully working EL expression parser, just a format that looks like it.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">&lt;agent_name&gt;.configfilters</span> <span class="o">=</span> <span class="s">&lt;filter_name&gt;</span>
+<span class="na">&lt;agent_name&gt;.configfilters.&lt;filter_name&gt;.type</span> <span class="o">=</span> <span class="s">&lt;filter_type&gt;</span>
+
+<span class="na">&lt;agent_name&gt;.sources.&lt;source_name&gt;.parameter</span> <span class="o">=</span> <span class="s">${&lt;filter_name&gt;[&#39;&lt;key_for_sensitive_or_generated_data&gt;&#39;]}</span>
+<span class="na">&lt;agent_name&gt;.sinks.&lt;sink_name&gt;.parameter</span> <span class="o">=</span> <span class="s">${&lt;filter_name&gt;[&#39;&lt;key_for_sensitive_or_generated_data&gt;&#39;]}</span>
+<span class="na">&lt;agent_name&gt;.&lt;component_type&gt;.&lt;component_name&gt;.parameter</span> <span class="o">=</span> <span class="s">${&lt;filter_name&gt;[&#39;&lt;key_for_sensitive_or_generated_data&gt;&#39;]}</span>
+<span class="c">#or</span>
+<span class="na">&lt;agent_name&gt;.&lt;component_type&gt;.&lt;component_name&gt;.parameter</span> <span class="o">=</span> <span class="s">${&lt;filter_name&gt;[&quot;&lt;key_for_sensitive_or_generated_data&gt;&quot;]}</span>
+<span class="c">#or</span>
+<span class="na">&lt;agent_name&gt;.&lt;component_type&gt;.&lt;component_name&gt;.parameter</span> <span class="o">=</span> <span class="s">${&lt;filter_name&gt;[&lt;key_for_sensitive_or_generated_data&gt;]}</span>
+<span class="c">#or</span>
+<span class="na">&lt;agent_name&gt;.&lt;component_type&gt;.&lt;component_name&gt;.parameter</span> <span class="o">=</span> <span class="s">some_constant_data${&lt;filter_name&gt;[&lt;key_for_sensitive_or_generated_data&gt;]}</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="environment-variable-config-filter">
+<h3>Environment Variable Config Filter<a class="headerlink" href="#environment-variable-config-filter" title="Permalink to this headline">¶</a></h3>
+<table border="1" class="docutils">
+<colgroup>
+<col width="36%" />
+<col width="11%" />
+<col width="52%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">env</span></tt></td>
+</tr>
+</tbody>
+</table>
+<div class="section" id="example">
+<h4>Example<a class="headerlink" href="#example" title="Permalink to this headline">¶</a></h4>
+<p>To hide a password in the configuration set its value as in the following example.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.configfilters</span> <span class="o">=</span> <span class="s">f1</span>
+
+<span class="na">a1.configfilters.f1.type</span> <span class="o">=</span> <span class="s">env</span>
+
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span>  <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">http</span>
+<span class="na">a1.sources.r1.keystorePassword</span> <span class="o">=</span> <span class="s">${f1[&#39;my_keystore_password&#39;]} #will get the value Secret123</span>
+</pre></div>
+</div>
+<p>Here the <tt class="docutils literal"><span class="pre">a1.sources.r1.keystorePassword</span></tt> configuration property will get the value of the <tt class="docutils literal"><span class="pre">my_keystore_password</span></tt>
+environment variable. One way to set the environment variable is to run flume agent like this:</p>
+<p><tt class="docutils literal"><span class="pre">$</span> <span class="pre">my_keystore_password=Secret123</span> <span class="pre">bin/flume-ng</span> <span class="pre">agent</span> <span class="pre">--conf</span> <span class="pre">conf</span> <span class="pre">--conf-file</span> <span class="pre">example.conf</span> <span class="pre">...</span></tt></p>
+</div>
+</div>
+<div class="section" id="external-process-config-filter">
+<h3>External Process Config Filter<a class="headerlink" href="#external-process-config-filter" title="Permalink to this headline">¶</a></h3>
+<table border="1" class="docutils">
+<colgroup>
+<col width="14%" />
+<col width="4%" />
+<col width="82%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">external</span></tt></td>
+</tr>
+<tr class="row-odd"><td><strong>command</strong></td>
+<td>&#8211;</td>
+<td>The command that will be executed to get the value for the given key. The command will be called like: <tt class="docutils literal"><span class="pre">&lt;command&gt;</span> <span class="pre">&lt;key&gt;</span></tt> And expected to return a single line value with exit code <tt class="docutils literal"><span class="pre">0</span></tt>.</td>
+</tr>
+<tr class="row-even"><td>charset</td>
+<td>UTF-8</td>
+<td>The character set of the returned string.</td>
+</tr>
+</tbody>
+</table>
+<div class="section" id="id14">
+<h4>Example<a class="headerlink" href="#id14" title="Permalink to this headline">¶</a></h4>
+<p>To hide a password in the configuration set its value as in the following example.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.configfilters</span> <span class="o">=</span> <span class="s">f1</span>
+
+<span class="na">a1.configfilters.f1.type</span> <span class="o">=</span> <span class="s">external</span>
+<span class="na">a1.configfilters.f1.command</span> <span class="o">=</span> <span class="s">/usr/bin/passwordResolver.sh</span>
+<span class="na">a1.configfilters.f1.charset</span> <span class="o">=</span> <span class="s">UTF-8</span>
+
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span>  <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">http</span>
+<span class="na">a1.sources.r1.keystorePassword</span> <span class="o">=</span> <span class="s">${f1[&#39;my_keystore_password&#39;]} #will get the value Secret123</span>
+</pre></div>
+</div>
+<p>In this example flume will run the following command to get the value</p>
+<p><tt class="docutils literal"><span class="pre">$</span> <span class="pre">/usr/bin/passwordResolver.sh</span> <span class="pre">my_keystore_password</span></tt></p>
+<p>The <tt class="docutils literal"><span class="pre">passwordResolver.sh</span></tt> will return <tt class="docutils literal"><span class="pre">Secret123</span></tt> with an exit code <tt class="docutils literal"><span class="pre">0</span></tt>.</p>
+</div>
+<div class="section" id="id15">
+<h4>Example 2<a class="headerlink" href="#id15" title="Permalink to this headline">¶</a></h4>
+<p>To generate a part of the directory for rolling file sink set its value as in the following example.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.configfilters</span> <span class="o">=</span> <span class="s">f1</span>
+
+<span class="na">a1.configfilters.f1.type</span> <span class="o">=</span> <span class="s">external</span>
+<span class="na">a1.configfilters.f1.command</span> <span class="o">=</span> <span class="s">/usr/bin/generateUniqId.sh</span>
+<span class="na">a1.configfilters.f1.charset</span> <span class="o">=</span> <span class="s">UTF-8</span>
+
+<span class="na">a1.sinks</span> <span class="o">=</span> <span class="s">k1</span>
+<span class="na">a1.sinks.k1.type</span> <span class="o">=</span> <span class="s">file_roll</span>
+<span class="na">a1.sinks.k1.channel</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.sinks.k1.sink.directory</span> <span class="o">=</span> <span class="s">/var/log/flume/agent_${f1[&#39;agent_name&#39;]} # will be /var/log/flume/agent_1234</span>
+</pre></div>
+</div>
+<p>In this example flume will run the following command to get the value</p>
+<p><tt class="docutils literal"><span class="pre">$</span> <span class="pre">/usr/bin/generateUniqId.sh</span> <span class="pre">agent_name</span></tt></p>
+<p>The <tt class="docutils literal"><span class="pre">generateUniqId.sh</span></tt> will return <tt class="docutils literal"><span class="pre">1234</span></tt> with an exit code <tt class="docutils literal"><span class="pre">0</span></tt>.</p>
+</div>
+</div>
+<div class="section" id="hadoop-credential-store-config-filter">
+<h3>Hadoop Credential Store Config Filter<a class="headerlink" href="#hadoop-credential-store-config-filter" title="Permalink to this headline">¶</a></h3>
+<p>A hadoop-common library is needed on the classpath for this feature (version 2.6+).
+If Hadoop is installed, the agent adds it to the classpath automatically.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="4%" />
+<col width="76%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>type</strong></td>
+<td>&#8211;</td>
+<td>The component type name has to be <tt class="docutils literal"><span class="pre">hadoop</span></tt></td>
+</tr>
+<tr class="row-odd"><td><strong>credential.provider.path</strong></td>
+<td>&#8211;</td>
+<td>The provider path. See the Hadoop documentation here: <a class="reference external" href="https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/CredentialProviderAPI.html#Configuring_the_Provider_Path">https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/CredentialProviderAPI.html#Configuring_the_Provider_Path</a></td>
+</tr>
+<tr class="row-even"><td>credstore.java-keystore-provider.password-file</td>
+<td>&#8211;</td>
+<td>The name of the password file if a file is used to store the password. The file must be on the classpath.
+The provider password can be set with the HADOOP_CREDSTORE_PASSWORD environment variable or left empty.</td>
+</tr>
+</tbody>
+</table>
+<div class="section" id="id16">
+<h4>Example<a class="headerlink" href="#id16" title="Permalink to this headline">¶</a></h4>
+<p>To hide a password in the configuration set its value as in the following example.</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="na">a1.sources</span> <span class="o">=</span> <span class="s">r1</span>
+<span class="na">a1.channels</span> <span class="o">=</span> <span class="s">c1</span>
+<span class="na">a1.configfilters</span> <span class="o">=</span> <span class="s">f1</span>
+
+<span class="na">a1.configfilters.f1.type</span> <span class="o">=</span> <span class="s">hadoop</span>
+<span class="na">a1.configfilters.f1.credential.provider.path</span> <span class="o">=</span> <span class="s">jceks://file/&lt;path_to_jceks file&gt;</span>
+
+<span class="na">a1.sources.r1.channels</span> <span class="o">=</span>  <span class="s">c1</span>
+<span class="na">a1.sources.r1.type</span> <span class="o">=</span> <span class="s">http</span>
+<span class="na">a1.sources.r1.keystorePassword</span> <span class="o">=</span> <span class="s">${f1[&#39;my_keystore_password&#39;]} #will get the value from the credential store</span>
+</pre></div>
+</div>
+</div>
+</div>
+</div>
+<div class="section" id="log4j-appender">
+<h2>Log4J Appender<a class="headerlink" href="#log4j-appender" title="Permalink to this headline">¶</a></h2>
+<p>Appends Log4j events to a flume agent&#8217;s avro source. A client using this
+appender must have the flume-ng-sdk in the classpath (eg,
+flume-ng-sdk-1.10.0.jar).
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="19%" />
+<col width="6%" />
+<col width="75%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>Hostname</strong></td>
+<td>&#8211;</td>
+<td>The hostname on which a remote Flume agent is running with an
+avro source.</td>
+</tr>
+<tr class="row-odd"><td><strong>Port</strong></td>
+<td>&#8211;</td>
+<td>The port at which the remote Flume agent&#8217;s avro source is
+listening.</td>
+</tr>
+<tr class="row-even"><td>UnsafeMode</td>
+<td>false</td>
+<td>If true, the appender will not throw exceptions on failure to
+send the events.</td>
+</tr>
+<tr class="row-odd"><td>AvroReflectionEnabled</td>
+<td>false</td>
+<td>Use Avro Reflection to serialize Log4j events. (Do not use when users log strings)</td>
+</tr>
+<tr class="row-even"><td>AvroSchemaUrl</td>
+<td>&#8211;</td>
+<td>A URL from which the Avro schema can be retrieved.</td>
+</tr>
+</tbody>
+</table>
+<p>Sample log4j.properties file:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c">#...</span>
+<span class="na">log4j.appender.flume</span> <span class="o">=</span> <span class="s">org.apache.flume.clients.log4jappender.Log4jAppender</span>
+<span class="na">log4j.appender.flume.Hostname</span> <span class="o">=</span> <span class="s">example.com</span>
+<span class="na">log4j.appender.flume.Port</span> <span class="o">=</span> <span class="s">41414</span>
+<span class="na">log4j.appender.flume.UnsafeMode</span> <span class="o">=</span> <span class="s">true</span>
+
+<span class="c"># configure a class&#39;s logger to output to the flume appender</span>
+<span class="na">log4j.logger.org.example.MyClass</span> <span class="o">=</span> <span class="s">DEBUG,flume</span>
+<span class="c">#...</span>
+</pre></div>
+</div>
+<p>By default each event is converted to a string by calling <tt class="docutils literal"><span class="pre">toString()</span></tt>,
+or by using the Log4j layout, if specified.</p>
+<p>If the event is an instance of
+<tt class="docutils literal"><span class="pre">org.apache.avro.generic.GenericRecord</span></tt>, <tt class="docutils literal"><span class="pre">org.apache.avro.specific.SpecificRecord</span></tt>,
+or if the property <tt class="docutils literal"><span class="pre">AvroReflectionEnabled</span></tt> is set to <tt class="docutils literal"><span class="pre">true</span></tt> then the event will be
+serialized using Avro serialization.</p>
+<p>Serializing every event with its Avro schema is inefficient, so it is good practice to
+provide a schema URL from which the schema can be retrieved by the downstream sink,
+typically the HDFS sink. If <tt class="docutils literal"><span class="pre">AvroSchemaUrl</span></tt> is not specified,
+then the schema will be included as a Flume header.</p>
+<p>Sample log4j.properties file configured to use Avro serialization:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c">#...</span>
+<span class="na">log4j.appender.flume</span> <span class="o">=</span> <span class="s">org.apache.flume.clients.log4jappender.Log4jAppender</span>
+<span class="na">log4j.appender.flume.Hostname</span> <span class="o">=</span> <span class="s">example.com</span>
+<span class="na">log4j.appender.flume.Port</span> <span class="o">=</span> <span class="s">41414</span>
+<span class="na">log4j.appender.flume.AvroReflectionEnabled</span> <span class="o">=</span> <span class="s">true</span>
+<span class="na">log4j.appender.flume.AvroSchemaUrl</span> <span class="o">=</span> <span class="s">hdfs://namenode/path/to/schema.avsc</span>
+
+<span class="c"># configure a class&#39;s logger to output to the flume appender</span>
+<span class="na">log4j.logger.org.example.MyClass</span> <span class="o">=</span> <span class="s">DEBUG,flume</span>
+<span class="c">#...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="load-balancing-log4j-appender">
+<h2>Load Balancing Log4J Appender<a class="headerlink" href="#load-balancing-log4j-appender" title="Permalink to this headline">¶</a></h2>
+<p>Appends Log4j events to a list of flume agent&#8217;s avro source. A client using this
+appender must have the flume-ng-sdk in the classpath (eg,
+flume-ng-sdk-1.10.0.jar). This appender supports a round-robin and random
+scheme for performing the load balancing. It also supports a configurable backoff
+timeout so that down agents are removed temporarily from the set of hosts.
+Required properties are in <strong>bold</strong>.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="22%" />
+<col width="11%" />
+<col width="67%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Property Name</th>
+<th class="head">Default</th>
+<th class="head">Description</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>Hosts</strong></td>
+<td>&#8211;</td>
+<td>A space-separated list of host:port at which Flume (through
+an AvroSource) is listening for events</td>
+</tr>
+<tr class="row-odd"><td>Selector</td>
+<td>ROUND_ROBIN</td>
+<td>Selection mechanism. Must be either ROUND_ROBIN,
+RANDOM, or the FQCN of a custom class that inherits from
+LoadBalancingSelector.</td>
+</tr>
+<tr class="row-even"><td>MaxBackoff</td>
+<td>&#8211;</td>
+<td>A long value representing the maximum amount of time in
+milliseconds the Load balancing client will backoff from a
+node that has failed to consume an event. Defaults to no backoff</td>
+</tr>
+<tr class="row-odd"><td>UnsafeMode</td>
+<td>false</td>
+<td>If true, the appender will not throw exceptions on failure to
+send the events.</td>
+</tr>
+<tr class="row-even"><td>AvroReflectionEnabled</td>
+<td>false</td>
+<td>Use Avro Reflection to serialize Log4j events.</td>
+</tr>
+<tr class="row-odd"><td>AvroSchemaUrl</td>
+<td>&#8211;</td>
+<td>A URL from which the Avro schema can be retrieved.</td>
+</tr>
+</tbody>
+</table>
+<p>Sample log4j.properties file configured using defaults:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c">#...</span>
+<span class="na">log4j.appender.out2</span> <span class="o">=</span> <span class="s">org.apache.flume.clients.log4jappender.LoadBalancingLog4jAppender</span>
+<span class="na">log4j.appender.out2.Hosts</span> <span class="o">=</span> <span class="s">localhost:25430 localhost:25431</span>
+
+<span class="c"># configure a class&#39;s logger to output to the flume appender</span>
+<span class="na">log4j.logger.org.example.MyClass</span> <span class="o">=</span> <span class="s">DEBUG,flume</span>
+<span class="c">#...</span>
+</pre></div>
+</div>
+<p>Sample log4j.properties file configured using RANDOM load balancing:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c">#...</span>
+<span class="na">log4j.appender.out2</span> <span class="o">=</span> <span class="s">org.apache.flume.clients.log4jappender.LoadBalancingLog4jAppender</span>
+<span class="na">log4j.appender.out2.Hosts</span> <span class="o">=</span> <span class="s">localhost:25430 localhost:25431</span>
+<span class="na">log4j.appender.out2.Selector</span> <span class="o">=</span> <span class="s">RANDOM</span>
+
+<span class="c"># configure a class&#39;s logger to output to the flume appender</span>
+<span class="na">log4j.logger.org.example.MyClass</span> <span class="o">=</span> <span class="s">DEBUG,flume</span>
+<span class="c">#...</span>
+</pre></div>
+</div>
+<p>Sample log4j.properties file configured using backoff:</p>
+<div class="highlight-properties"><div class="highlight"><pre><span class="c">#...</span>
+<span class="na">log4j.appender.out2</span> <span class="o">=</span> <span class="s">org.apache.flume.clients.log4jappender.LoadBalancingLog4jAppender</span>
+<span class="na">log4j.appender.out2.Hosts</span> <span class="o">=</span> <span class="s">localhost:25430 localhost:25431 localhost:25432</span>
+<span class="na">log4j.appender.out2.Selector</span> <span class="o">=</span> <span class="s">ROUND_ROBIN</span>
+<span class="na">log4j.appender.out2.MaxBackoff</span> <span class="o">=</span> <span class="s">30000</span>
+
+<span class="c"># configure a class&#39;s logger to output to the flume appender</span>
+<span class="na">log4j.logger.org.example.MyClass</span> <span class="o">=</span> <span class="s">DEBUG,flume</span>
+<span class="c">#...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="security">
+<h2>Security<a class="headerlink" href="#security" title="Permalink to this headline">¶</a></h2>
+<p>The HDFS sink, HBase sink, Thrift source, Thrift sink and Kite Dataset sink all support
+Kerberos authentication. Please refer to the corresponding sections for
+configuring the Kerberos-related options.</p>
+<p>Flume agent will authenticate to the kerberos KDC as a single principal, which will be
+used by different components that require kerberos authentication. The principal and
+keytab configured for Thrift source, Thrift sink, HDFS sink, HBase sink and DataSet sink
+should be the same, otherwise the component will fail to start.</p>
+</div>
+<div class="section" id="monitoring">
+<h2>Monitoring<a class="headerlink" href="#monitoring" title="Permalink to this headline">¶</a></h2>
+<p>Monitoring in Flume is still a work in progress. Changes can happen very often.
+Several Flume components report metrics to the JMX platform MBean server. These
+metrics can be queried using Jconsole.</p>
+<div class="section" id="available-component-metrics">
+<h3>Available Component Metrics<a class="headerlink" href="#available-component-metrics" title="Permalink to this headline">¶</a></h3>
+<p>The following tables show what metrics are available for components. Each component only maintains a
+set of metrics, indicated by an &#8216;x&#8217;, the unmaintained ones show default values, that is 0.
+These tables tell you where you can expect meaningful data.
+The name of the metrics should be descriptive enough, for more information you have to dig into the
+source code of the components.</p>
+<div class="section" id="sources-1">
+<h4>Sources 1<a class="headerlink" href="#sources-1" title="Permalink to this headline">¶</a></h4>
+<table border="1" class="docutils">
+<colgroup>
+<col width="31%" />
+<col width="7%" />
+<col width="7%" />
+<col width="7%" />
+<col width="6%" />
+<col width="8%" />
+<col width="24%" />
+<col width="10%" />
+</colgroup>
+<tbody valign="top">
+<tr class="row-odd"><td>&nbsp;</td>
+<td>Avro</td>
+<td>Exec</td>
+<td>HTTP</td>
+<td>JMS</td>
+<td>Kafka</td>
+<td>MultiportSyslogTCP</td>
+<td>Scribe</td>
+</tr>
+<tr class="row-even"><td>AppendAcceptedCount</td>
+<td>x</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>AppendBatchAcceptedCount</td>
+<td>x</td>
+<td>&nbsp;</td>
+<td>x</td>
+<td>x</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>AppendBatchReceivedCount</td>
+<td>x</td>
+<td>&nbsp;</td>
+<td>x</td>
+<td>x</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>AppendReceivedCount</td>
+<td>x</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
... 421158 lines suppressed ...