tier1.sources.srctest.max.rows = 1000
tier1.sources.srctest.hibernate.connection.provider_class = org.hibernate.connection.C3P0ConnectionProvider
tier1.sources.srctest.hibernate.c3p0.min_size = 1
tier1.sources.srctest.hibernate.c3p0.max_size = 10
# Source name must match the source defined above (srctest), otherwise this
# property is silently ignored and incremental loading falls back to defaults.
tier1.sources.srctest.incremental.column.name = id
tier1.channels.chtest.type = file
tier1.channels.chtest.checkpointDir = /flume/oracle/flume-ng/checkpoint
tier1.channels.chtest.dataDirs = /flume/oracle/flume-ng/data
#tier1.channels.chtest.capacity=10000
#tier1.channels.chtest.transactionCapacity=1000
#tier1.channels.chtest.byteCapacityBufferPercentage=20
#tier1.channels.chtest.byteCapacity=1600
#tier1.channels.chtest.capacity = 100
tier1.sources.srctest.channels = chtest
tier1.sinks.sinktest.channel = chtest
tier1.sinks.sinktest.type = hdfs
tier1.sinks.sinktest.hdfs.path = /flume/oracle/%Y-%m-%d
tier1.sinks.sinktest.hdfs.filePrefix = data
tier1.sinks.sinktest.hdfs.inUsePrefix = .
tier1.sinks.sinktest.hdfs.rollInterval = 30
tier1.sinks.sinktest.hdfs.rollSize = 0
tier1.sinks.sinktest.hdfs.rollCount = 0
tier1.sinks.sinktest.hdfs.batchSize = 1000
tier1.sinks.sinktest.hdfs.writeFormat = text
tier1.sinks.sinktest.hdfs.fileType = DataStream
# --- Operational notes (not Flume configuration; kept as comments) ---
# If HDFS is in safe mode, leave it before starting the agent:
#   sudo -u hdfs hadoop dfsadmin -safemode leave
# Start the agent. The --name argument must match the property prefix used
# in this file (tier1), or the agent starts with an empty configuration:
#   flume-ng agent --conf conf --conf-file test-oracle-hdfs.conf --name tier1 -Dflume.root.logger=INFO,console