Java source code examples: avro.shaded.com.google.common.collect.Lists
Example 1
private Consumer<String, byte[]> createConsumer() throws Exception {
    Properties props = PropertiesHolder.getProperties(Constants.Properties.CONSUMER_CONFIG);
    props.setProperty("client.id", this.topologyId + "_consumer");
    props.setProperty("group.id", this.topologyId + "_grp");

    TopicProvider dataTopicProvider = new DataInputTopicProvider();
    TopicProvider controlTopicProvider = new ControlTopicProvider();
    TopicProvider schemaTopicProvider = new SchemaTopicProvider();

    this.controlTopics = controlTopicProvider.provideTopics();
    this.dataTopics = dataTopicProvider.provideTopics();

    List<String> topics = Lists.newArrayList();
    topics.addAll(controlTopics);
    topics.addAll(dataTopics);

    if (DbusDatasourceType.ORACLE == GlobalCache.getDatasourceType()
            || DbusDatasourceType.DB2 == GlobalCache.getDatasourceType()) {
        this.schemaTopics = schemaTopicProvider.provideTopics();
        topics.addAll(schemaTopics);
    }

    Consumer<String, byte[]> consumer = consumerProvider.consumer(props, topics);

    Map<String, Object> data = zkNodeOperator.getData();
    for (Map.Entry<String, Object> entry : data.entrySet()) {
        TopicInfo b = TopicInfo.build((Map<String, Object>) entry.getValue());
        this.pausedTopics.put(b.getTopic(), b);
    }
    consumerProvider.pause(consumer, this.pausedTopics.entrySet()
            .stream()
            .map(entry -> entry.getValue())
            .collect(Collectors.toList()));
    return consumer;
}
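A side note on the pause call above: the stream only collects the map's values, so (assuming pausedTopics is a Map keyed by topic name, as the put call suggests) the same list can be built without a stream:

    // Equivalent sketch; assumes pausedTopics is a Map<String, TopicInfo>
    consumerProvider.pause(consumer, Lists.newArrayList(this.pausedTopics.values()));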
Example 2
/**
 * Get the steps from the hierarchical string path.
 *
 * @param path the dot-separated path, e.g. "a.b.c"
 * @return the path steps as a stack, with the first step on top
 */
public static Stack<String> getPathSteps(String path) {
    if (path.startsWith(".")) {
        path = path.substring(1);
    }
    Stack<String> pathSteps = new Stack<String>();
    List<String> stepsList = Arrays.asList(path.split("\\."));
    pathSteps.addAll(Lists.reverse(stepsList));
    return pathSteps;
}
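Lists.reverse(stepsList) returns a reversed view of the split segments, so the first path step ends up on top of the stack. A minimal usage sketch (hypothetical input; assumes the method above is in scope):

    Stack<String> steps = getPathSteps(".a.b.c");
    steps.pop(); // "a"
    steps.pop(); // "b"
    steps.pop(); // "c"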
Example 3
@Test
public void testGetDeltaFieldNamesForNewSchema() {
    Configuration conf = mock(Configuration.class);
    when(conf.get(FieldAttributeBasedDeltaFieldsProvider.ATTRIBUTE_FIELD)).thenReturn("attributes_json");
    when(conf.get(FieldAttributeBasedDeltaFieldsProvider.DELTA_PROP_NAME,
            FieldAttributeBasedDeltaFieldsProvider.DEFAULT_DELTA_PROP_NAME))
            .thenReturn(FieldAttributeBasedDeltaFieldsProvider.DEFAULT_DELTA_PROP_NAME);
    AvroDeltaFieldNameProvider provider = new FieldAttributeBasedDeltaFieldsProvider(conf);
    Schema original = new Schema.Parser().parse(FULL_SCHEMA_WITH_ATTRIBUTES);
    GenericRecord record = mock(GenericRecord.class);
    when(record.getSchema()).thenReturn(original);
    List<String> fields = provider.getDeltaFieldNames(record);
    Assert.assertEquals(fields, Lists.newArrayList("scn2", "scn"));
}
Example 4
/**
 * @param reloadJson reload control message JSON
 */
private void reloadConfig(String reloadJson) {
    close();

    ZKHelper zkHelper = null;
    DBHelper dbHelper = null;
    try {
        dsInfo = new DataSourceInfo();
        zkHelper = new ZKHelper(topologyRoot, topologyID, zkServers);
        zkHelper.loadDsNameAndOffset(dsInfo);

        dbHelper = new DBHelper(zkServers);
        dbHelper.loadDsInfo(dsInfo);
        logger.info(String.format("Spout read datasource: %s", dsInfo.toString()));

        // init consumer
        dataTopicPartition = new TopicPartition(dsInfo.getDataTopic(), 0);
        ctrlTopicPartition = new TopicPartition(dsInfo.getCtrlTopic(), 0);
        List<TopicPartition> topics = Arrays.asList(dataTopicPartition, ctrlTopicPartition);
        consumer = new KafkaConsumer(zkHelper.getConsumerProps());
        consumer.assign(topics);

        // skip offset
        long oldOffset = consumer.position(dataTopicPartition);
        logger.info(String.format("reloaded offset as: %d", oldOffset));

        String offset = dsInfo.getDataTopicOffset();
        if (offset.equalsIgnoreCase("none")) {
            // do nothing
        } else if (offset.equalsIgnoreCase("begin")) {
            consumer.seekToBeginning(Lists.newArrayList(dataTopicPartition));
            logger.info(String.format("Offset seek to begin, changed as: %d", consumer.position(dataTopicPartition)));
        } else if (offset.equalsIgnoreCase("end")) {
            consumer.seekToEnd(Lists.newArrayList(dataTopicPartition));
            logger.info(String.format("Offset seek to end, changed as: %d", consumer.position(dataTopicPartition)));
        } else {
            long nOffset = Long.parseLong(offset);
            consumer.seek(dataTopicPartition, nOffset);
            logger.info(String.format("Offset changed as: %d", consumer.position(dataTopicPartition)));
        }
        dsInfo.resetDataTopicOffset();
        zkHelper.saveDsInfo(dsInfo);

        zkHelper.saveReloadStatus(reloadJson, "dispatcher-spout", true);
    } catch (Exception ex) {
        logger.error("KafkaConsumerSpout reloadConfig():", ex);
        collector.reportError(ex);
        throw new RuntimeException(ex);
    } finally {
        if (dbHelper != null) {
            dbHelper.close();
        }
        if (zkHelper != null) {
            zkHelper.close();
        }
    }
}
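The seek branches above wrap a single TopicPartition in Lists.newArrayList because the Collection-based KafkaConsumer API used here expects a Collection<TopicPartition> for seekToBeginning/seekToEnd. A plain-JDK sketch of the same pattern, assuming a consumer and a partition named tp:

    // java.util.Collections; equivalent to Lists.newArrayList(tp) for this purpose
    consumer.seekToBeginning(Collections.singletonList(tp));
    consumer.seekToEnd(Collections.singletonList(tp));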
Example 5
public OracleMessageHandler(RecordProcessListener rpListener, ConsumerListener consumerListener) {
    super(rpListener, consumerListener);
    this.schemaTopics = Lists.newArrayList(consumerListener.getSchemaTopics());
    createProcessors();
}
Example 6
public Db2MessageHandler(RecordProcessListener rpListener, ConsumerListener consumerListener) {
    super(rpListener, consumerListener);
    this.schemaTopics = Lists.newArrayList(consumerListener.getSchemaTopics());
    createProcessors();
}
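In Examples 5 and 6, Lists.newArrayList(iterable) takes a mutable copy of the listener's schema topics instead of keeping a reference to the original collection. A plain-JDK sketch of the same defensive copy, assuming getSchemaTopics() returns a Collection<String> (the ArrayList copy constructor needs a Collection, whereas the Guava helper also accepts any Iterable):

    // java.util.ArrayList; copies the elements at construction time
    this.schemaTopics = new ArrayList<>(consumerListener.getSchemaTopics());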