Java Code Examples: scala.collection.mutable.WrappedArray
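Most of the snippets below share one idiom: a Spark SQL array column read via Row.getAs comes back at runtime as a scala.collection.mutable.WrappedArray, which is then bridged into a java.util.Collection with scala.collection.JavaConversions so it can be iterated from Java. The following is a minimal sketch of that pattern in isolation; the class name, method name, and column argument are illustrative assumptions rather than code taken from any of the examples. (In Scala 2.12+ JavaConversions is deprecated in favour of JavaConverters, but the examples here use JavaConversions.)

// Minimal sketch of the WrappedArray-to-Java-collection bridge used throughout the examples.
// Class, method, and column names are illustrative assumptions only.
import java.util.Collection;

import org.apache.spark.sql.Row;

import scala.collection.JavaConversions;
import scala.collection.mutable.WrappedArray;

public final class WrappedArrayBridge {

    @SuppressWarnings("unchecked")
    static Collection<Object> arrayColumnAsJavaCollection(Row row, String columnName) {
        // At runtime Spark hands array columns back as WrappedArray; the cast is unchecked.
        WrappedArray<Object> wrapped = (WrappedArray<Object>) row.getAs(columnName);
        // toList() materializes the Scala sequence; asJavaCollection exposes it to Java iteration.
        return JavaConversions.asJavaCollection(wrapped.toList());
    }
}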
Example 1
private void updateCollectorItemMetricDetail(CollectorItemMetricDetail collectorItemMetricDetail, Row itemRow) {
    Date timeWindowDt = itemRow.getAs("timeWindow");
    List<String> scaMetricList = Arrays.asList("coverage");
    Collection<Object> javaCollection = JavaConversions.asJavaCollection(((WrappedArray) itemRow.getAs("metrics")).toList());
    Optional.ofNullable(javaCollection)
            .orElseGet(Collections::emptyList)
            .forEach(m -> {
                GenericRowWithSchema genericRowWithSchema = (GenericRowWithSchema) m;
                String existingLabelName = genericRowWithSchema.getAs("name");
                if (scaMetricList.contains(existingLabelName)) {
                    String valueStr = genericRowWithSchema.getAs("value");
                    try {
                        double value = Double.parseDouble(valueStr);
                        MetricCount mc = getMetricCount("", value, "unit-test-coverage");
                        if (mc != null) {
                            collectorItemMetricDetail.setStrategy(getCollectionStrategy());
                            collectorItemMetricDetail.addCollectorItemMetricCount(timeWindowDt, mc);
                            collectorItemMetricDetail.setLastScanDate(timeWindowDt);
                        }
                    } catch (Exception e) {
                        LOGGER.info("Exception: Not a number, 'value' = " + valueStr, e);
                    }
                }
            });
}
Example 2
private void updateCollectorItemMetricDetail(CollectorItemMetricDetail collectorItemMetricDetail, Row itemRow) {
    Date timeWindowDt = itemRow.getAs("timeWindow");
    List<String> scaMetricList = Arrays.asList("blocker_violations", "critical_violations", "major_violations", "sqale_index");
    Collection<Object> javaCollection = JavaConversions.asJavaCollection(((WrappedArray) itemRow.getAs("metrics")).toList());
    Optional.ofNullable(javaCollection)
            .orElseGet(Collections::emptyList)
            .forEach(m -> {
                GenericRowWithSchema genericRowWithSchema = (GenericRowWithSchema) m;
                String existingLabelName = genericRowWithSchema.getAs("name");
                if (scaMetricList.contains(existingLabelName)) {
                    String valueStr = genericRowWithSchema.getAs("value");
                    try {
                        double value = Double.parseDouble(valueStr);
                        MetricCount mc = getMetricCount("", value, existingLabelName);
                        if (mc != null) {
                            collectorItemMetricDetail.setStrategy(getCollectionStrategy());
                            collectorItemMetricDetail.addCollectorItemMetricCount(timeWindowDt, mc);
                            collectorItemMetricDetail.setLastScanDate(timeWindowDt);
                        }
                    } catch (NumberFormatException e) {
                        LOGGER.info("Exception: Not a number, 'value' = " + valueStr, e);
                    }
                }
            });
}
Example 3
private void updateCollectorItemMetricDetail(CollectorItemMetricDetail collectorItemMetricDetail, Row itemRow) {
    Date timeWindowDt = itemRow.getAs("timeWindow");
    List<String> metricList = Arrays.asList("High", "Medium", "Low");
    Collection<Object> javaCollection = JavaConversions.asJavaCollection(((WrappedArray) itemRow.getAs("metrics")).toList());
    Optional.ofNullable(javaCollection)
            .orElseGet(Collections::emptyList)
            .forEach(m -> {
                GenericRowWithSchema genericRowWithSchema = (GenericRowWithSchema) m;
                String existingLabelName = genericRowWithSchema.getAs("name");
                if (metricList.contains(existingLabelName)) {
                    String valueStr = genericRowWithSchema.getAs("value");
                    try {
                        double value = Double.parseDouble(valueStr);
                        MetricCount mc = getMetricCount("", value, existingLabelName);
                        if (mc != null) {
                            collectorItemMetricDetail.setStrategy(getCollectionStrategy());
                            collectorItemMetricDetail.addCollectorItemMetricCount(timeWindowDt, mc);
                            collectorItemMetricDetail.setLastScanDate(timeWindowDt);
                        }
                    } catch (NumberFormatException e) {
                        LOGGER.info("Exception: Not a number, 'value' = " + valueStr, e);
                    }
                }
            });
}
Example 4
private void updateCollectorItemMetricDetail(CollectorItemMetricDetail collectorItemMetricDetail, Row itemRow, String type) {
    Date timeWindowDt = itemRow.getAs("timeWindow");
    Collection<Object> javaCollection = JavaConversions.asJavaCollection(((WrappedArray) itemRow.getAs(type)).toList());
    Optional.ofNullable(javaCollection)
            .orElseGet(Collections::emptyList)
            .forEach(m -> {
                GenericRowWithSchema genericRowWithSchema = (GenericRowWithSchema) m;
                String level = genericRowWithSchema.getAs("level");
                int value = genericRowWithSchema.getAs("count");
                MetricCount mc = getMetricCount(level, value, type);
                if (mc != null) {
                    collectorItemMetricDetail.setStrategy(getCollectionStrategy());
                    collectorItemMetricDetail.addCollectorItemMetricCount(timeWindowDt, mc);
                    collectorItemMetricDetail.setLastScanDate(timeWindowDt);
                }
            });
}
Example 5
public static final List<DashboardCollectorItem> getPipelineDashboardCollectorItems(Dataset<Row> ds) {
    List<DashboardCollectorItem> arr = new ArrayList<>();
    List<Row> dd = ds.collectAsList();
    dd.forEach(row -> {
        WrappedArray dashboardIds = row.getAs("dashboardIds");
        Iterator iterdashboardIds = dashboardIds.iterator();
        WrappedArray itemArray = row.getAs("collectorItems");
        Iterator iter = itemArray.iterator();
        for (int i = 0; i < dashboardIds.length(); i++) {
            String productName = row.getAs("productName");
            String componentName = row.getAs("componentName");
            String dashboardId = (String) ((GenericRowWithSchema) row.getAs("dashboardId")).values()[0];
            List<String> itemIds = new ArrayList<>();
            DashboardCollectorItem dashboardCollectorItem = new DashboardCollectorItem();
            String grs = (String) iterdashboardIds.next();
            dashboardCollectorItem.setDashboardId(grs);
            GenericRowWithSchema collId = (GenericRowWithSchema) iter.next();
            itemIds.add((String) collId.get(0));
            dashboardCollectorItem.setItems(itemIds);
            String dashboardTitle = row.getAs("title");
            String key = productName + DELIMITER + componentName + DELIMITER + dashboardTitle;
            dashboardCollectorItem.setName(key);
            dashboardCollectorItem.setProductDashboardIds(dashboardId);
            arr.add(dashboardCollectorItem);
        }
    });
    return arr;
}
Example 6
private void addProductToPortfolio(Portfolio portfolio, Row productRow) {
    String productName = productRow.getAs("productName");
    String productDept = productRow.getAs("ownerDept");
    String commonName = productRow.getAs("commonName");
    String productId = (String) ((GenericRowWithSchema) productRow.getAs("productId")).values()[0];
    LOGGER.debug(" Product Name = " + productName + " ; Owner Dept = " + productDept);
    // For a given portfolio, check if the current product already exists in the product list for the portfolio
    // If not, add it to the product list
    Product product =
            Optional.ofNullable(portfolio.getProducts())
                    .orElseGet(Collections::emptyList).stream()
                    .filter(p -> p.getName().equalsIgnoreCase(productName)
                            && p.getLob().equalsIgnoreCase(productDept))
                    .findFirst().orElse(null);
    if (product == null) {
        product = new Product();
        product.setId(new ObjectId(productId));
        product.setLob(productDept);
        product.setName(productName);
        product.setCommonName(commonName);
        product.setMetricLevel(MetricLevel.PRODUCT);
    }
    if (productRow.getAs("environments") != null) {
        Collection<Object> environmentNames = JavaConversions.asJavaCollection(((WrappedArray) productRow.getAs("environments")).toStream().toList());
        addEnvironmentsToProduct(product, environmentNames);
    }
    if (productRow.getAs("components") != null) {
        Collection<Object> componentNames = JavaConversions.asJavaCollection(((WrappedArray) productRow.getAs("components")).toStream().toList());
        addComponentsToProduct(product, componentNames);
    }
    product.addOwner(new PeopleRoleRelation(getPeople(productRow.getAs("appServiceOwner"), "appServiceOwner"), RoleRelationShipType.AppServiceOwner));
    product.addOwner(new PeopleRoleRelation(getPeople(productRow.getAs("supportOwner"), "supportOwner"), RoleRelationShipType.SupportOwner));
    product.addOwner(new PeopleRoleRelation(getPeople(productRow.getAs("developmentOwner"), "developmentOwner"), RoleRelationShipType.DevelopmentOwner));
    portfolio.addProduct(product);
}
Example 7
private void addProductToLob(Lob lob, Row productRow) {
    String productName = productRow.getAs("productName");
    String productDept = productRow.getAs("ownerDept");
    String commonName = productRow.getAs("commonName");
    String productId = (String) ((GenericRowWithSchema) productRow.getAs("productId")).values()[0];
    LOGGER.debug(" Product Name = " + productName + " ; Owner Dept = " + productDept);
    // For a given LOB, check if the current product already exists in its product list
    // If not, add it to the product list
    Product product =
            Optional.ofNullable(lob.getProducts())
                    .orElseGet(Collections::emptyList).stream()
                    .filter(p -> p.getName().equalsIgnoreCase(productName)
                            && p.getLob().equalsIgnoreCase(productDept))
                    .findFirst().orElse(null);
    if (product == null) {
        product = new Product();
        product.setId(new ObjectId(productId));
        product.setLob(productDept);
        product.setName(productName);
        product.setCommonName(commonName);
        product.setMetricLevel(MetricLevel.PRODUCT);
    }
    if (productRow.getAs("environments") != null) {
        Collection<Object> environmentNames = JavaConversions.asJavaCollection(((WrappedArray) productRow.getAs("environments")).toStream().toList());
        addEnvironmentsToProduct(product, environmentNames);
    }
    if (productRow.getAs("components") != null) {
        Collection<Object> componentNames = JavaConversions.asJavaCollection(((WrappedArray) productRow.getAs("components")).toStream().toList());
        addComponentsToProduct(product, componentNames);
    }
    product.addOwner(new PeopleRoleRelation(getPeople(productRow.getAs("appServiceOwner"), "appServiceOwner"), RoleRelationShipType.AppServiceOwner));
    product.addOwner(new PeopleRoleRelation(getPeople(productRow.getAs("supportOwner"), "supportOwner"), RoleRelationShipType.SupportOwner));
    product.addOwner(new PeopleRoleRelation(getPeople(productRow.getAs("developmentOwner"), "developmentOwner"), RoleRelationShipType.DevelopmentOwner));
    lob.addProduct(product);
}
Example 8
private void updateCollectorItemMetricDetail(CollectorItemMetricDetail collectorItemMetricDetail, Row itemRow) {
    Date timeWindowDt = itemRow.getAs("timeWindow");
    LOGGER.info("TimeWindow:" + timeWindowDt);
    LOGGER.info("itemRow :" + itemRow);
    Collection<Object> javaCollection = JavaConversions.asJavaCollection(((WrappedArray) itemRow.getAs("prodStageList")).toList());
    Optional.ofNullable(javaCollection)
            .orElseGet(Collections::emptyList).stream().map(m -> (GenericRowWithSchema) m).forEach(genericRowWithSchema -> {
                Long pipelineTimeL = genericRowWithSchema.getAs("timestamp");
                Date dateObj = new Timestamp(new Date(pipelineTimeL).getTime());
                LOGGER.info("Date Object :" + dateObj);
                Long scmTimeL = genericRowWithSchema.getAs("scmCommitTimestamp");
                Long pipelineTimeAfterIgnore = pipelineTimeL / 1000;
                Long scmTimeAfterIgnore = scmTimeL / 1000;
                try {
                    Long diffTimestamp = Math.abs(pipelineTimeAfterIgnore - scmTimeAfterIgnore);
                    String strTimestampInsec = Long.toString(diffTimestamp);
                    double value = Double.parseDouble(strTimestampInsec);
                    MetricCount mc = getMetricCount("", value, "pipeline-lead-time");
                    if (mc != null) {
                        collectorItemMetricDetail.setStrategy(getCollectionStrategy());
                        collectorItemMetricDetail.addCollectorItemMetricCount(dateObj, mc);
                        collectorItemMetricDetail.setLastScanDate(dateObj);
                    }
                } catch (NumberFormatException e) {
                    LOGGER.info("Exception: Not a number, 'value' = " + scmTimeAfterIgnore, e);
                }
            });
}
Example 9
@Override
public CMMModel load(String path) {
    org.apache.spark.ml.util.DefaultParamsReader.Metadata metadata = DefaultParamsReader.loadMetadata(path, sc(), CMMModel.class.getName());
    String pipelinePath = new Path(path, "pipelineModel").toString();
    PipelineModel pipelineModel = PipelineModel.load(pipelinePath);
    String dataPath = new Path(path, "data").toString();
    DataFrame df = sqlContext().read().format("parquet").load(dataPath);
    Row row = df.select("markovOrder", "weights", "tagDictionary").head();
    // load the Markov order
    MarkovOrder order = MarkovOrder.values()[row.getInt(0) - 1];
    // load the weight vector
    Vector w = row.getAs(1);
    // load the tag dictionary
    @SuppressWarnings("unchecked")
    scala.collection.immutable.HashMap<String, WrappedArray<Integer>> td = (scala.collection.immutable.HashMap<String, WrappedArray<Integer>>) row.get(2);
    Map<String, Set<Integer>> tagDict = new HashMap<String, Set<Integer>>();
    Iterator<Tuple2<String, WrappedArray<Integer>>> iterator = td.iterator();
    while (iterator.hasNext()) {
        Tuple2<String, WrappedArray<Integer>> tuple = iterator.next();
        Set<Integer> labels = new HashSet<Integer>();
        scala.collection.immutable.List<Integer> list = tuple._2().toList();
        for (int i = 0; i < list.size(); i++)
            labels.add(list.apply(i));
        tagDict.put(tuple._1(), labels);
    }
    // build a CMM model
    CMMModel model = new CMMModel(pipelineModel, w, order, tagDict);
    DefaultParamsReader.getAndSetParams(model, metadata);
    return model;
}
Example 10
@Override
public void registerClasses(Kryo kryo) {
    // MicrobatchSource is serialized as data and may not be Kryo-serializable.
    kryo.register(MicrobatchSource.class, new StatelessJavaSerializer());
    kryo.register(ValueAndCoderLazySerializable.class, new ValueAndCoderKryoSerializer());
    kryo.register(ArrayList.class);
    kryo.register(ByteArray.class);
    kryo.register(HashBasedTable.class);
    kryo.register(KV.class);
    kryo.register(LinkedHashMap.class);
    kryo.register(Object[].class);
    kryo.register(PaneInfo.class);
    kryo.register(StateAndTimers.class);
    kryo.register(TupleTag.class);
    kryo.register(WrappedArray.ofRef.class);
    try {
        kryo.register(
                Class.forName("org.apache.beam.sdk.util.WindowedValue$TimestampedValueInGlobalWindow"));
        kryo.register(
                Class.forName(
                        "org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.HashBasedTable$Factory"));
    } catch (ClassNotFoundException e) {
        throw new IllegalStateException("Unable to register classes with kryo.", e);
    }
}
Example 11
/**
 * Read the input GeoJSON files into a Spark Dataset.
 * GeoJSON attributes are nested under the "properties" column and the geometry under the "geometry" column,
 * so this method expands them into top-level columns and then converts the GeoJSON geometry into WKT using a UDF.
 *
 * @return a Spark Dataset containing the data.
 */
private Dataset<Row> readGeoJSON() {
    Dataset<Row> dataset = spark.read()
            .option("multiLine", true)
            .format("json")
            .json(filenames);
    // Expand the fields
    dataset = dataset.drop("_corrupt_record").filter(dataset.col("geometry").isNotNull());
    StructType schema = dataset.schema();
    StructField[] gj_fields = schema.fields();
    for (StructField sf : gj_fields) {
        DataType dt = sf.dataType();
        if (dt instanceof StructType) {
            StructType st = (StructType) dt;
            if (st.fields().length > 0) {
                String column_name = sf.name();
                for (String field : st.fieldNames())
                    dataset = dataset.withColumn(field, functions.explode(functions.array(column_name + "." + field)));
                dataset = dataset.drop(column_name);
            }
        }
    }
    // Convert the GeoJSON geometry into WKT
    UDF2<String, WrappedArray, String> coords2WKT =
            (String type, WrappedArray coords) -> Coordinates2WKT.convert.apply(type, coords);
    spark.udf().register("coords2WKT", coords2WKT, DataTypes.StringType);
    dataset = dataset.withColumn("geometry",
            functions.callUDF("coords2WKT", dataset.col("type"), dataset.col("coordinates")));
    dataset = dataset.drop(dataset.col("type")).drop(dataset.col("coordinates"));
    return dataset;
}
Example 12
@Override
public void write(final Kryo kryo, final Output output, final WrappedArray<T> iterable) {
    output.writeVarInt(iterable.size(), true);
    JavaConversions.asJavaCollection(iterable).forEach(t -> {
        kryo.writeClassAndObject(output, t);
    });
}
Example 13
@Override
public WrappedArray<T> read(final Kryo kryo, final Input input, final Class<WrappedArray<T>> aClass) {
    final int size = input.readVarInt(true);
    final Object[] array = new Object[size];
    for (int i = 0; i < size; i++) {
        array[i] = kryo.readClassAndObject(input);
    }
    return new WrappedArray.ofRef<>((T[]) array);
}
Example 14
private SparkIoRegistry() {
    try {
        super.register(GryoIo.class, Tuple2.class, new Tuple2Serializer());
        super.register(GryoIo.class, Tuple2[].class, null);
        super.register(GryoIo.class, Tuple3.class, new Tuple3Serializer());
        super.register(GryoIo.class, Tuple3[].class, null);
        super.register(GryoIo.class, CompactBuffer.class, new CompactBufferSerializer());
        super.register(GryoIo.class, CompactBuffer[].class, null);
        super.register(GryoIo.class, CompressedMapStatus.class, null);
        super.register(GryoIo.class, BlockManagerId.class, null);
        super.register(GryoIo.class, HighlyCompressedMapStatus.class, new ExternalizableSerializer()); // Externalizable is implemented, so this is okay
        super.register(GryoIo.class, TorrentBroadcast.class, null);
        super.register(GryoIo.class, PythonBroadcast.class, null);
        super.register(GryoIo.class, BoxedUnit.class, null);
        super.register(GryoIo.class, Class.forName("scala.reflect.ClassTag$$anon$1"), new JavaSerializer());
        super.register(GryoIo.class, Class.forName("scala.reflect.ManifestFactory$$anon$1"), new JavaSerializer());
        super.register(GryoIo.class, Class.forName("org.apache.spark.internal.io.FileCommitProtocol$TaskCommitMessage"), new JavaSerializer());
        super.register(GryoIo.class, Class.forName("org.apache.spark.internal.io.FileCommitProtocol$EmptyTaskCommitMessage$"), new JavaSerializer());
        super.register(GryoIo.class, Class.forName("scala.collection.immutable.Map$EmptyMap$"), new JavaSerializer());
        super.register(GryoIo.class, Class.forName("scala.collection.immutable.Map"), new JavaSerializer());
        super.register(GryoIo.class, Class.forName("scala.None$"), new JavaSerializer());
        super.register(GryoIo.class, Class.forName("scala.Some$"), new JavaSerializer());
        super.register(GryoIo.class, Class.forName("scala.Some"), new JavaSerializer());
        super.register(GryoIo.class, WrappedArray.ofRef.class, new WrappedArraySerializer());
        super.register(GryoIo.class, MessagePayload.class, null);
        super.register(GryoIo.class, ViewIncomingPayload.class, null);
        super.register(GryoIo.class, ViewOutgoingPayload.class, null);
        super.register(GryoIo.class, ViewPayload.class, null);
        super.register(GryoIo.class, SerializableConfiguration.class, new JavaSerializer());
        super.register(GryoIo.class, VertexWritable.class, new VertexWritableSerializer());
        super.register(GryoIo.class, ObjectWritable.class, new ObjectWritableSerializer());
    } catch (final ClassNotFoundException e) {
        throw new IllegalStateException(e);
    }
}
Example 15
public static void main(String[] args) {
    SparkSession spark = SparkSession
            .builder()
            .appName("JavaTokenizerExample")
            .getOrCreate();
    // $example on$
    List<Row> data = Arrays.asList(
            RowFactory.create(0, "Hi I heard about Spark"),
            RowFactory.create(1, "I wish Java could use case classes"),
            RowFactory.create(2, "Logistic,regression,models,are,neat")
    );
    StructType schema = new StructType(new StructField[]{
            new StructField("id", DataTypes.IntegerType, false, Metadata.empty()),
            new StructField("sentence", DataTypes.StringType, false, Metadata.empty())
    });
    Dataset<Row> sentenceDataFrame = spark.createDataFrame(data, schema);
    Tokenizer tokenizer = new Tokenizer().setInputCol("sentence").setOutputCol("words");
    RegexTokenizer regexTokenizer = new RegexTokenizer()
            .setInputCol("sentence")
            .setOutputCol("words")
            .setPattern("\\W"); // alternatively .setPattern("\\w+").setGaps(false);
    spark.udf().register("countTokens", new UDF1<WrappedArray, Integer>() {
        @Override
        public Integer call(WrappedArray words) {
            return words.size();
        }
    }, DataTypes.IntegerType);
    Dataset<Row> tokenized = tokenizer.transform(sentenceDataFrame);
    tokenized.select("sentence", "words")
            .withColumn("tokens", callUDF("countTokens", col("words"))).show(false);
    Dataset<Row> regexTokenized = regexTokenizer.transform(sentenceDataFrame);
    regexTokenized.select("sentence", "words")
            .withColumn("tokens", callUDF("countTokens", col("words"))).show(false);
    // $example off$
    spark.stop();
}
Example 16
private LinkedHashMap<Class<?>, Serializer<?>> getExtraRegistrations() {
    /* The map returned by this method MUST have a fixed iteration order!
     *
     * The order itself is irrelevant, so long as it is completely stable at runtime.
     *
     * LinkedHashMap satisfies this requirement (its contract specifies
     * iteration in key-insertion order).
     */
    final LinkedHashMap<Class<?>, Serializer<?>> m = new LinkedHashMap<>();
    // The following entries were copied from GryoSerializer's constructor.
    // This could be turned into a static collection on GryoSerializer to avoid
    // duplication, but it would be a bit cumbersome to do so without disturbing
    // the ordering of the existing entries in that constructor, since not all
    // of the entries are for TinkerPop (and the ordering is significant).
    try {
        m.put(Class.forName("scala.reflect.ClassTag$$anon$1"), new JavaSerializer());
        m.put(Class.forName("scala.reflect.ManifestFactory$$anon$1"), new JavaSerializer());
        m.put(Class.forName("org.apache.spark.internal.io.FileCommitProtocol$TaskCommitMessage"), new JavaSerializer());
        m.put(Class.forName("org.apache.spark.internal.io.FileCommitProtocol$EmptyTaskCommitMessage$"), new JavaSerializer());
    } catch (final ClassNotFoundException e) {
        throw new IllegalStateException(e.getMessage(), e);
    }
    m.put(WrappedArray.ofRef.class, null);
    m.put(MessagePayload.class, null);
    m.put(ViewIncomingPayload.class, null);
    m.put(ViewOutgoingPayload.class, null);
    m.put(ViewPayload.class, null);
    m.put(VertexWritable.class, new UnshadedSerializerAdapter<>(new VertexWritableSerializer()));
    m.put(ObjectWritable.class, new UnshadedSerializerAdapter<>(new ObjectWritableSerializer<>()));
    //
    m.put(HadoopVertex.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.VertexSerializer()));
    m.put(HadoopVertexProperty.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.VertexPropertySerializer()));
    m.put(HadoopProperty.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.PropertySerializer()));
    m.put(HadoopEdge.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.EdgeSerializer()));
    //
    m.put(ComputerGraph.ComputerVertex.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.VertexSerializer()));
    m.put(ComputerGraph.ComputerVertexProperty.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.VertexPropertySerializer()));
    m.put(ComputerGraph.ComputerProperty.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.PropertySerializer()));
    m.put(ComputerGraph.ComputerEdge.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.EdgeSerializer()));
    //
    m.put(StarGraph.StarEdge.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.EdgeSerializer()));
    m.put(StarGraph.StarVertex.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.VertexSerializer()));
    m.put(StarGraph.StarProperty.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.PropertySerializer()));
    m.put(StarGraph.StarVertexProperty.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.VertexPropertySerializer()));
    //
    m.put(MutablePath.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.PathSerializer()));
    m.put(ImmutablePath.class, new UnshadedSerializerAdapter<>(new GryoSerializersV1d0.PathSerializer()));
    //
    m.put(CompactBuffer[].class, null);
    // TODO: VoidSerializer is a default serializer and thus may not be needed (if it is, you can't use FieldSerializer).
    // TODO: We will have to copy/paste the shaded DefaultSerializer.VoidSerializer into an unshaded form.
    //m.put(void.class, null);
    //m.put(Void.class, null);
    return m;
}
Example 17
@Override
protected List<ContainerEntry> getContained(Object container) {
    WrappedArray containedArray = (WrappedArray) container;
    List<ContainerEntry> containedEntries = new ArrayList<>();
    for (int i = 0; i < containedArray.length(); i++) {
        GenericRowWithSchema resourceContainer = (GenericRowWithSchema) containedArray.apply(i);
        // The number of contained fields will be low, so this nested loop has low cost
        for (int j = 0; j < resourceContainer.schema().fields().length; j++) {
            if (resourceContainer.get(j) != null) {
                GenericRowWithSchema row = (GenericRowWithSchema) resourceContainer.get(j);
                String columnName = resourceContainer.schema().fields()[j].name();
                containedEntries.add(new ContainerEntry(columnName, row));
                break;
            }
        }
    }
    return containedEntries;
}