Java Source Code Examples: com.google.api.client.util.Maps
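com.google.api.client.util.Maps is a small utility class in the Google HTTP Client Library for Java that exposes static factory methods for creating map instances with the type parameters inferred from the assignment target. The examples on this page use Maps.newHashMap() and Maps.newLinkedHashMap(). The following is a minimal, self-contained sketch of those two calls; the class and variable names here are illustrative only and are not taken from the examples below.

import com.google.api.client.util.Maps;

import java.util.Map;

public class MapsUsageSketch {
  public static void main(String[] args) {
    // Type parameters are inferred from the assignment target.
    Map<String, Integer> counts = Maps.newHashMap();
    counts.put("requests", 1);

    // An explicit type witness also works, as in Example 1 below.
    Map<String, String> byKey = Maps.<String, String>newHashMap();
    byKey.put("id", "value");

    // newLinkedHashMap() keeps insertion order (backed by a LinkedHashMap), as used in Example 5 below.
    Map<String, String> ordered = Maps.newLinkedHashMap();
    ordered.put("first", "a");
    ordered.put("second", "b");

    System.out.println(counts + " " + byKey + " " + ordered);
  }
}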

Example 1: a values() implementation for a lock-protected, datastore-backed map that rebuilds the memcache while listing all stored entities.
@Override
public Collection<V> values() throws IOException {
  lock.lock();
  try {
    // Unfortunately no getKeys() method on MemcacheService, so the only option is to clear all
    // and re-populate the memcache from scratch. This is clearly inefficient.
    if (memcache != null) {
      memcache.clearAll();
    }
    List<V> result = Lists.newArrayList();
    Map<String, V> map = memcache != null ? Maps.<String, V>newHashMap() : null;
    for (Entity entity : query(false)) {
      V value = deserialize(entity);
      result.add(value);
      if (map != null) {
        map.put(entity.getKey().getName(), value);
      }
    }
    if (memcache != null) {
      memcache.putAll(map, memcacheExpiration);
    }
    return Collections.unmodifiableList(result);
  } finally {
    lock.unlock();
  }
}
 
Example 2: a FileDataStore constructor that either creates a new data file or loads the existing key-value map.
FileDataStore(FileDataStoreFactory dataStore, File dataDirectory, String id)
    throws IOException {
  super(dataStore, id);
  this.dataFile = new File(dataDirectory, id);
  // error if it is a symbolic link
  if (IOUtils.isSymbolicLink(dataFile)) {
    throw new IOException("unable to use a symbolic link: " + dataFile);
  }
  // create new file (if necessary)
  if (dataFile.createNewFile()) {
    keyValueMap = Maps.newHashMap();
    // save the credentials to create a new file
    save();
  } else {
    // load credentials from existing file
    keyValueMap = IOUtils.deserialize(new FileInputStream(dataFile));
  }
}
 
Example 3: a CustomDataStore constructor that starts from an empty map and deserializes the data file if it exists.
CustomDataStore( CustomDataStoreFactory dataStore, File dataDirectory, String id ) throws IOException {
  super( dataStore, id );
  this.dataDirectory = dataDirectory;
  this.dataFile = new File( this.dataDirectory, getId() );

  if ( IOUtils.isSymbolicLink( this.dataFile ) ) {
    throw new IOException( "unable to use a symbolic link: " + this.dataFile );
  }

  this.keyValueMap = Maps.newHashMap();

  if ( this.dataFile.exists() ) {
    this.keyValueMap = IOUtils.deserialize( new FileInputStream( this.dataFile ) );
  }
}
 
Example 4: a VerifyContext constructor that builds a secondary index of ConfigContainers keyed by config file name.
public VerifyContext(Map<Class<?>, ConfigContainer<?, IConfig<?>>> configContainers) {
    this.configContainers = configContainers;
    this.configContainersByFileName = Maps.newHashMap();
    for (ConfigContainer<?, IConfig<?>> t : configContainers.values()) {
        this.configContainersByFileName.put(t.getMetaData().getFileName(), t);
    }
}
 
Example 5: lazily creating and caching a per-config schema map with Maps.newLinkedHashMap().
/**
 * Gets all schemas for an API config.
 *
 * @return a {@link Map} from {@link TypeToken} to {@link Schema}. If there are no schemas for
 * this config, an empty map is returned.
 */
private Map<TypeToken<?>, Schema> getAllTypesForConfig(ApiConfig config) {
  Map<TypeToken<?>, Schema> typesForConfig = types.get(config.getSerializationConfig());
  if (typesForConfig == null) {
    typesForConfig = Maps.newLinkedHashMap();
    types.put(config.getSerializationConfig(), typesForConfig);
  }
  return typesForConfig;
}
 
Example 6: assigning integer labels to a main tuple tag and its secondary tags.
private Map<TupleTag<?>, Integer> transformTupleTagsToLabels(TupleTag<?> mainTag, Set<TupleTag<?>> secondaryTags) {
	Map<TupleTag<?>, Integer> tagToLabelMap = Maps.newHashMap();
	tagToLabelMap.put(mainTag, MAIN_TAG_INDEX);
	int count = MAIN_TAG_INDEX + 1;
	for (TupleTag<?> tag : secondaryTags) {
		if (!tagToLabelMap.containsKey(tag)) {
			tagToLabelMap.put(tag, count++);
		}
	}
	return tagToLabelMap;
}
 
Example 7: converting a Platform-keyed map of StringType values into a plain String-to-String map.
public static <T extends StringType> Map<String, String> convertDefaults(Map<Platform, T> vms) {
    Map<String, String> result = Maps.newHashMap();
    for (Entry<Platform, T> entry : vms.entrySet()) {
        result.put(entry.getKey().value(), entry.getValue().value());
    }
    return result;
}
 
Example 8: converting a platform map whose values are collections of StringType into String keys and String collections.
public static <P extends StringType, T extends StringType, C extends Collection<T>> Map<String, Collection<String>> convertPlatformMap(Map<P, C> vms) {
    Map<String, Collection<String>> result = Maps.newHashMap();
    for (Entry<P, C> entry : vms.entrySet()) {
        result.put(entry.getKey().value(), convertList(entry.getValue()));
    }
    return result;
}
 
Example 9: converting a nested display-name map into String keys and values.
public static <P extends StringType, T extends StringType, C extends StringType> Map<String, Map<String, String>>
    convertDisplayNameMap(Map<P, Map<T, C>> dNs) {
    Map<String, Map<String, String>> result = Maps.newHashMap();
    for (Entry<P, Map<T, C>> entry : dNs.entrySet()) {
        Map<String, String> innerMap = new HashMap<>();
        for (Entry<T, C> tzEntry : entry.getValue().entrySet()) {
            innerMap.put(tzEntry.getKey().value(), tzEntry.getValue().value());
        }
        result.put(entry.getKey().value(), innerMap);
    }
    return result;
}
 
Example 10: a CustomDataStore constructor variant of Example 3 that casts the deserialized data to a raw HashMap.
CustomDataStore( CustomDataStoreFactory dataStore, File dataDirectory, String id ) throws IOException {
  super( dataStore, id );
  this.dataDirectory = dataDirectory;
  this.dataFile = new File( this.dataDirectory, getId() );

  if ( IOUtils.isSymbolicLink( this.dataFile ) ) {
    throw new IOException( "unable to use a symbolic link: " + this.dataFile );
  }

  this.keyValueMap = Maps.newHashMap();

  if ( this.dataFile.exists() ) {
    this.keyValueMap = (HashMap) IOUtils.deserialize( new FileInputStream( this.dataFile ) );
  }
}
 
Example 11: translating a ParDo.BoundMulti transform for the Flink batch runner, mapping each output tag to an index.
@Override
public void translateNode(ParDo.BoundMulti<IN, OUT> transform, FlinkBatchTranslationContext context) {
	DataSet<IN> inputDataSet = context.getInputDataSet(context.getInput(transform));

	final DoFn<IN, OUT> doFn = transform.getFn();

	Map<TupleTag<?>, PCollection<?>> outputs = context.getOutput(transform).getAll();

	Map<TupleTag<?>, Integer> outputMap = Maps.newHashMap();
	// put the main output at index 0, FlinkMultiOutputDoFnFunction also expects this
	outputMap.put(transform.getMainOutputTag(), 0);
	int count = 1;
	for (TupleTag<?> tag: outputs.keySet()) {
		if (!outputMap.containsKey(tag)) {
			outputMap.put(tag, count++);
		}
	}

	// collect all output Coders and create a UnionCoder for our tagged outputs
	List<Coder<?>> outputCoders = Lists.newArrayList();
	for (PCollection<?> coll: outputs.values()) {
		outputCoders.add(coll.getCoder());
	}

	UnionCoder unionCoder = UnionCoder.of(outputCoders);

	@SuppressWarnings("unchecked")
	TypeInformation<RawUnionValue> typeInformation = new CoderTypeInformation<>(unionCoder);

	@SuppressWarnings("unchecked")
	FlinkMultiOutputDoFnFunction<IN, OUT> doFnWrapper = new FlinkMultiOutputDoFnFunction(doFn, context.getPipelineOptions(), outputMap);
	MapPartitionOperator<IN, RawUnionValue> outputDataSet = new MapPartitionOperator<>(inputDataSet, typeInformation, doFnWrapper, transform.getName());

	transformSideInputs(transform.getSideInputs(), outputDataSet, context);

	for (Map.Entry<TupleTag<?>, PCollection<?>> output: outputs.entrySet()) {
		TypeInformation<Object> outputType = context.getTypeInfo(output.getValue());
		int outputTag = outputMap.get(output.getKey());
		FlinkMultiOutputPruningFunction<Object> pruningFunction = new FlinkMultiOutputPruningFunction<>(outputTag);
		FlatMapOperator<RawUnionValue, Object> pruningOperator =
				new FlatMapOperator<>(outputDataSet, outputType, pruningFunction, output.getValue().getName());
		context.setOutputDataSet(output.getValue(), pruningOperator);
	}
}