Java Source Code Examples: com.google.common.flogger.LoggerConfig
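As the examples below show, LoggerConfig exposes the java.util.logging configuration behind a Flogger FluentLogger, so tests and setup code can attach handlers or change levels for a class (getConfig(SomeClass.class)), for a named logger (getConfig("")), or for an existing FluentLogger (of(logger)). The most common pattern is to attach a Guava TestLogHandler in a @Before method, assert on the captured records, and remove the handler in @After. The following is a minimal self-contained sketch of that pattern; it is not taken from any of the examples, and the test class name and logged message are illustrative only.

import static com.google.common.truth.Truth.assertThat;

import com.google.common.collect.ImmutableList;
import com.google.common.flogger.FluentLogger;
import com.google.common.flogger.LoggerConfig;
import com.google.common.testing.TestLogHandler;
import java.util.logging.LogRecord;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class LoggerConfigPatternTest {

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  private final TestLogHandler logHandler = new TestLogHandler();

  @Before
  public void setUp() {
    // Attach the capturing handler to the JUL logger behind this class's FluentLogger.
    LoggerConfig.of(logger).addHandler(logHandler);
  }

  @After
  public void tearDown() {
    // Detach the handler so captured records do not leak between tests.
    LoggerConfig.of(logger).removeHandler(logHandler);
  }

  @Test
  public void capturesLoggedMessage() {
    logger.atInfo().log("something interesting happened");
    ImmutableList.Builder<String> messages = new ImmutableList.Builder<>();
    for (LogRecord record : logHandler.getStoredLogRecords()) {
      messages.add(record.getMessage());
    }
    assertThat(messages.build()).contains("something interesting happened");
  }
}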
Example 1
@Before
public void before() {
  when(lazyGroupsConnection.get()).thenReturn(groupsConnection);
  LoggerConfig.getConfig(AuthenticatedRegistrarAccessor.class).addHandler(testLogHandler);
  // persistResource(loadRegistrar(ADMIN_CLIENT_ID));
  persistResource(
      loadRegistrar(REAL_CLIENT_ID_WITHOUT_CONTACT)
          .asBuilder()
          .setClientId(OTE_CLIENT_ID_WITHOUT_CONTACT)
          .setType(Registrar.Type.OTE)
          .setIanaIdentifier(null)
          .build());
  persistResource(
      loadRegistrar(REAL_CLIENT_ID_WITHOUT_CONTACT)
          .asBuilder()
          .setClientId(ADMIN_CLIENT_ID)
          .setType(Registrar.Type.OTE)
          .setIanaIdentifier(null)
          .build());
  when(groupsConnection.isMemberOfGroup(any(), any())).thenReturn(false);
}
Example 2
@Before
public void before() {
  LoggerConfig.getConfig(FlowRunner.class).addHandler(handler);
  flowRunner.clientId = "TheRegistrar";
  flowRunner.credentials = new PasswordOnlyTransportCredentials();
  flowRunner.eppRequestSource = EppRequestSource.UNIT_TEST;
  flowRunner.flowProvider = TestCommandFlow::new;
  flowRunner.flowClass = TestCommandFlow.class;
  flowRunner.inputXmlBytes = "<xml/>".getBytes(UTF_8);
  flowRunner.isDryRun = false;
  flowRunner.isSuperuser = false;
  flowRunner.isTransactional = false;
  flowRunner.sessionMetadata =
      new StatelessRequestSessionMetadata("TheRegistrar", ImmutableSet.of());
  flowRunner.trid = Trid.create("client-123", "server-456");
  flowRunner.flowReporter = Mockito.mock(FlowReporter.class);
}
Example 3
@Test
public void testUndeclaredExtensionsLogged() throws Exception {
  TestLogHandler handler = new TestLogHandler();
  LoggerConfig.getConfig(ExtensionManager.class).addHandler(handler);
  ExtensionManager manager =
      new TestInstanceBuilder()
          .setEppRequestSource(EppRequestSource.TOOL)
          .setDeclaredUris()
          .setSuppliedExtensions(MetadataExtension.class)
          .build();
  manager.register(MetadataExtension.class);
  manager.validate();
  ImmutableList.Builder<String> logMessages = new ImmutableList.Builder<>();
  for (LogRecord record : handler.getStoredLogRecords()) {
    logMessages.add(record.getMessage());
  }
  assertThat(logMessages.build())
      .contains(
          "Client clientId is attempting to run HelloFlow without declaring "
              + "URIs [urn:google:params:xml:ns:metadata-1.0] on login");
}
Example 4
@Before
public void setUp() throws Exception {
  MockitoAnnotations.initMocks(this);
  LoggerConfig.getConfig(DynamicFileListRecordReader.class).setLevel(Level.FINE);
  // Set up a Configuration which will cause "gs://" to grab an InMemoryGoogleHadoopFileSystem.
  config = InMemoryGoogleHadoopFileSystem.getSampleConfiguration();
  when(mockTaskContext.getConfiguration()).thenReturn(config);
  basePath = new Path("gs://foo-bucket/");
  shardPath = new Path(basePath, "shard0/data-*.json");
  estimatedNumRecords = 2;
  fileSystem = basePath.getFileSystem(config);
  fileSystem.mkdirs(shardPath.getParent());
  // Instead of actually blocking, make our mockSleeper throw an exception that we can catch
  // whenever the reader would otherwise be blocking.
  doThrow(new RuntimeException(SLEEP_ID)).when(mockSleeper).sleep(anyLong());
  resetRecordReader();
}
Example 5
/**
 * Configures logging parameters depending on the {@link Environment}.
 *
 * <p>If not running locally, sets the logging formatter to {@link GcpJsonFormatter}, which
 * formats each log entry as a single-line JSON string printed to {@code STDOUT} or {@code
 * STDERR} so that it can be correctly parsed by Stackdriver logging.
 *
 * @see <a href="https://cloud.google.com/kubernetes-engine/docs/how-to/logging#best_practices">
 *     Logging Best Practices</a>
 */
private void configureLogging() {
  // Remove all other handlers on the root logger to avoid double logging.
  LoggerConfig rootLoggerConfig = LoggerConfig.getConfig("");
  Arrays.asList(rootLoggerConfig.getHandlers()).forEach(rootLoggerConfig::removeHandler);
  // If running in a non-local environment, use the GCP JSON formatter.
  Handler rootHandler = new ConsoleHandler();
  rootHandler.setLevel(Level.FINE);
  if (env != Environment.LOCAL) {
    rootHandler.setFormatter(new GcpJsonFormatter());
  }
  rootLoggerConfig.addHandler(rootHandler);
  if (log) {
    // The LoggingHandler records logs at LogLevel.DEBUG (internal Netty log level), which
    // corresponds to Level.FINE (JUL log level). It uses a JUL logger with the name
    // "io.netty.handler.logging.LoggingHandler" to actually process the logs. This JUL logger
    // is set to Level.FINE if the --log parameter is passed, so that it does not filter out
    // logs that the LoggingHandler writes. Otherwise the logs are silently ignored because
    // the default JUL logger level is Level.INFO.
    LoggerConfig.getConfig(LoggingHandler.class).setLevel(Level.FINE);
    // Log source IP information if the --log parameter is passed. This is considered PII and
    // should only be used in non-production environments for debugging purposes.
    LoggerConfig.getConfig(ProxyProtocolHandler.class).setLevel(Level.FINE);
  }
}
Example 6
@Before
public void before() {
  LoggerConfig.getConfig(FlowReporter.class).addHandler(handler);
  flowReporter.trid = Trid.create("client-123", "server-456");
  flowReporter.clientId = "TheRegistrar";
  flowReporter.inputXmlBytes = "<xml/>".getBytes(UTF_8);
  flowReporter.flowClass = TestCommandFlow.class;
  flowReporter.eppInput = mock(EppInput.class);
  when(flowReporter.eppInput.getCommandType()).thenReturn("info");
  when(flowReporter.eppInput.getResourceType()).thenReturn(Optional.of("domain"));
  when(flowReporter.eppInput.getSingleTargetId()).thenReturn(Optional.of("target.foo"));
  when(flowReporter.eppInput.getTargetIds()).thenReturn(ImmutableList.of("target.foo"));
}
Example 7
@Before
public void beforeResourceFlowTestCase() {
  // Attach TestLogHandler to the root logger so it has access to all log messages.
  // Note that in theory for assertIcannReportingActivityFieldLogged() below it would suffice
  // to attach it only to the FlowRunner logger, but for some reason this doesn't work for all
  // flows.
  LoggerConfig.getConfig("").addHandler(logHandler);
}
Example 8
@Before
public void before() throws Exception {
  diffLister.gcsService = gcsService;
  diffLister.gcsBucket = GCS_BUCKET;
  diffLister.executor = newDirectExecutorService();
  for (int i = 0; i < 5; i++) {
    gcsService.createOrReplace(
        new GcsFilename(GCS_BUCKET, DIFF_FILE_PREFIX + now.minusMinutes(i)),
        new GcsFileOptions.Builder()
            .addUserMetadata(LOWER_BOUND_CHECKPOINT, now.minusMinutes(i + 1).toString())
            .build(),
        ByteBuffer.wrap(new byte[] {1, 2, 3}));
  }
  LoggerConfig.getConfig(GcsDiffFileLister.class).addHandler(logHandler);
}
Example 9
@Before
public void before() throws Exception {
  action.bigquery = bigquery;
  when(bigquery.jobs()).thenReturn(bigqueryJobs);
  when(bigqueryJobs.get(PROJECT_ID, JOB_ID)).thenReturn(bigqueryJobsGet);
  action.taskQueueUtils = TASK_QUEUE_UTILS;
  action.projectId = PROJECT_ID;
  action.jobId = JOB_ID;
  action.chainedQueueName = () -> CHAINED_QUEUE_NAME;
  LoggerConfig.getConfig(BigqueryPollJobAction.class).addHandler(logHandler);
}
Example 10
@Test
public void givenALoggingConfiguration_shouldLogAtTheConfiguredLevel() {
  LoggerConfig.of(logger).setLevel(Level.FINE);
  logger.atInfo().log("Info Message");
  logger.atWarning().log("Warning Message");
  logger.atSevere().log("Severe Message");
  logger.atFinest().log("Finest Message");
  logger.atFine().log("Fine Message");
  logger.atFiner().log("Finer Message");
  logger.atConfig().log("Config Message");
}
Example 11
@Before
public void init() {
  createTlds("tld", "xn--kgbechtv", "1.test");
  LoggerConfig.getConfig(WhoisReader.class).addHandler(testLogHandler);
}
Example 12
@After
public void after() {
  LoggerConfig.getConfig(AuthenticatedRegistrarAccessor.class).removeHandler(testLogHandler);
}
Example 13
@Before
public void setUp() {
  LoggerConfig.getConfig(AsyncTaskEnqueuer.class).addHandler(logHandler);
  when(appEngineServiceUtils.getServiceHostname("backend")).thenReturn("backend.hostname.fake");
  asyncTaskEnqueuer = createForTesting(appEngineServiceUtils, clock, standardSeconds(90));
}
Example 14
@Before
public void setUp() {
  LoggerConfig.getConfig(RequestStatusCheckerImpl.class).addHandler(logHandler);
  RequestStatusCheckerImpl.logService = mock(LogService.class);
}
Example 15
@After
public void tearDown() {
  LoggerConfig.getConfig(RequestStatusCheckerImpl.class).removeHandler(logHandler);
}
Example 16
@Before
public void setUp() {
  LoggerConfig.getConfig(ComparatorKeyring.class).addHandler(testLogHandler);
}
Example 17
@After
public void tearDown() {
  LoggerConfig.getConfig(ComparatorKeyring.class).removeHandler(testLogHandler);
}
Example 18
@Before
public void setUp() throws IOException, GeneralSecurityException {
  MockitoAnnotations.initMocks(this);
  LoggerConfig.getConfig(GsonBigQueryInputFormat.class).setLevel(Level.FINE);
  LoggerConfig.getConfig(BigQueryUtils.class).setLevel(Level.FINE);
  LoggerConfig.getConfig(GsonRecordReader.class).setLevel(Level.FINE);
  bucketHelper = new TestBucketHelper("bq_integration_test");
  // A unique per-setUp String to avoid collisions between test runs.
  String testId = bucketHelper.getUniqueBucketPrefix();
  projectIdValue = TestConfiguration.getInstance().getProjectId();
  if (Strings.isNullOrEmpty(projectIdValue)) {
    projectIdValue = System.getenv(BIGQUERY_PROJECT_ID_ENVVARNAME);
  }
  checkArgument(
      !Strings.isNullOrEmpty(projectIdValue), "Must provide %s", BIGQUERY_PROJECT_ID_ENVVARNAME);
  testDataset = testId + "_dataset";
  testBucket = testId + "_bucket";
  // We have to create the output dataset ourselves.
  // TODO(user): Extract dataset creation into a library which is also used by
  // BigQueryOutputCommitter.
  Dataset outputDataset = new Dataset();
  DatasetReference datasetReference = new DatasetReference();
  datasetReference.setProjectId(projectIdValue);
  datasetReference.setDatasetId(testDataset);
  config = getConfigForGcsFromBigquerySettings(projectIdValue);
  BigQueryFactory factory = new BigQueryFactory();
  bigqueryInstance = factory.getBigQuery(config);
  Bigquery.Datasets datasets = bigqueryInstance.datasets();
  outputDataset.setDatasetReference(datasetReference);
  logger.atInfo().log(
      "Creating temporary dataset '%s' for project '%s'", testDataset, projectIdValue);
  datasets.insert(projectIdValue, outputDataset).execute();
  Path toCreate = new Path(String.format("gs://%s", testBucket));
  FileSystem fs = toCreate.getFileSystem(config);
  logger.atInfo().log("Creating temporary test bucket '%s'", toCreate);
  fs.mkdirs(toCreate);
  // Since the TaskAttemptContext and JobContexts are mostly used just to access a
  // "Configuration" object, we'll mock the two contexts to just return our fake configuration
  // object with which we'll provide the settings we want to test.
  config.clear();
  setConfigForGcsFromBigquerySettings();
  when(mockTaskAttemptContext.getConfiguration()).thenReturn(config);
  when(mockJobContext.getConfiguration()).thenReturn(config);
  // Have a realistic-looking fake TaskAttemptID.
  int taskNumber = 3;
  int taskAttempt = 2;
  int jobNumber = 42;
  String jobIdString = "jobid" + System.currentTimeMillis();
  JobID jobId = new JobID(jobIdString, jobNumber);
  TaskAttemptID taskAttemptId =
      new TaskAttemptID(new TaskID(jobId, false, taskNumber), taskAttempt);
  when(mockTaskAttemptContext.getTaskAttemptID()).thenReturn(taskAttemptId);
  when(mockJobContext.getJobID()).thenReturn(jobId);
  testTable = testId + "_table_" + jobIdString;
}
Example 19
@Before
public void setUp() throws IOException {
  MockitoAnnotations.initMocks(this);
  LoggerConfig.getConfig(GsonBigQueryInputFormat.class).setLevel(Level.FINE);
  // Create fake job reference.
  JobReference fakeJobReference = new JobReference().setProjectId(jobProjectId).setJobId(jobId);
  // Create the job result.
  jobStatus = new JobStatus();
  jobStatus.setState("DONE");
  jobStatus.setErrorResult(null);
  jobHandle = new Job();
  jobHandle.setStatus(jobStatus);
  jobHandle.setJobReference(fakeJobReference);
  // Mocks for Bigquery jobs.
  when(mockBigquery.jobs()).thenReturn(mockBigqueryJobs);
  // Mock getting Bigquery job.
  when(mockBigqueryJobs.get(any(String.class), any(String.class)))
      .thenReturn(mockBigqueryJobsGet);
  when(mockBigqueryJobsGet.setLocation(any(String.class))).thenReturn(mockBigqueryJobsGet);
  // Mock inserting Bigquery job.
  when(mockBigqueryJobs.insert(any(String.class), any(Job.class)))
      .thenReturn(mockBigqueryJobsInsert);
  // Fake table.
  fakeTableSchema = new TableSchema();
  fakeTable = new Table().setSchema(fakeTableSchema).setLocation("test_location");
  // Mocks for Bigquery tables.
  when(mockBigquery.tables()).thenReturn(mockBigqueryTables);
  when(mockBigqueryTables.get(any(String.class), any(String.class), any(String.class)))
      .thenReturn(mockBigqueryTablesGet);
  Datasets datasets = Mockito.mock(Datasets.class);
  Datasets.Get datasetsGet = Mockito.mock(Datasets.Get.class);
  Dataset dataset = new Dataset().setLocation("test_location");
  when(mockBigquery.datasets()).thenReturn(datasets);
  when(datasets.get(any(String.class), any(String.class))).thenReturn(datasetsGet);
  when(datasetsGet.execute()).thenReturn(dataset);
  // Create table reference.
  tableRef = new TableReference();
  tableRef.setProjectId(projectId);
  tableRef.setDatasetId(datasetId);
  tableRef.setTableId(tableId);
  helper = new BigQueryHelper(mockBigquery);
  helper.setErrorExtractor(mockErrorExtractor);
}
Example 20
/**
 * Creates an in-memory GHFS.
 *
 * @throws IOException on IOError.
 */
@Before
public void setUp() throws IOException {
  MockitoAnnotations.initMocks(this);
  LoggerConfig.getConfig(GsonBigQueryInputFormat.class).setLevel(Level.FINE);
  // Set the Hadoop job configuration.
  config = new JobConf(InMemoryGoogleHadoopFileSystem.getSampleConfiguration());
  config.set(BigQueryConfiguration.PROJECT_ID.getKey(), jobProjectId);
  config.set(BigQueryConfiguration.INPUT_PROJECT_ID.getKey(), dataProjectId);
  config.set(BigQueryConfiguration.INPUT_DATASET_ID.getKey(), intermediateDataset);
  config.set(BigQueryConfiguration.INPUT_TABLE_ID.getKey(), intermediateTable);
  config.set(BigQueryConfiguration.TEMP_GCS_PATH.getKey(), "gs://test_bucket/other_path");
  config.setClass(
      INPUT_FORMAT_CLASS.getKey(),
      GsonBigQueryInputFormat.class,
      AbstractBigQueryInputFormat.class);
  config.setBoolean(BigQueryConfiguration.DELETE_EXPORT_FILES_FROM_GCS.getKey(), true);
  CredentialConfigurationUtil.addTestConfigurationSettings(config);
  // Create a GoogleHadoopFileSystem to use to initialize and write to the in-memory GcsFs.
  ghfs = new InMemoryGoogleHadoopFileSystem();
  JobReference fakeJobReference =
      new JobReference()
          .setProjectId(jobProjectId)
          .setJobId("bigquery-job-1234")
          .setLocation("test-job-location");
  // Create the job result.
  jobStatus = new JobStatus();
  jobStatus.setState("DONE");
  jobStatus.setErrorResult(null);
  jobHandle = new Job();
  jobHandle.setStatus(jobStatus);
  jobHandle.setJobReference(fakeJobReference);
  // Create table reference.
  tableRef = new TableReference();
  tableRef.setProjectId(dataProjectId);
  tableRef.setDatasetId("test_dataset");
  tableRef.setTableId("test_table");
  table = new Table().setTableReference(tableRef).setLocation("test_location");
  when(mockBigQueryHelper.getRawBigquery()).thenReturn(mockBigquery);
  // Mocks for Bigquery jobs.
  when(mockBigquery.jobs()).thenReturn(mockBigqueryJobs);
  // Mock getting Bigquery job.
  when(mockBigqueryJobs.get(any(String.class), any(String.class)))
      .thenReturn(mockBigqueryJobsGet);
  when(mockBigqueryJobsGet.setLocation(any(String.class))).thenReturn(mockBigqueryJobsGet);
  when(mockBigqueryJobsGet.execute()).thenReturn(jobHandle);
  // Mock inserting Bigquery job.
  when(mockBigqueryJobs.insert(any(String.class), any(Job.class)))
      .thenReturn(mockBigqueryJobsInsert);
  when(mockBigqueryJobsInsert.execute()).thenReturn(jobHandle);
  // Mocks for Bigquery tables.
  when(mockBigquery.tables()).thenReturn(mockBigqueryTables);
  // Mocks for getting Bigquery table.
  when(mockBigqueryTables.get(any(String.class), any(String.class), any(String.class)))
      .thenReturn(mockBigqueryTablesGet);
  when(mockBigqueryTablesGet.execute()).thenReturn(table);
  when(mockBigQueryHelper.getTable(any(TableReference.class))).thenReturn(table);
  when(mockBigQueryHelper.createJobReference(
          any(String.class), any(String.class), any(String.class)))
      .thenReturn(fakeJobReference);
  when(mockBigQueryHelper.insertJobOrFetchDuplicate(any(String.class), any(Job.class)))
      .thenReturn(jobHandle);
}
Example 21
public static void main(String[] args) {
  LoggerConfig.of(logger).setLevel(Level.FINE);
  Exception exception = new Exception("This is a test exception.");
  logger.atInfo().withCause(exception).log("Log message with: %s", "Alfred");
}