Python source examples: tensorflow.core.util.event.Event()
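Every record in a TensorBoard event file (events.out.tfevents.*) is one serialized `Event` protocol buffer from `tensorflow.core.util.event_pb2`. An `Event` carries a `wall_time`, an optional `step`, and one payload field such as `summary`, `graph_def`, `meta_graph_def`, `session_log`, `log_message`, `tagged_run_metadata`, or the `file_version` marker. Before the examples, a minimal construction and round-trip sketch (not taken from any of the projects below):

import time

from tensorflow.core.util import event_pb2

# Build an Event with a wall-clock time and a step; payload fields such as
# summary= or graph_def= are set the same way, as the examples show.
event = event_pb2.Event(wall_time=time.time(), step=0)

# Standard protobuf round trip: serialize to bytes, then parse back.
data = event.SerializeToString()
parsed = event_pb2.Event.FromString(data)
assert parsed.step == 0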
Example 1
def setup_logger(out_dir="results", exp_name="test", output_formats=None):
    timestamp = utils.make_timestamp()
    exp_name = exp_name.replace("/", "_")  # environment names can contain /'s
    filename = "{}-{}".format(timestamp, exp_name)[0:255]  # Linux has a filename limit of 255
    out_dir = osp.join(out_dir, filename)
    os.makedirs(out_dir, exist_ok=True)
    logger.configure(folder=osp.join(out_dir, "rl"), format_strs=["tensorboard", "stdout"])
    logger_instance = logger.Logger.CURRENT
    if output_formats is not None:
        logger_instance.output_formats += output_formats
    for fmt in logger_instance.output_formats:
        if isinstance(fmt, logger.TensorBoardOutputFormat):
            writer = fmt.writer
            layout = tb_layout()
            event = event_pb2.Event(summary=layout)
            writer.WriteEvent(event)
            writer.Flush()
    return out_dir, logger_instance
Example 2
def load_tensor_from_event_file(event_file_path):
    """Load a tensor from an event file.

    Assumes that the event file contains an `Event` protobuf and that the
    `Event` protobuf contains a `Tensor` value.

    Args:
        event_file_path: (`str`) path to the event file.

    Returns:
        The tensor value loaded from the event file, as a `numpy.ndarray`. For
        uninitialized Tensors, returns `None`. For Tensors of data types that
        cannot be converted to `numpy.ndarray` (e.g., `tf.resource`), returns
        `None`.
    """
    event = event_pb2.Event()
    with gfile.Open(event_file_path, "rb") as f:
        event.ParseFromString(f.read())
    return load_tensor_from_event(event)
Example 3
def add_summary(self, summary, global_step=None):
    """Adds a `Summary` protocol buffer to the event file.

    This method wraps the provided summary in an `Event` protocol buffer
    and adds it to the event file.

    You can pass the result of evaluating any summary op, using
    @{tf.Session.run} or @{tf.Tensor.eval}, to this function. Alternatively,
    you can pass a `tf.Summary` protocol buffer that you populate with your
    own data. The latter is commonly done to report evaluation results in
    event files.

    Args:
        summary: A `Summary` protocol buffer, optionally serialized as a string.
        global_step: Number. Optional global step value to record with the
            summary.
    """
    if isinstance(summary, bytes):
        summ = summary_pb2.Summary()
        summ.ParseFromString(summary)
        summary = summ
    event = event_pb2.Event(summary=summary)
    self._add_event(event, global_step)
Example 4
def add_meta_graph(self, meta_graph_def, global_step=None):
    """Adds a `MetaGraphDef` to the event file.

    The `MetaGraphDef` allows running the given graph via
    `saver.import_meta_graph()`.

    Args:
        meta_graph_def: A `MetaGraphDef` object, often as returned by
            `saver.export_meta_graph()`.
        global_step: Number. Optional global step counter to record with the
            graph.

    Raises:
        TypeError: If `meta_graph_def` is not an instance of `MetaGraphDef`.
    """
    if not isinstance(meta_graph_def, meta_graph_pb2.MetaGraphDef):
        raise TypeError("meta_graph_def must be type MetaGraphDef, saw type: %s" %
                        type(meta_graph_def))
    meta_graph_bytes = meta_graph_def.SerializeToString()
    event = event_pb2.Event(meta_graph_def=meta_graph_bytes)
    self._add_event(event, global_step)
Example 5
def Load(self):
    """Loads all new values from disk.

    Calling Load multiple times in a row will not 'drop' events as long as the
    return value is not iterated over.

    Yields:
        All values that were written to disk that have not been yielded yet.
    """
    while True:
        try:
            with errors.raise_exception_on_not_ok_status() as status:
                self._reader.GetNext(status)
        except (errors.DataLossError, errors.OutOfRangeError):
            # We ignore partial read exceptions, because a record may be
            # truncated. PyRecordReader holds the offset prior to the failed
            # read, so retrying will succeed.
            break
        event = event_pb2.Event()
        event.ParseFromString(self._reader.record())
        yield event
    logging.debug('No more events in %s', self._file_path)
Example 6
def add_summary(self, summary, global_step=None):
    """Adds a `Summary` protocol buffer to the event file.

    This method wraps the provided summary in an `Event` protocol buffer
    and adds it to the event file.

    You can pass the result of evaluating any summary op, using
    [`Session.run()`](client.md#Session.run) or
    [`Tensor.eval()`](framework.md#Tensor.eval), to this function.
    Alternatively, you can pass a `tf.Summary` protocol buffer that you
    populate with your own data. The latter is commonly done to report
    evaluation results in event files.

    Args:
        summary: A `Summary` protocol buffer, optionally serialized as a string.
        global_step: Number. Optional global step value to record with the
            summary.
    """
    if isinstance(summary, bytes):
        summ = summary_pb2.Summary()
        summ.ParseFromString(summary)
        summary = summ
    event = event_pb2.Event(summary=summary)
    self._add_event(event, global_step)
Example 7
def add_run_metadata(self, run_metadata, tag, global_step=None):
    """Adds metadata information for a single session.run() call.

    Args:
        run_metadata: A `RunMetadata` protobuf object.
        tag: The tag name for this metadata.
        global_step: Number. Optional global step counter to record with the
            StepStats.

    Raises:
        ValueError: If the provided tag was already used for this type of event.
    """
    if tag in self._session_run_tags:
        raise ValueError("The provided tag was already used for this event type")
    self._session_run_tags[tag] = True
    tagged_metadata = event_pb2.TaggedRunMetadata()
    tagged_metadata.tag = tag
    # Store the `RunMetadata` object as bytes in order to have postponed
    # (lazy) deserialization when used later.
    tagged_metadata.run_metadata = run_metadata.SerializeToString()
    event = event_pb2.Event(tagged_run_metadata=tagged_metadata)
    self._add_event(event, global_step)
Example 8
def event_to_record(event):
    """
    Convert an event protobuf to a TFRecord.

    Arguments:
        event (Event): Event protobuf to write in TFRecord format

    Returns:
        TFRecord-formatted bytestring
    """
    event_str = serialize_protobuf(event)
    header = struct.pack('Q', len(event_str))
    record = [header,
              struct.pack('I', masked_crc32c(header)),
              event_str,
              struct.pack('I', masked_crc32c(event_str))]
    return b"".join(record)
Example 9
def add_meta_graph(self, meta_graph_def, global_step=None):
    """Adds a `MetaGraphDef` to the event file.

    The `MetaGraphDef` allows running the given graph via
    `saver.import_meta_graph()`.

    Args:
        meta_graph_def: A `MetaGraphDef` object, often as returned by
            `saver.export_meta_graph()`.
        global_step: Number. Optional global step counter to record with the
            graph.

    Raises:
        TypeError: If `meta_graph_def` is not an instance of `MetaGraphDef`.
    """
    if not isinstance(meta_graph_def, meta_graph_pb2.MetaGraphDef):
        raise TypeError("meta_graph_def must be type MetaGraphDef, saw type: %s"
                        % type(meta_graph_def))
    meta_graph_bytes = meta_graph_def.SerializeToString()
    event = event_pb2.Event(meta_graph_def=meta_graph_bytes)
    self._add_event(event, global_step)
Example 10
def log_key_value(self, key, value, step=None):
    def summary_val(k, v):
        kwargs = {'tag': k, 'simple_value': float(v)}
        return tf.Summary.Value(**kwargs)

    summary = tf.Summary(value=[summary_val(key, value)])
    event = event_pb2.Event(wall_time=time.time(), summary=summary)
    # Use a separate step counter for each key.
    if key not in self.key_steps:
        self.key_steps[key] = 0
    if step is not None:
        self.key_steps[key] = step
    event.step = self.key_steps[key]
    self.writer.WriteEvent(event)
    self.writer.Flush()
    self.key_steps[key] += 1
Example 11
def __init__(self, logdir, flush_secs=2, is_dummy=False, dummy_time=None):
    self._name_to_tf_name = {}
    self._tf_names = set()
    self.is_dummy = is_dummy
    self.logdir = logdir
    self.flush_secs = flush_secs  # TODO
    self._writer = None
    self._dummy_time = dummy_time
    if is_dummy:
        self.dummy_log = defaultdict(list)
    else:
        if not os.path.exists(self.logdir):
            os.makedirs(self.logdir)
        hostname = socket.gethostname()
        filename = os.path.join(
            self.logdir, 'events.out.tfevents.{}.{}'.format(
                int(self._time()), hostname))
        self._writer = open(filename, 'wb')
        self._write_event(event_pb2.Event(
            wall_time=self._time(), step=0, file_version='brain.Event:2'))
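The constructor above writes to a plain file object, so its _write_event helper (not shown in the snippet) must do its own TFRecord framing. A sketch under that assumption, reusing a masked_crc32c helper like the one sketched after event_to_record above:

import struct

def _write_event(self, event):
    # Frame the serialized Event as a TFRecord: little-endian length,
    # masked CRC of the length, payload bytes, masked CRC of the payload.
    data = event.SerializeToString()
    header = struct.pack('<Q', len(data))
    self._writer.write(header)
    self._writer.write(struct.pack('<I', masked_crc32c(header)))
    self._writer.write(data)
    self._writer.write(struct.pack('<I', masked_crc32c(data)))
    self._writer.flush()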
Example 12
def write_values(self, key2val):
    summary = tf.Summary(value=[tf.Summary.Value(tag=k, simple_value=float(v))
                                for (k, v) in key2val.items()])
    event = event_pb2.Event(wall_time=time.time(), summary=summary)
    event.step = self.step  # is there any reason why you'd want to specify the step?
    self.evwriter.WriteEvent(event)
    self.evwriter.Flush()
    self.step += 1
Example 13
def load_tensor_from_event(event):
    """Load a tensor from an Event proto.

    Args:
        event: The Event proto, assumed to hold a tensor value in its
            summary.value[0] field.

    Returns:
        The tensor value loaded from the event file, as a `numpy.ndarray`, if
        representation of the tensor value by a `numpy.ndarray` is possible.
        For uninitialized Tensors, returns `None`. For Tensors of data types
        that cannot be represented as `numpy.ndarray` (e.g., `tf.resource`),
        returns the `TensorProto` protobuf object without converting it to a
        `numpy.ndarray`.
    """
    tensor_proto = event.summary.value[0].tensor
    if tensor_proto.tensor_content or tensor_proto.string_val:
        # Initialized tensor.
        if tensor_proto.dtype == types_pb2.DT_RESOURCE:
            tensor_value = InconvertibleTensorProto(tensor_proto)
        else:
            try:
                tensor_value = tensor_util.MakeNdarray(tensor_proto)
            except KeyError:
                tensor_value = InconvertibleTensorProto(tensor_proto)
    else:
        # Uninitialized tensor or tensor of unconvertible data type.
        tensor_value = InconvertibleTensorProto(tensor_proto, False)
    return tensor_value
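The happy path above goes through tensor_util.MakeNdarray, which is the inverse of tensor_util.make_tensor_proto. A quick round-trip sketch (not part of the original source) showing the conversion this function relies on:

import numpy as np
from tensorflow.python.framework import tensor_util

original = np.arange(6, dtype=np.float32).reshape(2, 3)
proto = tensor_util.make_tensor_proto(original)  # ndarray -> TensorProto
restored = tensor_util.MakeNdarray(proto)        # TensorProto -> ndarray
assert np.array_equal(original, restored)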
Example 14
def _load_graph_def_from_event_file(event_file_path):
    event = event_pb2.Event()
    with gfile.Open(event_file_path, "rb") as f:
        event.ParseFromString(f.read())
    return graph_pb2.GraphDef.FromString(event.graph_def)
Example 15
def _load_log_message_from_event_file(event_file_path):
    event = event_pb2.Event()
    with gfile.Open(event_file_path, "rb") as f:
        event.ParseFromString(f.read())
    return event.log_message.message
Example 16
def get_tensor(self):
    """Get tensor from the dump (`Event`) file.

    Returns:
        The tensor loaded from the dump (`Event`) file.
    """
    return load_tensor_from_event_file(self.file_path)

# TODO(cais): Add time unit suffix to timestamp and t0 (us).
Example 17
def WriteEvent(self, event):
    from tensorflow.core.util.event_pb2 import Event
    if not isinstance(event, Event):
        raise TypeError("Expected an event_pb2.Event proto, "
                        "but got %s" % type(event))
    return self._WriteSerializedEvent(event.SerializeToString())
Example 18
def log(level, message, *args):
    """Conditionally logs `message % args` at the level `level`.

    Note that tensorboard_logging verbosity and logging verbosity are separate;
    the message will always be passed through to the logging module regardless
    of whether it passes the tensorboard_logging verbosity check.

    Args:
        level: The verbosity level to use. Must be one of
            tensorboard_logging.{DEBUG, INFO, WARN, ERROR, FATAL}.
        message: The message template to use.
        *args: Arguments to interpolate to the message template, if any.

    Raises:
        ValueError: If `level` is not a valid logging level.
        RuntimeError: If the `SummaryWriter` to use has not been set.
    """
    if _summary_writer is _sentinel_summary_writer:
        raise RuntimeError('Must call set_summary_writer before doing any '
                           'logging from tensorboard_logging')
    _check_verbosity(level)
    proto_level = _LEVEL_PROTO_MAP[level]
    if proto_level >= _LEVEL_PROTO_MAP[_verbosity]:
        log_message = event_pb2.LogMessage(level=proto_level,
                                           message=message % args)
        event = event_pb2.Event(wall_time=time.time(), log_message=log_message)
        if _summary_writer:
            _summary_writer.add_event(event)
    logging.log(_PLATFORM_LOGGING_LEVEL_MAP[level], message, *args)
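A typical call sequence for this module, as a sketch (set_summary_writer, set_verbosity, and the level constants live in the same tensorboard_logging module; `writer` is assumed to be an existing summary writer):

from tensorflow.python.summary import tensorboard_logging

tensorboard_logging.set_summary_writer(writer)
tensorboard_logging.set_verbosity(tensorboard_logging.INFO)
# Emits a LogMessage Event to the writer and also logs via the logging module.
tensorboard_logging.log(tensorboard_logging.INFO, "step %d: loss=%.4f", 100, 0.25)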
Example 19
def __init__(self, logdir, max_queue=10, flush_secs=120,
             filename_suffix=None):
    """Creates an `EventFileWriter` and an event file to write to.

    On construction the summary writer creates a new event file in `logdir`.
    This event file will contain `Event` protocol buffers, which are written
    to disk via the add_event method.

    The other arguments to the constructor control the asynchronous writes to
    the event file:

    * `flush_secs`: How often, in seconds, to flush the added summaries and
      events to disk.
    * `max_queue`: Maximum number of summaries or events pending to be written
      to disk before one of the 'add' calls blocks.

    Args:
        logdir: A string. Directory where event file will be written.
        max_queue: Integer. Size of the queue for pending events and summaries.
        flush_secs: Number. How often, in seconds, to flush the pending events
            and summaries to disk.
        filename_suffix: A string. Every event file's name is suffixed with
            `filename_suffix`.
    """
    self._logdir = logdir
    if not gfile.IsDirectory(self._logdir):
        gfile.MakeDirs(self._logdir)
    self._event_queue = six.moves.queue.Queue(max_queue)
    self._ev_writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(os.path.join(self._logdir, "events")))
    self._flush_secs = flush_secs
    self._sentinel_event = self._get_sentinel_event()
    if filename_suffix:
        self._ev_writer.InitWithSuffix(compat.as_bytes(filename_suffix))
    self._closed = False
    self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                      self._flush_secs, self._sentinel_event)
    self._worker.start()
Example 20
def _get_sentinel_event(self):
    """Generate a sentinel event for terminating the worker."""
    return event_pb2.Event()
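The empty `Event` acts as a poison pill: the logger thread compares queue items against it by identity and shuts down when it sees it. A rough sketch of such a consumer loop (an illustration, not the actual _EventLoggerThread source):

def run(self):
    while True:
        event = self._queue.get()
        if event is self._sentinel_event:
            # close() enqueued the sentinel; write nothing further and exit.
            self._queue.task_done()
            return
        self._ev_writer.WriteEvent(event)
        self._queue.task_done()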
Example 21
def add_session_log(self, session_log, global_step=None):
    """Adds a `SessionLog` protocol buffer to the event file.

    This method wraps the provided session log in an `Event` protocol buffer
    and adds it to the event file.

    Args:
        session_log: A `SessionLog` protocol buffer.
        global_step: Number. Optional global step value to record with the
            summary.
    """
    event = event_pb2.Event(session_log=session_log)
    self._add_event(event, global_step)
Example 22
def _add_graph_def(self, graph_def, global_step=None):
    graph_bytes = graph_def.SerializeToString()
    event = event_pb2.Event(graph_def=graph_bytes)
    self._add_event(event, global_step)
Example 23
def add_event(self, event):
    """Adds an event to the event file.

    Args:
        event: An `Event` protocol buffer.
    """
    self.event_writer.add_event(event)
Example 24
def add_summary(self, summary, global_step=None):
    """Adds a `Summary` protocol buffer to the event file.

    This method wraps the provided summary in an `Event` protocol buffer
    and adds it to the event file.

    You can pass the result of evaluating any summary op, using
    @{tf.Session.run} or @{tf.Tensor.eval}, to this function. Alternatively,
    you can pass a `tf.Summary` protocol buffer that you populate with your
    own data. The latter is commonly done to report evaluation results in
    event files.

    Args:
        summary: A `Summary` protocol buffer, optionally serialized as a string.
        global_step: Number. Optional global step value to record with the
            summary.
    """
    if isinstance(summary, bytes):
        summ = summary_pb2.Summary()
        summ.ParseFromString(summary)
        summary = summ
    event = event_pb2.Event(wall_time=time.time(), summary=summary)
    if global_step is not None:
        event.step = int(global_step)
    self.add_event(event)
Example 25
def add_session_log(self, session_log, global_step=None):
    """Adds a `SessionLog` protocol buffer to the event file.

    This method wraps the provided session log in an `Event` protocol buffer
    and adds it to the event file.

    Args:
        session_log: A `SessionLog` protocol buffer.
        global_step: Number. Optional global step value to record with the
            summary.
    """
    event = event_pb2.Event(wall_time=time.time(), session_log=session_log)
    if global_step is not None:
        event.step = int(global_step)
    self.add_event(event)
Example 26
def _add_graph_def(self, graph_def, global_step=None):
    graph_bytes = graph_def.SerializeToString()
    event = event_pb2.Event(wall_time=time.time(), graph_def=graph_bytes)
    if global_step is not None:
        event.step = int(global_step)
    self._event_queue.put(event)
Example 27
def add_run_metadata(self, run_metadata, tag, global_step=None):
    """Adds metadata information for a single session.run() call.

    Args:
        run_metadata: A `RunMetadata` protobuf object.
        tag: The tag name for this metadata.
        global_step: Number. Optional global step counter to record with the
            StepStats.

    Raises:
        ValueError: If the provided tag was already used for this type of event.
    """
    if tag in self._session_run_tags:
        raise ValueError("The provided tag was already used for this event type")
    self._session_run_tags[tag] = True
    tagged_metadata = event_pb2.TaggedRunMetadata()
    tagged_metadata.tag = tag
    # Store the `RunMetadata` object as bytes in order to have postponed
    # (lazy) deserialization when used later.
    tagged_metadata.run_metadata = run_metadata.SerializeToString()
    event = event_pb2.Event(wall_time=time.time(),
                            tagged_run_metadata=tagged_metadata)
    if global_step is not None:
        event.step = int(global_step)
    self._event_queue.put(event)
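All of the writers above produce files that the standard TF 1.x iterator can read back. A short sketch for inspecting the resulting events (the file name is a placeholder):

import tensorflow as tf

for event in tf.train.summary_iterator("events.out.tfevents.1523456789.myhost"):
    for value in event.summary.value:
        print(event.step, value.tag, value.simple_value)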