Python source examples: nets.nasnet.nasnet._update_hparams()
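Every builder below comes from the slim PNASNet model definitions and calls the protected helper nasnet._update_hparams() right after selecting a config. For orientation, here is a minimal sketch of what that helper does, based on the slim NASNet source (verify against your checkout; hparams is assumed to be a tf.contrib.training.HParams instance):

def _update_hparams(hparams, is_training):
  """Update hparams for the given is_training option."""
  if not is_training:
    # Drop path is a training-time regularizer; disable it for eval/inference.
    hparams.set_hparam('drop_path_keep_prob', 1.0)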

Example 1: build_pnasnet_large
def build_pnasnet_large(images,
                        num_classes,
                        is_training=True,
                        final_endpoint=None,
                        config=None):
  """Build PNASNet Large model for the ImageNet Dataset."""
  hparams = copy.deepcopy(config) if config else large_imagenet_config()
  # pylint: disable=protected-access
  nasnet._update_hparams(hparams, is_training)
  # pylint: enable=protected-access

  if tf.test.is_gpu_available() and hparams.data_format == 'NHWC':
    tf.logging.info('A GPU is available on the machine, consider using NCHW '
                    'data format for increased speed on GPU.')

  if hparams.data_format == 'NCHW':
    images = tf.transpose(images, [0, 3, 1, 2])

  # Calculate the total number of cells in the network.
  # There is no distinction between reduction and normal cells in PNAS so the
  # total number of cells is equal to the number of normal cells plus the number
  # of stem cells (two by default).
  total_num_cells = hparams.num_cells + 2

  normal_cell = PNasNetNormalCell(hparams.num_conv_filters,
                                  hparams.drop_path_keep_prob, total_num_cells,
                                  hparams.total_training_steps)
  with arg_scope(
      [slim.dropout, nasnet_utils.drop_path, slim.batch_norm],
      is_training=is_training):
    with arg_scope([slim.avg_pool2d, slim.max_pool2d, slim.conv2d,
                    slim.batch_norm, slim.separable_conv2d,
                    nasnet_utils.factorized_reduction,
                    nasnet_utils.global_avg_pool,
                    nasnet_utils.get_channel_index,
                    nasnet_utils.get_channel_dim],
                   data_format=hparams.data_format):
      return _build_pnasnet_base(
          images,
          normal_cell=normal_cell,
          num_classes=num_classes,
          hparams=hparams,
          is_training=is_training,
          final_endpoint=final_endpoint) 
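A minimal usage sketch for the builder above (TF1 graph mode; the 331x331 input size, the 1001-class head, and the (logits, end_points) return pair follow the usual slim conventions and are assumptions here):

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()  # these builders target the TF1 graph-mode API

images = tf.placeholder(tf.float32, [None, 331, 331, 3])  # NHWC ImageNet batch
logits, end_points = build_pnasnet_large(images, num_classes=1001,
                                         is_training=True)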
Example 2: build_pnasnet_mobile
def build_pnasnet_mobile(images,
                         num_classes,
                         is_training=True,
                         final_endpoint=None,
                         config=None):
  """Build PNASNet Mobile model for the ImageNet Dataset."""
  hparams = copy.deepcopy(config) if config else mobile_imagenet_config()
  # pylint: disable=protected-access
  nasnet._update_hparams(hparams, is_training)
  # pylint: enable=protected-access

  if tf.test.is_gpu_available() and hparams.data_format == 'NHWC':
    tf.logging.info('A GPU is available on the machine, consider using NCHW '
                    'data format for increased speed on GPU.')

  if hparams.data_format == 'NCHW':
    images = tf.transpose(images, [0, 3, 1, 2])

  # Calculate the total number of cells in the network.
  # There is no distinction between reduction and normal cells in PNAS so the
  # total number of cells is equal to the number of normal cells plus the number
  # of stem cells (two by default).
  total_num_cells = hparams.num_cells + 2

  normal_cell = PNasNetNormalCell(hparams.num_conv_filters,
                                  hparams.drop_path_keep_prob, total_num_cells,
                                  hparams.total_training_steps)
  with arg_scope(
      [slim.dropout, nasnet_utils.drop_path, slim.batch_norm],
      is_training=is_training):
    with arg_scope(
        [
            slim.avg_pool2d, slim.max_pool2d, slim.conv2d, slim.batch_norm,
            slim.separable_conv2d, nasnet_utils.factorized_reduction,
            nasnet_utils.global_avg_pool, nasnet_utils.get_channel_index,
            nasnet_utils.get_channel_dim
        ],
        data_format=hparams.data_format):
      return _build_pnasnet_base(
          images,
          normal_cell=normal_cell,
          num_classes=num_classes,
          hparams=hparams,
          is_training=is_training,
          final_endpoint=final_endpoint) 
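The mobile builder is called the same way; a quick eval-mode sketch (224x224 is assumed as the PNASNet-Mobile input size). With is_training=False, the _update_hparams sketch above resets drop_path_keep_prob to 1.0, so drop path is effectively disabled at inference:

eval_images = tf.placeholder(tf.float32, [None, 224, 224, 3])  # NHWC eval batch
logits, end_points = build_pnasnet_mobile(eval_images, num_classes=1001,
                                          is_training=False)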
Example 3: build_pnasnet_large (with use_bounded_activation)
def build_pnasnet_large(images,
                        num_classes,
                        is_training=True,
                        final_endpoint=None,
                        config=None):
  """Build PNASNet Large model for the ImageNet Dataset."""
  hparams = copy.deepcopy(config) if config else large_imagenet_config()
  # pylint: disable=protected-access
  nasnet._update_hparams(hparams, is_training)
  # pylint: enable=protected-access

  if tf.test.is_gpu_available() and hparams.data_format == 'NHWC':
    tf.logging.info('A GPU is available on the machine, consider using NCHW '
                    'data format for increased speed on GPU.')

  if hparams.data_format == 'NCHW':
    images = tf.transpose(images, [0, 3, 1, 2])

  # Calculate the total number of cells in the network.
  # There is no distinction between reduction and normal cells in PNAS so the
  # total number of cells is equal to the number of normal cells plus the number
  # of stem cells (two by default).
  total_num_cells = hparams.num_cells + 2

  normal_cell = PNasNetNormalCell(hparams.num_conv_filters,
                                  hparams.drop_path_keep_prob, total_num_cells,
                                  hparams.total_training_steps,
                                  hparams.use_bounded_activation)
  with arg_scope(
      [slim.dropout, nasnet_utils.drop_path, slim.batch_norm],
      is_training=is_training):
    with arg_scope([slim.avg_pool2d, slim.max_pool2d, slim.conv2d,
                    slim.batch_norm, slim.separable_conv2d,
                    nasnet_utils.factorized_reduction,
                    nasnet_utils.global_avg_pool,
                    nasnet_utils.get_channel_index,
                    nasnet_utils.get_channel_dim],
                   data_format=hparams.data_format):
      return _build_pnasnet_base(
          images,
          normal_cell=normal_cell,
          num_classes=num_classes,
          hparams=hparams,
          is_training=is_training,
          final_endpoint=final_endpoint) 
Example 4: build_pnasnet_mobile (with use_bounded_activation)
def build_pnasnet_mobile(images,
                         num_classes,
                         is_training=True,
                         final_endpoint=None,
                         config=None):
  """Build PNASNet Mobile model for the ImageNet Dataset."""
  hparams = copy.deepcopy(config) if config else mobile_imagenet_config()
  # pylint: disable=protected-access
  nasnet._update_hparams(hparams, is_training)
  # pylint: enable=protected-access

  if tf.test.is_gpu_available() and hparams.data_format == 'NHWC':
    tf.logging.info('A GPU is available on the machine, consider using NCHW '
                    'data format for increased speed on GPU.')

  if hparams.data_format == 'NCHW':
    images = tf.transpose(images, [0, 3, 1, 2])

  # Calculate the total number of cells in the network.
  # There is no distinction between reduction and normal cells in PNAS so the
  # total number of cells is equal to the number of normal cells plus the number
  # of stem cells (two by default).
  total_num_cells = hparams.num_cells + 2

  normal_cell = PNasNetNormalCell(hparams.num_conv_filters,
                                  hparams.drop_path_keep_prob, total_num_cells,
                                  hparams.total_training_steps,
                                  hparams.use_bounded_activation)
  with arg_scope(
      [slim.dropout, nasnet_utils.drop_path, slim.batch_norm],
      is_training=is_training):
    with arg_scope(
        [
            slim.avg_pool2d, slim.max_pool2d, slim.conv2d, slim.batch_norm,
            slim.separable_conv2d, nasnet_utils.factorized_reduction,
            nasnet_utils.global_avg_pool, nasnet_utils.get_channel_index,
            nasnet_utils.get_channel_dim
        ],
        data_format=hparams.data_format):
      return _build_pnasnet_base(
          images,
          normal_cell=normal_cell,
          num_classes=num_classes,
          hparams=hparams,
          is_training=is_training,
          final_endpoint=final_endpoint) 
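Examples 3 and 4 differ from the first two only in forwarding hparams.use_bounded_activation to PNasNetNormalCell. A hedged sketch of enabling it through the config argument, reusing the images placeholder from the earlier sketch (assumes the config helpers return an HParams object exposing set_hparam and that the installed version defines this hyperparameter):

config = large_imagenet_config()
config.set_hparam('use_bounded_activation', True)  # bounded (ReLU6-style) activations, e.g. for quantized deployment
logits, end_points = build_pnasnet_large(images, num_classes=1001,
                                         is_training=True, config=config)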
Example 5: build_pnasnet_large (with use_bounded_activation; TF2-compatible tf.transpose keywords)
def build_pnasnet_large(images,
                        num_classes,
                        is_training=True,
                        final_endpoint=None,
                        config=None):
  """Build PNASNet Large model for the ImageNet Dataset."""
  hparams = copy.deepcopy(config) if config else large_imagenet_config()
  # pylint: disable=protected-access
  nasnet._update_hparams(hparams, is_training)
  # pylint: enable=protected-access

  if tf.test.is_gpu_available() and hparams.data_format == 'NHWC':
    tf.logging.info(
        'A GPU is available on the machine, consider using NCHW '
        'data format for increased speed on GPU.')

  if hparams.data_format == 'NCHW':
    images = tf.transpose(a=images, perm=[0, 3, 1, 2])

  # Calculate the total number of cells in the network.
  # There is no distinction between reduction and normal cells in PNAS so the
  # total number of cells is equal to the number of normal cells plus the number
  # of stem cells (two by default).
  total_num_cells = hparams.num_cells + 2

  normal_cell = PNasNetNormalCell(hparams.num_conv_filters,
                                  hparams.drop_path_keep_prob, total_num_cells,
                                  hparams.total_training_steps,
                                  hparams.use_bounded_activation)
  with arg_scope(
      [slim.dropout, nasnet_utils.drop_path, slim.batch_norm],
      is_training=is_training):
    with arg_scope([slim.avg_pool2d, slim.max_pool2d, slim.conv2d,
                    slim.batch_norm, slim.separable_conv2d,
                    nasnet_utils.factorized_reduction,
                    nasnet_utils.global_avg_pool,
                    nasnet_utils.get_channel_index,
                    nasnet_utils.get_channel_dim],
                   data_format=hparams.data_format):
      return _build_pnasnet_base(
          images,
          normal_cell=normal_cell,
          num_classes=num_classes,
          hparams=hparams,
          is_training=is_training,
          final_endpoint=final_endpoint) 
Example 6: build_pnasnet_mobile (with use_bounded_activation; TF2-compatible tf.transpose keywords)
def build_pnasnet_mobile(images,
                         num_classes,
                         is_training=True,
                         final_endpoint=None,
                         config=None):
  """Build PNASNet Mobile model for the ImageNet Dataset."""
  hparams = copy.deepcopy(config) if config else mobile_imagenet_config()
  # pylint: disable=protected-access
  nasnet._update_hparams(hparams, is_training)
  # pylint: enable=protected-access

  if tf.test.is_gpu_available() and hparams.data_format == 'NHWC':
    tf.logging.info(
        'A GPU is available on the machine, consider using NCHW '
        'data format for increased speed on GPU.')

  if hparams.data_format == 'NCHW':
    images = tf.transpose(a=images, perm=[0, 3, 1, 2])

  # Calculate the total number of cells in the network.
  # There is no distinction between reduction and normal cells in PNAS so the
  # total number of cells is equal to the number of normal cells plus the number
  # of stem cells (two by default).
  total_num_cells = hparams.num_cells + 2

  normal_cell = PNasNetNormalCell(hparams.num_conv_filters,
                                  hparams.drop_path_keep_prob, total_num_cells,
                                  hparams.total_training_steps,
                                  hparams.use_bounded_activation)
  with arg_scope(
      [slim.dropout, nasnet_utils.drop_path, slim.batch_norm],
      is_training=is_training):
    with arg_scope(
        [
            slim.avg_pool2d, slim.max_pool2d, slim.conv2d, slim.batch_norm,
            slim.separable_conv2d, nasnet_utils.factorized_reduction,
            nasnet_utils.global_avg_pool, nasnet_utils.get_channel_index,
            nasnet_utils.get_channel_dim
        ],
        data_format=hparams.data_format):
      return _build_pnasnet_base(
          images,
          normal_cell=normal_cell,
          num_classes=num_classes,
          hparams=hparams,
          is_training=is_training,
          final_endpoint=final_endpoint) 
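Examples 5 and 6 are the variants written against the TF2-compatible API surface: tf.transpose is called with explicit a=/perm= keyword arguments, but the result is the same NHWC-to-NCHW transpose as in the earlier examples. A hedged sketch of requesting NCHW up front through the config instead, so the builder performs the transpose internally (HParams API assumed, as above):

config = mobile_imagenet_config()
config.set_hparam('data_format', 'NCHW')  # the builder transposes the NHWC input itself
logits, _ = build_pnasnet_mobile(images, num_classes=1001,
                                 is_training=True, config=config)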