Python source code examples: nets.nasnet.nasnet.cifar_config()
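The examples below are taken from the NASNet test suite in the TF-Slim model library. They show cifar_config() being used to obtain the default CIFAR-10 hyperparameters, override individual values with set_hparam(), and pass the resulting config to build_nasnet_cifar(). For orientation, here is a minimal standalone sketch; the import paths (nets.nasnet, tf_slim) and the TF 1.x calls are assumptions based on the usual slim model-zoo layout and may need adjusting for your checkout.

import tensorflow as tf
import tf_slim as slim  # assumption: tf_slim is installed; older checkouts use tf.contrib.slim
from nets.nasnet import nasnet  # assumption: the slim model zoo is on PYTHONPATH

# cifar_config() returns an HParams object holding the CIFAR NASNet defaults
# (num_cells, use_aux_head, data_format, use_bounded_activation, ...).
config = nasnet.cifar_config()
print(config.values())  # inspect the defaults as a dict

# Override a hyperparameter before building the network.
config.set_hparam('use_aux_head', 0)

inputs = tf.random_uniform((8, 32, 32, 3))  # NHWC CIFAR-sized batch
tf.train.create_global_step()
with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
  logits, end_points = nasnet.build_nasnet_cifar(
      inputs, num_classes=10, config=config)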

Example 1: toggling the auxiliary head via use_aux_head
def testNoAuxHeadCifarModel(self):
    batch_size = 5
    height, width = 32, 32
    num_classes = 10
    for use_aux_head in (True, False):
      tf.reset_default_graph()
      inputs = tf.random_uniform((batch_size, height, width, 3))
      tf.train.create_global_step()
      config = nasnet.cifar_config()
      config.set_hparam('use_aux_head', int(use_aux_head))
      with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
        _, end_points = nasnet.build_nasnet_cifar(inputs, num_classes,
                                                  config=config)
      self.assertEqual('AuxLogits' in end_points, use_aux_head) 
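The int() cast is used because use_aux_head is defined as an integer flag (0/1) in cifar_config() rather than a boolean. The 'AuxLogits' entry appears in end_points only when the auxiliary head is enabled, which is exactly what the assertion checks; tf.reset_default_graph() is called on each loop iteration so the two model builds do not collide in one graph.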
Example 2: overriding data_format to NCHW
def testOverrideHParamsCifarModel(self):
    batch_size = 5
    height, width = 32, 32
    num_classes = 10
    inputs = tf.random_uniform((batch_size, height, width, 3))
    tf.train.create_global_step()
    config = nasnet.cifar_config()
    config.set_hparam('data_format', 'NCHW')
    with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
      _, end_points = nasnet.build_nasnet_cifar(
          inputs, num_classes, config=config)
    self.assertListEqual(
        end_points['Stem'].shape.as_list(), [batch_size, 96, 32, 32]) 
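Overriding data_format to 'NCHW' makes the network build channels-first, so the 'Stem' end point comes out as [batch_size, 96, 32, 32] rather than the channels-last [batch_size, 32, 32, 96]; the 96 channels are the stem output width under the default CIFAR settings.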
Example 3: switching to bounded activations (Relu6)
def testUseBoundedAcitvationCifarModel(self):
    batch_size = 1
    height, width = 32, 32
    num_classes = 10
    for use_bounded_activation in (True, False):
      tf.reset_default_graph()
      inputs = tf.random_uniform((batch_size, height, width, 3))
      config = nasnet.cifar_config()
      config.set_hparam('use_bounded_activation', use_bounded_activation)
      with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
        _, _ = nasnet.build_nasnet_cifar(
            inputs, num_classes, config=config)
      for node in tf.get_default_graph().as_graph_def().node:
        if node.op.startswith('Relu'):
          self.assertEqual(node.op == 'Relu6', use_bounded_activation) 
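When use_bounded_activation is set, the model emits ReLU6 ops in place of the unbounded ReLU, so the test walks the GraphDef and asserts that every node whose op starts with 'Relu' is a 'Relu6' op exactly when the flag is True.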