Python source code examples: six.moves.queue.put()
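In six, `six.moves.queue` is simply an alias for the standard-library `Queue` module on Python 2 and `queue` on Python 3, so `put()` behaves exactly like `queue.Queue.put()`. As a quick orientation before the collected examples, here is a minimal, self-contained sketch (not taken from any of the examples below):

from six.moves import queue

q = queue.Queue(maxsize=2)
q.put("first")               # blocks if the queue is already full
q.put("second", timeout=1)   # raises queue.Full after 1 second if still full

print(q.get())               # -> "first" (FIFO order)
print(q.get())               # -> "second"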

Example 1
def _request_wrapper(self, queue, url, params, timeout):
        """
        Wrapper to requests used by each thread.

        Parameters
        ----------
        queue : Queue.Queue
            The Queue into which the response from the request is put.
        url : str
            The URL to be queried.
        params : dict
            A dictionary of parameters to pass to the request.
        timeout : int
            Timeout to wait for a response to the request.
        """
        response = self.session.get(url, params=params, verify=self.verify, timeout=timeout)
        queue.put(response) 
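A hedged usage sketch for the pattern in Example 1: several threads each run a wrapper that puts its result into a shared queue, and the caller drains the queue afterwards. The `fetch` helper and the URLs below are hypothetical stand-ins for `_request_wrapper` and its arguments, not part of the original class.

from threading import Thread
from six.moves import queue

def fetch(result_queue, url):
    # Hypothetical stand-in for _request_wrapper: do the work, queue the result.
    result_queue.put("response for %s" % url)

results = queue.Queue()
urls = ["https://example.com/a", "https://example.com/b"]

threads = [Thread(target=fetch, args=(results, u)) for u in urls]
for t in threads:
    t.start()
for t in threads:
    t.join()

while not results.empty():
    print(results.get())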
Example 2
def __init__(self, stream, raise_EOF=False, print_output=True, print_new_line=True, name=None):
        '''
        stream: the stream to read from.
                Usually a process' stdout or stderr.
        raise_EOF: if True, raise UnexpectedEndOfStream
                when the stream reaches EOF before the kill event is set.
        print_output: if True, log each line as it is read.
        print_new_line: if True, skip logging a line that repeats the
                previous one.
        name: optional name used in log messages; defaults to id(self).
        '''
        self._s = stream
        self._q = queue.Queue()
        self._lastline = None
        self.name = name or id(self)

        def _populateQueue(stream, queue, kill_event):
            '''
            Collect lines from 'stream' and put them in 'queue'.
            '''
            while not kill_event.is_set():
                line = stream.readline()
                if line:
                    queue.put(line)
                    if print_output:
                        # when print_new_line is set, skip lines identical to the previous one
                        if print_new_line and line == self._lastline:
                            continue
                        self._lastline = line
                        LOGGING.debug("[%s]%s" % (self.name, repr(line.strip())))
                elif kill_event.is_set():
                    break
                elif raise_EOF:
                    raise UnexpectedEndOfStream
                else:
                    # print("EndOfStream: %s" % self.name)
                    break

        self._kill_event = Event()
        self._t = Thread(
            target=_populateQueue,
            args=(self._s, self._q, self._kill_event),
            name="nbsp_%s" % self.name,
        )
        self._t.daemon = True
        self._t.start()  # start collecting lines from the stream 
Example 3
def _enqueue_output(out, queue):
    """Enqueues lines from an output stream."""
    for line in iter(out.readline, b""):
        queue.put(line.decode("utf-8"))
    out.close() 
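A usage sketch for `_enqueue_output` in Example 3, assuming it is run on a daemon thread that reads a subprocess pipe; the child command here is only an illustration.

import subprocess
import sys
from threading import Thread
from six.moves import queue

proc = subprocess.Popen(
    ["python", "-c", "print('hello from child')"],
    stdout=subprocess.PIPE,
)
lines = queue.Queue()

# Read the pipe on a background thread so the main thread never blocks on it.
reader = Thread(target=_enqueue_output, args=(proc.stdout, lines))
reader.daemon = True
reader.start()

# Poll for output while the child is still running.
while proc.poll() is None:
    try:
        sys.stdout.write(lines.get(timeout=0.1))
    except queue.Empty:
        pass

# The child has exited: wait for the reader to hit EOF, then drain the rest.
reader.join()
while not lines.empty():
    sys.stdout.write(lines.get())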
Example 4
def _queue_record(self, queue, record):
        if not record['gt_boxes']:
            tf.logging.debug(
                'Dropping record {} without gt_boxes.'.format(record))
            return

        # If asking for a limited number per class, only yield if the current
        # example adds at least 1 new class that hasn't been maxed out. For
        # example, if "Persons" has been maxed out but "Bus" has not, a new
        # image containing only instances of "Person" will not be yielded,
        # while an image containing both "Person" and "Bus" instances will.
        if self._class_examples:
            labels_in_image = set([
                self.classes[bbox['label']] for bbox in record['gt_boxes']
            ])
            not_maxed_out = labels_in_image - self._maxed_out_classes

            if not not_maxed_out:
                tf.logging.debug(
                    'Dropping record {} with maxed-out labels: {}'.format(
                        record['filename'], labels_in_image))
                return

            tf.logging.debug(
                'Queuing record {} with labels: {}'.format(
                    record['filename'], labels_in_image))

        self._will_add_record(record)
        queue.put(record) 
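The per-class limit in Example 4 comes down to a set difference: a record is queued only if it contributes at least one class that has not been maxed out yet. A tiny illustration with made-up class names:

maxed_out_classes = {"Person"}

labels_in_image = {"Person"}                  # only maxed-out classes
print(labels_in_image - maxed_out_classes)    # empty difference -> record is dropped

labels_in_image = {"Person", "Bus"}           # adds a class with room left
print(labels_in_image - maxed_out_classes)    # {'Bus'} -> record is queued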
Example 5
def _complete_records(self, input_queue, output_queue):
        """
        Daemon thread that will complete queued records from `input_queue` and
        put them in `output_queue`, where they will be read and yielded by the
        main thread.

        This is the thread that will actually download the images of the
        dataset.
        """
        while True:
            try:
                partial_record = input_queue.get()

                image_id = partial_record['filename']
                image_raw = read_image(self._get_image_path(image_id))
                image = Image.open(six.BytesIO(image_raw))

                for gt_box in partial_record['gt_boxes']:
                    gt_box['xmin'] *= image.width
                    gt_box['ymin'] *= image.height
                    gt_box['xmax'] *= image.width
                    gt_box['ymax'] *= image.height

                partial_record['width'] = image.width
                partial_record['height'] = image.height
                partial_record['depth'] = 3 if image.mode == 'RGB' else 1
                partial_record['image_raw'] = image_raw

                output_queue.put(partial_record)
            except Exception as e:
                tf.logging.error(
                    'Error processing record: {}'.format(partial_record))
                tf.logging.error(e)
                self.errors += 1
            finally:
                input_queue.task_done() 
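Examples 4 and 5 come from the same dataset reader and share a producer/consumer structure: partial records are put on an input queue, daemon workers complete them and put the results on an output queue, and each worker acknowledges items with task_done() so the producer can wait with join(). A stripped-down sketch of that structure, with hypothetical "completion" work standing in for the image download:

from threading import Thread
from six.moves import queue

input_queue = queue.Queue()
output_queue = queue.Queue()

def worker():
    # Mirrors the shape of _complete_records: get, process, put, task_done.
    while True:
        partial = input_queue.get()
        try:
            completed = dict(partial, status="complete")  # hypothetical work
            output_queue.put(completed)
        finally:
            input_queue.task_done()

for _ in range(2):
    t = Thread(target=worker)
    t.daemon = True
    t.start()

for i in range(5):
    input_queue.put({"filename": "image_%d" % i})

input_queue.join()                 # returns once every item got task_done()
while not output_queue.empty():
    print(output_queue.get())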