Python source code examples: aiohttp.ClientTimeout()
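Every example below builds an aiohttp.ClientTimeout and hands it either to a ClientSession (session-wide default) or to a single request (per-request override). As a quick orientation, here is a minimal, self-contained sketch of that pattern; the URL and the timeout values are placeholders chosen for illustration, not taken from any of the examples:

import asyncio
import aiohttp

async def main():
    # total bounds the whole request; connect, sock_connect and sock_read
    # bound individual phases. A field left as None means "no limit".
    timeout = aiohttp.ClientTimeout(total=30, connect=5, sock_read=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        # a timeout passed to the request overrides the session-wide one
        # (example.com and the values here are placeholder choices)
        async with session.get('https://example.com',
                               timeout=aiohttp.ClientTimeout(total=5)) as resp:
            print(resp.status)

asyncio.run(main())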
Example 1
def __init__(self, api, host, port, username, password,
             iter_cnt=-1, iter_delay=600,
             task_timeout=120, worker_cnt=4,
             post_timeout=60, no_verify_ssl=False):
    '''[summary]
    '''
    self._api = api
    self._workers = []
    self._iter_cnt = iter_cnt
    self._iter_delay = iter_delay
    self._worker_cnt = worker_cnt
    self._task_queue = Queue()
    self._task_timeout = task_timeout
    self._output_lock = Lock()
    self._url = f'https://{host}:{port}/mkctf-api/healthcheck'
    self._ssl = False if no_verify_ssl else None
    self._auth = BasicAuth(username, password)
    self._post_timeout = ClientTimeout(total=post_timeout)
Example 2
async def push(self, host, port=443, tags=[], categories=[],
               username='', password='', no_verify_ssl=False):
    '''Push challenge configuration to a scoreboard
    '''
    self.__assert_valid_repo()
    challenges = []
    for challenge in self._repo.scan(tags, categories):
        challenges.append(challenge.conf.raw)
    url = f'https://{host}:{port}/mkctf-api/push'
    ssl = False if no_verify_ssl else None
    auth = BasicAuth(username, password)
    timeout = ClientTimeout(total=2*60)
    async with ClientSession(auth=auth, timeout=timeout) as session:
        async with session.post(url, ssl=ssl, json={'challenges': challenges}) as resp:
            if resp.status < 400:
                app_log.info("push succeeded.")
                return {'pushed': True}
            app_log.error("push failed.")
            return {'pushed': False}
Example 3
async def _get_response(self, full_url, headers, timeout):
    try:
        timeout = aiohttp.ClientTimeout(total=self.TIMEOUT_SECONDS)
        async with self._session(timeout=timeout) as session:
            async with session.get(full_url, headers=headers) as resp:
                if resp.status != 200:
                    text = await resp.text()
                    raise MetadataRetrievalError(
                        error_msg=(
                            "Received non 200 response (%d) "
                            "from ECS metadata: %s"
                        ) % (resp.status, text))
                try:
                    return await resp.json()
                except ValueError:
                    text = await resp.text()
                    error_msg = (
                        "Unable to parse JSON returned from ECS metadata services"
                    )
                    logger.debug('%s:%s', error_msg, text)
                    raise MetadataRetrievalError(error_msg=error_msg)
    except RETRYABLE_HTTP_ERRORS as e:
        error_msg = ("Received error when attempting to retrieve "
                     "ECS metadata: %s" % e)
        raise MetadataRetrievalError(error_msg=error_msg)
Example 4
async def test_connect_proxy_ip(loop):
    tr, proto = mock.Mock(name='transport'), mock.Mock(name='protocol')
    with mock.patch('aiosocks.connector.create_connection',
                    make_mocked_coro((tr, proto))):
        loop.getaddrinfo = make_mocked_coro(
            [[0, 0, 0, 0, ['127.0.0.1', 1080]]])
        req = ProxyClientRequest(
            'GET', URL('http://python.org'), loop=loop,
            proxy=URL('socks5://proxy.org'))
        connector = ProxyConnector(loop=loop)
        conn = await connector.connect(req, [], ClientTimeout())
        assert loop.getaddrinfo.called
        assert conn.protocol is proto
        conn.close()
Example 5
async def test_connect_proxy_domain():
    tr, proto = mock.Mock(name='transport'), mock.Mock(name='protocol')
    with mock.patch('aiosocks.connector.create_connection',
                    make_mocked_coro((tr, proto))):
        loop_mock = mock.Mock()
        req = ProxyClientRequest(
            'GET', URL('http://python.org'), loop=loop_mock,
            proxy=URL('socks5://proxy.example'))
        connector = ProxyConnector(loop=loop_mock)
        connector._resolve_host = make_mocked_coro([mock.MagicMock()])
        conn = await connector.connect(req, [], ClientTimeout())
        assert connector._resolve_host.call_count == 1
        assert conn.protocol is proto
        conn.close()
Example 6
async def test_connect_remote_resolve(loop):
    tr, proto = mock.Mock(name='transport'), mock.Mock(name='protocol')
    with mock.patch('aiosocks.connector.create_connection',
                    make_mocked_coro((tr, proto))):
        req = ProxyClientRequest(
            'GET', URL('http://python.org'), loop=loop,
            proxy=URL('socks5://127.0.0.1'))
        connector = ProxyConnector(loop=loop, remote_resolve=True)
        connector._resolve_host = make_mocked_coro([mock.MagicMock()])
        conn = await connector.connect(req, [], ClientTimeout())
        assert connector._resolve_host.call_count == 1
        assert conn.protocol is proto
        conn.close()
Example 7
async def test_connect_locale_resolve(loop):
    tr, proto = mock.Mock(name='transport'), mock.Mock(name='protocol')
    with mock.patch('aiosocks.connector.create_connection',
                    make_mocked_coro((tr, proto))):
        req = ProxyClientRequest(
            'GET', URL('http://python.org'), loop=loop,
            proxy=URL('socks5://proxy.example'))
        connector = ProxyConnector(loop=loop, remote_resolve=False)
        connector._resolve_host = make_mocked_coro([mock.MagicMock()])
        conn = await connector.connect(req, [], ClientTimeout())
        assert connector._resolve_host.call_count == 2
        assert conn.protocol is proto
        conn.close()
Example 8
async def test_proxy_connect_http(loop):
    tr, proto = mock.Mock(name='transport'), mock.Mock(name='protocol')
    loop_mock = mock.Mock()
    loop_mock.getaddrinfo = make_mocked_coro([
        [0, 0, 0, 0, ['127.0.0.1', 1080]]])
    loop_mock.create_connection = make_mocked_coro((tr, proto))
    loop_mock.create_task.return_value = asyncio.Task(
        make_mocked_coro([
            {'host': 'host', 'port': 80, 'family': 1,
             'hostname': 'hostname', 'flags': 11, 'proto': 'proto'}])())
    req = ProxyClientRequest(
        'GET', URL('http://python.org'), loop=loop,
        proxy=URL('http://127.0.0.1'))
    connector = ProxyConnector(loop=loop_mock)
    await connector.connect(req, [], ClientTimeout())
Example 9
def __init__(self):
    super().__init__()
    self.setLayout(QVBoxLayout())
    self.lblStatus = QLabel('Idle', self)
    self.layout().addWidget(self.lblStatus)
    self.editUrl = QLineEdit(self._DEF_URL, self)
    self.layout().addWidget(self.editUrl)
    self.editResponse = QTextEdit('', self)
    self.layout().addWidget(self.editResponse)
    self.btnFetch = QPushButton('Fetch', self)
    self.btnFetch.clicked.connect(self.on_btnFetch_clicked)
    self.layout().addWidget(self.btnFetch)
    self.session = aiohttp.ClientSession(
        loop=asyncio.get_event_loop(),
        timeout=aiohttp.ClientTimeout(total=self._SESSION_TIMEOUT))
Example 10
def __init__(self, ports=[80], extended=False):
    RobotAdapter.__init__(self, ports, extended)
    self.rosin_nodes = ['/streaming_client',  # ABB
                        '/motion_download_interface',  # ABB
                        '/robot_state',  # ABB
                        '/joint_trajectory_action',  # ABB
                        '/kuka_eki_hw_interface',  # KUKA
                        '/controller_spawner',  # KUKA
                        '/motion_streaming_interface',  # FANUC
                        '/industrial_robot_client',  # FANUC
                        '/joint_state',  # FANUC
                        '/kuka_rsi_simulator'  # KUKA
                        ]
    self.timeout = aiohttp.ClientTimeout(total=3)
    self.logger = logging.getLogger(__name__)
    self.hosts = []
    self.rate = 1000
Example 11
async def subscribe(self):
    # This connection should live ~forever, so disable some timeouts.
    timeout = aiohttp.ClientTimeout(
        total=None, sock_read=None, connect=30, sock_connect=30,
    )
    async with aiohttp.ClientSession(timeout=timeout) as session:
        payload = '{"type":"SUBSCRIBE"}'
        master_host_port = mesos_tools.find_mesos_leader(cluster=self.cluster)
        async with session.post(
            f"http://{master_host_port}/api/v1",
            data=payload,
            # allow_redirects=True,
            headers={"Content-Type": "application/json"},
            timeout=timeout,
        ) as resp:
            while True:
                _size = await resp.content.readline()
                if not _size:
                    break
                size = int(_size)
                record = await resp.content.readexactly(size)
                yield json.loads(record)
Example 12
def create_client_session(*args, **kwargs) -> aiohttp.ClientSession:
    """
    Creates client session with reasonable defaults.
    For details about available parameters refer to
    `aiohttp.ClientSession <https://docs.aiohttp.org/en/stable/client_reference.html>`_

    Exemplary customization:

    .. code-block:: python

        from galaxy.http import create_client_session, create_tcp_connector

        session = create_client_session(
            headers={
                "Keep-Alive": "true"
            },
            connector=create_tcp_connector(limit=40),
            timeout=100)
    """
    kwargs.setdefault("connector", create_tcp_connector())
    kwargs.setdefault("timeout", aiohttp.ClientTimeout(total=DEFAULT_TIMEOUT))
    kwargs.setdefault("raise_for_status", True)
    # due to https://github.com/python/mypy/issues/4001
    return aiohttp.ClientSession(*args, **kwargs)  # type: ignore
Example 13
async def _start_detached(
    self, timeout: aiohttp.ClientTimeout = None, tty: bool = False,
) -> bytes:
    if self._tty is None:
        await self.inspect()  # should restore tty
    assert self._tty is not None
    async with self.docker._query(
        f"exec/{self._id}/start",
        method="POST",
        headers={"Content-Type": "application/json"},
        data=json.dumps({"Detach": True, "Tty": tty}),
        timeout=timeout,
    ) as response:
        result = await response.read()
        await response.release()
        return result
Example 14
async def test_can_choose_a_different_timeout_on_client_instantiation(self):
    new_timeout = ClientTimeout(total=2, connect=5)
    client = HttpClient(timeout=new_timeout)
    client.session_class = self.session_class_mock
    await client.get(TEST_URL)
    client.session_class.assert_called_with(
        timeout=new_timeout, headers=ANY, raise_for_status=True
    )
    client._session.request.assert_awaited_with(
        "get",
        ANY,
        timeout=None,
        headers=ANY,
        allow_redirects=True,
        raise_for_status=True,
    )
Example 15
async def test_can_override_timeout_passing_a_new_timeout_on_the_request(
    self
):
    """
    client.get(..., timeout=ClientTimeout(...))
    """
    timeout = ClientTimeout(connect=1, total=5)
    client = HttpClient()
    client.session_class = self.session_class_mock
    await client.get(TEST_URL, timeout=timeout)
    client._session.request.assert_awaited_with(
        "get",
        ANY,
        timeout=timeout,
        headers=ANY,
        allow_redirects=True,
        raise_for_status=True,
    )
Example 16
async def test_can_override_option_to_automatically_raise_when_request_fails(
    self
):
    timeout = ClientTimeout(connect=1, total=5)
    client = HttpClient()
    client.session_class = self.session_class_mock
    await client.get(TEST_URL, raise_for_status=False)
    client._session.request.assert_awaited_with(
        "get",
        ANY,
        timeout=ANY,
        headers=ANY,
        raise_for_status=False,
        allow_redirects=True,
    )
Example 17
async def __aenter__(self) -> "Master":
    logger.debug("Initializing Master's aiohttp ClientSession")
    custom_headers = {"User-Agent": USER_AGENT}
    skip_headers = {"User-Agent"}
    aiohttp_timeout = aiohttp.ClientTimeout(
        total=self.global_timeout,
        sock_connect=self.timeout,
        sock_read=self.timeout,
    )
    self.session = aiohttp.ClientSession(
        headers=custom_headers,
        skip_auto_headers=skip_headers,
        timeout=aiohttp_timeout,
        trust_env=True,
        raise_for_status=True,
    )
    return self
Example 18
async def search(searchterm):
    magnet_links = []
    timeout = aiohttp.ClientTimeout(total=10)
    try:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(
                f"{settings.JACKETT_HOST}/api/v2.0/indexers/all/results?apikey={settings.JACKETT_API_KEY}&Query={searchterm}"
            ) as resp:
                data = await resp.json()
                results = data["Results"]
                for result in results:
                    if result.get("MagnetUri") and result.get("Title"):
                        magnet_links.append(
                            dict(
                                seeds=result.get("Seeders", 0),
                                title=result["Title"],
                                magnet=result["MagnetUri"],
                            )
                        )
    except Exception:
        log.write_log()
    return magnet_links
Example 19
async def cmd_setavatar(self, message, url=None):
    """
    Usage:
        {command_prefix}setavatar [url]

    Changes the bot's avatar.
    Attaching a file and leaving the url parameter blank also works.
    """
    if message.attachments:
        thing = message.attachments[0].url
    elif url:
        thing = url.strip('<>')
    else:
        raise exceptions.CommandError("You must provide a URL or attach a file.", expire_in=20)
    try:
        timeout = aiohttp.ClientTimeout(total=10)
        async with self.aiosession.get(thing, timeout=timeout) as res:
            await self.user.edit(avatar=await res.read())
    except Exception as e:
        raise exceptions.CommandError("Unable to change avatar: {}".format(e), expire_in=20)
    return Response("Changed the bot's avatar.", delete_after=20)
Example 20
def __init__(self, *, timeout=defaults.TIMEOUT, loop):
    self._loop = loop
    self._timeout = timeout
    self._resolver = aiodns.DNSResolver(timeout=timeout, loop=loop)
    self._http_timeout = aiohttp.ClientTimeout(total=timeout)
    self._proxy_info = aiohttp.helpers.proxies_from_env().get('https', None)
    self._logger = logging.getLogger("RES")
    if self._proxy_info is None:
        self._proxy = None
        self._proxy_auth = None
    else:
        self._proxy = self._proxy_info.proxy
        self._proxy_auth = self._proxy_info.proxy_auth

# pylint: disable=too-many-locals,too-many-branches,too-many-return-statements
Example 21
async def fetch(url, proxy_pool, timeout, loop):
    resp, proxy = None, None
    try:
        print('Waiting a proxy...')
        proxy = await proxy_pool.get(scheme=urlparse(url).scheme)
        print('Found proxy:', proxy)
        proxy_url = 'http://%s:%d' % (proxy.host, proxy.port)
        _timeout = aiohttp.ClientTimeout(total=timeout)
        async with aiohttp.ClientSession(
            timeout=_timeout, loop=loop
        ) as session, session.get(url, proxy=proxy_url) as response:
            resp = await response.text()
    except (
        aiohttp.ClientOSError,
        aiohttp.ClientResponseError,
        aiohttp.ServerDisconnectedError,
        asyncio.TimeoutError,
        NoProxyError,
    ) as e:
        print('Error!\nURL: %s;\nError: %r\n', url, e)
    finally:
        if proxy:
            proxy_pool.put(proxy)
    return (url, resp)
Example 22
async def _get(self, url, data=None, headers=None, method='GET'):
    page = ''
    try:
        timeout = aiohttp.ClientTimeout(total=self._timeout)
        async with self._sem_provider, self._session.request(
            method, url, data=data, headers=headers, timeout=timeout
        ) as resp:
            page = await resp.text()
            if resp.status != 200:
                log.debug(
                    'url: %s\nheaders: %s\ncookies: %s\npage:\n%s'
                    % (url, resp.headers, resp.cookies, page)
                )
                raise BadStatusError('Status: %s' % resp.status)
    except (
        UnicodeDecodeError,
        BadStatusError,
        asyncio.TimeoutError,
        aiohttp.ClientOSError,
        aiohttp.ClientResponseError,
        aiohttp.ServerDisconnectedError,
    ) as e:
        page = ''
        log.debug('%s is failed. Error: %r;' % (url, e))
    return page
Example 23
async def get_real_ext_ip(self):
    """Return real external IP address."""
    while self._ip_hosts:
        try:
            timeout = aiohttp.ClientTimeout(total=self._timeout)
            async with aiohttp.ClientSession(
                timeout=timeout, loop=self._loop
            ) as session, session.get(self._pop_random_ip_host()) as resp:
                ip = await resp.text()
        except asyncio.TimeoutError:
            pass
        else:
            ip = ip.strip()
            if self.host_is_ip(ip):
                log.debug('Real external IP: %s', ip)
                break
    else:
        raise RuntimeError('Could not get the external IP')
    return ip
Example 24
async def post(self, url: str, data: Dict[str, str] = None) -> Response:
    """Perform HTTP POST request.

    :param url: the request url
    :param data: the data sent to the server
    :return: the response from server
    :raise: :exc:`ConnectionError <stellar_sdk.exceptions.ConnectionError>`
    """
    try:
        response = await self._session.post(url, data=data, timeout=aiohttp.ClientTimeout(total=self.post_timeout))
        return Response(
            status_code=response.status,
            text=await response.text(),
            headers=dict(response.headers),
            url=str(response.url),
        )
    except aiohttp.ClientConnectionError as e:
        raise ConnectionError(e)
Example 25
def __init__(self, user_id: int, shard_count: int = 1,
             player=DefaultPlayer, regions: dict = None, connect_back: bool = False):
    if not isinstance(user_id, int):
        raise TypeError('user_id must be an int (got {}). If the type is None, '
                        'ensure your bot has fired "on_ready" before instantiating '
                        'the Lavalink client. Alternatively, you can hardcode your user ID.'
                        .format(user_id))
    if not isinstance(shard_count, int):
        raise TypeError('shard_count must be an int with a positive value.')
    self._user_id = str(user_id)
    self._shard_count = str(shard_count)
    self.node_manager = NodeManager(self, regions)
    self.player_manager = PlayerManager(self, player)
    self._connect_back = connect_back
    self._logger = logging.getLogger('lavalink')
    self._session = aiohttp.ClientSession(
        timeout=aiohttp.ClientTimeout(total=30)
    )
Example 26
async def get_response(self, url: str) -> dict:
    """Get responses from twitch after checking rate limits"""
    await self.oauth_check()
    header = await self.get_header()
    await self.wait_for_rate_limit_reset()
    async with aiohttp.ClientSession() as session:
        async with session.get(
            url, headers=header, timeout=aiohttp.ClientTimeout(total=None)
        ) as resp:
            remaining = resp.headers.get("Ratelimit-Remaining")
            if remaining:
                self.rate_limit_remaining = int(remaining)
            reset = resp.headers.get("Ratelimit-Reset")
            if reset:
                self.rate_limit_resets.add(int(reset))
            if resp.status == 429:
                log.info("Trying again")
                return await self.get_response(url)
            return await resp.json()

#####################################################################################
Example 27
def __init__(self, address: str,
             send_interval: float = 5,
             loop: OptLoop = None,
             *,
             send_max_size: int = 100,
             send_attempt_count: int = 3,
             send_timeout: Optional[aiohttp.ClientTimeout] = None
             ) -> None:
    self._address = URL(address)
    self._queue: DataList = []
    self._closing = False
    self._send_interval = send_interval
    self._loop = loop or asyncio.get_event_loop()
    if send_timeout is None:
        send_timeout = DEFAULT_TIMEOUT
    self._session = aiohttp.ClientSession(
        loop=self._loop, timeout=send_timeout,
        headers={'Content-Type': 'application/json'})
    self._batch_manager = BatchManager(send_max_size,
                                       send_interval, send_attempt_count,
                                       self._send_data,
                                       self._loop)
Example 28
async def connect(self) -> None:
    """Coroutine which will:

    - create an aiohttp ClientSession() as self.session

    Should be cleaned with a call to the close coroutine
    """
    if self.session is None:
        client_session_args: Dict[str, Any] = {
            "cookies": self.cookies,
            "headers": self.headers,
            "auth": self.auth,
        }
        if self.timeout is not None:
            client_session_args["timeout"] = aiohttp.ClientTimeout(
                total=self.timeout
            )
        # Adding custom parameters passed from init
        client_session_args.update(self.client_session_args)
        self.session = aiohttp.ClientSession(**client_session_args)
    else:
        raise TransportAlreadyConnected("Transport is already connected")
Example 29
def __init__(
    self,
    endpoint: QnAMakerEndpoint,
    options: QnAMakerOptions = None,
    http_client: ClientSession = None,
    telemetry_client: BotTelemetryClient = None,
    log_personal_information: bool = None,
):
    super().__init__(log_personal_information, telemetry_client)
    if not isinstance(endpoint, QnAMakerEndpoint):
        raise TypeError(
            "QnAMaker.__init__(): endpoint is not an instance of QnAMakerEndpoint"
        )
    self._endpoint: str = endpoint
    opt = options or QnAMakerOptions()
    self._validate_options(opt)
    instance_timeout = ClientTimeout(total=opt.timeout / 1000)
    self._http_client = http_client or ClientSession(timeout=instance_timeout)
    self.telemetry_client: Union[
        BotTelemetryClient, NullTelemetryClient
    ] = telemetry_client or NullTelemetryClient()
    self.log_personal_information = log_personal_information or False
    self._generate_answer_helper = GenerateAnswerUtils(
        self.telemetry_client, self._endpoint, options, self._http_client
    )
    self._active_learning_train_helper = TrainUtils(
        self._endpoint, self._http_client
    )
Example 30
async def run(self, host):
    tasks = []
    # limit defaults to 100; enable_cleanup_closed=True avoids leaking SSL
    # connections; ttl_dns_cache raises the DNS cache TTL
    conn = aiohttp.TCPConnector(
        limit=LIMIT,
        enable_cleanup_closed=True,
        ttl_dns_cache=100,
        ssl=False,
    )
    timeout = aiohttp.ClientTimeout(total=60, connect=2)
    async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
        for url in self.urls:
            task = asyncio.ensure_future(self.scan(host, url, session))
            tasks.append(task)
        # gather only returns once every request has completed
        _ = await asyncio.gather(*tasks)
        # for i in asyncio.as_completed(tasks):  # similar to tasks in a thread pool
        #     answer = await i
# create and launch the tasks