Python source code examples: azure.storage.blob.BlockBlobService()
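The examples below are drawn from open-source projects and show how azure.storage.blob.BlockBlobService() is used in practice. BlockBlobService belongs to the legacy azure-storage-blob SDK (up to version 2.1); the 12.x SDK replaces it with BlobServiceClient. As a minimal sketch of the basic round trip, assuming a hypothetical account name 'mystorageacct' and a placeholder key:

from azure.storage.blob import BlockBlobService

# Authenticate with an account key (a SAS token or connection string also works).
service = BlockBlobService(account_name='mystorageacct', account_key='<account-key>')

service.create_container('demo')  # returns False if the container already exists
service.create_blob_from_text('demo', 'hello.txt', 'hello, blob')
print(service.get_blob_to_text('demo', 'hello.txt').content)
for blob in service.list_blobs('demo'):
    print(blob.name)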
Example 1
def download_from_container(self, resource_group_name, account_name, container_name, files):
    try:
        secret_key = meta_lib.AzureMeta().list_storage_keys(resource_group_name, account_name)[0]
        block_blob_service = BlockBlobService(account_name=account_name, account_key=secret_key)
        for filename in files:
            block_blob_service.get_blob_to_path(container_name, filename, filename)
        return ''
    except azure.common.AzureMissingResourceHttpError:
        return ''
    except Exception as err:
        # traceback.print_exc() returns None, so use format_exc() when
        # embedding the traceback in a message
        logging.info(
            "Unable to download files from container: " + str(err) +
            "\n Traceback: " + traceback.format_exc())
        append_result(str({"error": "Unable to download files from container",
                           "error_message": str(err) +
                           "\n Traceback: " + traceback.format_exc()}))
        traceback.print_exc(file=sys.stdout)
Example 2
def list_container_content(self, resource_group_name, account_name, container_name):
    try:
        result = []
        secret_key = list_storage_keys(resource_group_name, account_name)[0]
        block_blob_service = BlockBlobService(account_name=account_name, account_key=secret_key)
        content = block_blob_service.list_blobs(container_name)
        for blob in content:
            result.append(blob.name)
        return result
    except Exception as err:
        # as in Example 1, format_exc() yields the traceback as a string
        logging.info(
            "Unable to list container content: " + str(err) +
            "\n Traceback: " + traceback.format_exc())
        append_result(str({"error": "Unable to list container content",
                           "error_message": str(err) +
                           "\n Traceback: " + traceback.format_exc()}))
        traceback.print_exc(file=sys.stdout)
Example 3
def __init__(
        self, blob_client: azureblob.BlockBlobService,
        resource: str, blob_name: str, nglobalresources: int):
    """ContainerImageSaveThread ctor
    :param azureblob.BlockBlobService blob_client: blob client
    :param str resource: resource
    :param str blob_name: resource blob name
    :param int nglobalresources: number of global resources
    """
    threading.Thread.__init__(self)
    self.blob_client = blob_client
    self.resource = resource
    self.blob_name = blob_name
    self.nglobalresources = nglobalresources
    # add to downloading set
    with _DIRECTDL_LOCK:
        _DIRECTDL_DOWNLOADING.add(self.resource)
Example 4
def _check_file_and_upload(blob_client, file, key, container=None):
    # type: (azure.storage.blob.BlockBlobService, tuple, str, str) -> None
    """Upload file to blob storage if necessary
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param tuple file: file to upload
    :param str key: blob container key
    :param str container: absolute container override
    """
    if file[0] is None:
        return
    contname = container or _STORAGE_CONTAINERS[key]
    upload = True
    # check if blob exists
    try:
        prop = blob_client.get_blob_properties(contname, file[0])
        if (prop.properties.content_settings.content_md5 ==
                util.compute_md5_for_file(file[1], True)):
            logger.debug(
                'remote file is the same for {}, skipping'.format(file[0]))
            upload = False
    except azure.common.AzureMissingResourceHttpError:
        pass
    if upload:
        logger.info('uploading file {} as {!r}'.format(file[1], file[0]))
        blob_client.create_blob_from_path(contname, file[0], str(file[1]))
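The MD5 short-circuit above depends on the project-internal helper util.compute_md5_for_file. A self-contained sketch of the same check-before-upload pattern, assuming the blob was written by a client that populates content_md5 (the legacy SDK does so by default for single-shot uploads); needs_upload and its arguments are hypothetical names:

import base64
import hashlib

import azure.common


def needs_upload(blob_client, container, blob_name, local_path):
    # The service stores the base64-encoded MD5 digest in
    # content_settings.content_md5; compute the same digest locally.
    md5 = hashlib.md5()
    with open(local_path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            md5.update(chunk)
    local_md5 = base64.b64encode(md5.digest()).decode('ascii')
    try:
        props = blob_client.get_blob_properties(container, blob_name)
    except azure.common.AzureMissingResourceHttpError:
        return True  # blob does not exist yet
    return props.properties.content_settings.content_md5 != local_md5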
Example 5
def delete_resource_file(blob_client, blob_name, federation_id=None):
    # type: (azure.storage.blob.BlockBlobService, str, str) -> bool
    """Delete a resource file from blob storage
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param str blob_name: blob name
    :param str federation_id: federation id
    :rtype: bool
    :return: True if the blob was deleted, False if it did not exist
    """
    if util.is_not_empty(federation_id):
        fedhash = hash_federation_id(federation_id)
        container = '{}-{}'.format(
            _STORAGE_CONTAINERS['blob_federation'], fedhash)
    else:
        container = _STORAGE_CONTAINERS['blob_resourcefiles']
    try:
        blob_client.delete_blob(container, blob_name)
        logger.debug('blob {} deleted from container {}'.format(
            blob_name, container))
    except azure.common.AzureMissingResourceHttpError:
        logger.warning('blob {} does not exist in container {}'.format(
            blob_name, container))
        return False
    return True
Example 6
def upload_resource_files(blob_client, files):
    # type: (azure.storage.blob.BlockBlobService, List[tuple]) -> dict
    """Upload resource files to blob storage
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param list files: files to upload
    :rtype: dict
    :return: sas url dict
    """
    sas_urls = {}
    for file in files:
        _check_file_and_upload(blob_client, file, 'blob_resourcefiles')
        sas_urls[file[0]] = 'https://{}.blob.{}/{}/{}?{}'.format(
            _STORAGEACCOUNT, _STORAGEACCOUNTEP,
            _STORAGE_CONTAINERS['blob_resourcefiles'], file[0],
            blob_client.generate_blob_shared_access_signature(
                _STORAGE_CONTAINERS['blob_resourcefiles'], file[0],
                permission=azureblob.BlobPermissions.READ,
                expiry=datetime.datetime.utcnow() +
                datetime.timedelta(days=_DEFAULT_SAS_EXPIRY_DAYS)
            )
        )
    return sas_urls
Example 7
def upload_for_nonbatch(blob_client, files, kind):
    # type: (azure.storage.blob.BlockBlobService, List[tuple],
    #        str) -> List[str]
    """Upload files to blob storage for non-batch
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param list files: files to upload
    :param str kind: "remotefs", "monitoring" or "federation"
    :rtype: list
    :return: list of file urls
    """
    if kind == 'federation':
        kind = '{}_global'.format(kind.lower())
    key = 'blob_{}'.format(kind.lower())
    ret = []
    for file in files:
        _check_file_and_upload(blob_client, file, key)
        ret.append('https://{}.blob.{}/{}/{}'.format(
            _STORAGEACCOUNT, _STORAGEACCOUNTEP,
            _STORAGE_CONTAINERS[key], file[0]))
    return ret
Example 8
def _clear_blob_task_resourcefiles(blob_client, container, config):
    # type: (azureblob.BlockBlobService, str, dict) -> None
    """Clear task resource file blobs in container
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param str container: container to clear blobs from
    :param dict config: configuration dict
    """
    bs = settings.batch_shipyard_settings(config)
    envfileloc = '{}taskrf-'.format(bs.storage_entity_prefix)
    logger.info('deleting blobs with prefix: {}'.format(envfileloc))
    try:
        blobs = blob_client.list_blobs(container, prefix=envfileloc)
    except azure.common.AzureMissingResourceHttpError:
        logger.warning('container not found: {}'.format(container))
    else:
        for blob in blobs:
            blob_client.delete_blob(container, blob.name)
Example 9
def delete_storage_containers_boot_diagnostics(
        blob_client, vm_name, vm_id):
    # type: (azureblob.BlockBlobService, str, str) -> None
    """Delete storage containers used for remotefs bootdiagnostics
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param str vm_name: vm name
    :param str vm_id: vm id
    """
    name = re.sub(r'[\W_]+', '', vm_name)
    contname = 'bootdiagnostics-{}-{}'.format(
        name[0:min((9, len(name)))], vm_id)
    logger.info('deleting container: {}'.format(contname))
    try:
        blob_client.delete_container(contname)
    except azure.common.AzureMissingResourceHttpError:
        logger.warning('container not found: {}'.format(contname))
Example 10
def cleanup_with_del_pool(blob_client, table_client, config, pool_id=None):
    # type: (azureblob.BlockBlobService, azuretable.TableService,
    #        dict, str) -> None
    """Special cleanup routine in combination with delete pool
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.cosmosdb.table.TableService table_client: table client
    :param dict config: configuration dict
    :param str pool_id: pool id
    """
    if util.is_none_or_empty(pool_id):
        pool_id = settings.pool_id(config)
    if not util.confirm_action(
            config, 'delete/cleanup of Batch Shipyard metadata in storage '
            'containers associated with {} pool'.format(pool_id)):
        return
    clear_storage_containers(
        blob_client, table_client, config, tables_only=True, pool_id=pool_id)
    delete_storage_containers(
        blob_client, table_client, config, skip_tables=True)
Example 11
def get_container_sas_token(block_blob_client,
                            container_name, blob_permissions):
    """
    Obtains a shared access signature granting the specified permissions to the
    container.
    :param block_blob_client: A blob service client.
    :type block_blob_client: `azure.storage.blob.BlockBlobService`
    :param str container_name: The name of the Azure Blob storage container.
    :param BlobPermissions blob_permissions: The permissions to grant.
    :rtype: str
    :return: A SAS token granting the specified permissions to the container.
    """
    # Obtain the SAS token for the container, setting the expiry time and
    # permissions. In this case, no start time is specified, so the shared
    # access signature becomes valid immediately.
    container_sas_token = \
        block_blob_client.generate_container_shared_access_signature(
            container_name,
            permission=blob_permissions,
            expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=2))
    return container_sas_token
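A possible way to call the helper above, assuming the legacy SDK's ContainerPermissions for container-level grants and the public-cloud endpoint; blob_client and the 'input' container are hypothetical:

import azure.storage.blob as azureblob

# Grant read+list access to the whole container for the next two hours.
sas = get_container_sas_token(
    blob_client, 'input',
    azureblob.ContainerPermissions(read=True, list=True))
container_url = 'https://{}.blob.core.windows.net/{}?{}'.format(
    blob_client.account_name, 'input', sas)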
Example 12
def get_blob_client() -> blob.BlockBlobService:
    if not storage_resource_id:
        return blob.BlockBlobService(
            account_name=storage_account_name,
            account_key=storage_account_key,
            endpoint_suffix=storage_account_suffix)
    else:
        credentials = ServicePrincipalCredentials(
            client_id=client_id,
            secret=credential,
            tenant=tenant_id,
            resource="https://management.core.windows.net/")
        m = RESOURCE_ID_PATTERN.match(storage_resource_id)
        accountname = m.group("account")
        subscription = m.group("subscription")
        resourcegroup = m.group("resourcegroup")
        mgmt_client = StorageManagementClient(credentials, subscription)
        key = (mgmt_client.storage_accounts.list_keys(
            resource_group_name=resourcegroup,
            account_name=accountname).keys[0].value)
        storage_client = CloudStorageAccount(accountname, key)
        return storage_client.create_block_blob_service()
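RESOURCE_ID_PATTERN is defined elsewhere in the project; given the named groups used above, a plausible definition matching ARM storage-account resource IDs would look like the sketch below (hypothetical, for illustration only):

import re

# Matches IDs of the form
# /subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<name>
RESOURCE_ID_PATTERN = re.compile(
    r"/subscriptions/(?P<subscription>[^/]+)"
    r"/resourceGroups/(?P<resourcegroup>[^/]+)"
    r"/providers/Microsoft\.Storage/storageAccounts/(?P<account>[^/]+)")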
Example 13
def create_sas_token(container_name, blob_name, permission, blob_client, expiry=None, timeout=None):
    """
    Create a blob sas token
    :param blob_client: The storage block blob client to use.
    :type blob_client: `azure.storage.blob.BlockBlobService`
    :param str container_name: The name of the container to upload the blob to.
    :param str blob_name: The name of the blob to upload the local file to.
    :param permission: The permissions to grant with the SAS token.
    :param expiry: The SAS expiry time.
    :type expiry: `datetime.datetime`
    :param int timeout: timeout in minutes from now for expiry,
        will only be used if expiry is not specified
    :return: A SAS token
    :rtype: str
    """
    if expiry is None:
        if timeout is None:
            timeout = 30
        expiry = datetime.datetime.utcnow() + datetime.timedelta(minutes=timeout)
    return blob_client.generate_blob_shared_access_signature(
        container_name, blob_name, permission=permission, expiry=expiry)
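A short usage sketch: pair the token with make_blob_url (legacy SDK) to produce a directly downloadable URL. blob_client, 'data', and 'report.csv' are hypothetical names:

from azure.storage.blob import BlobPermissions

# Read-only token valid for one hour, then a full blob URL carrying it.
token = create_sas_token(
    'data', 'report.csv', BlobPermissions.READ, blob_client, timeout=60)
url = blob_client.make_blob_url('data', 'report.csv', sas_token=token)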
Example 14
def initialize(self, host):
    """
    The EventProcessorHost can't pass itself to the AzureStorageCheckpointLeaseManager
    constructor because it is still being constructed. Do other initialization here
    as well, because it might throw and hence we don't want it in the constructor.
    """
    self.host = host
    self.storage_client = BlockBlobService(account_name=self.storage_account_name,
                                           account_key=self.storage_account_key,
                                           sas_token=self.storage_sas_token,
                                           endpoint_suffix=self.endpoint_suffix,
                                           connection_string=self.connection_string,
                                           request_session=self.request_session)
    self.consumer_group_directory = self.storage_blob_prefix + self.host.eh_config.consumer_group

# Checkpoint Management Methods
Example 15
def _create_credentials() -> tuple:
    """Create storage credentials
    :rtype: tuple
    :return: (blob_client, queue_client, table_client)
    """
    sa, ep, sakey = os.environ['SHIPYARD_STORAGE_ENV'].split(':')
    blob_client = azureblob.BlockBlobService(
        account_name=sa,
        account_key=sakey,
        endpoint_suffix=ep)
    queue_client = azurequeue.QueueService(
        account_name=sa,
        account_key=sakey,
        endpoint_suffix=ep)
    table_client = azuretable.TableService(
        account_name=sa,
        account_key=sakey,
        endpoint_suffix=ep)
    return blob_client, queue_client, table_client
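The split(':') above implies SHIPYARD_STORAGE_ENV packs the account name, endpoint suffix, and account key in that order; colons are a safe delimiter here because base64 account keys never contain ':'. A hedged illustration with placeholder values:

import os

# account_name : endpoint_suffix : account_key, colon-separated
os.environ['SHIPYARD_STORAGE_ENV'] = 'mystorageacct:core.windows.net:<account-key>'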
Example 16
def __init__(
        self, blob_client: azure.storage.blob.BlockBlobService,
        queue_client: azure.storage.queue.QueueService,
        table_client: azure.storage.table.TableService,
        resource: str, msg_id: str, nglobalresources: int):
    """DockerSaveThread ctor
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param str resource: resource
    :param str msg_id: queue message id
    :param int nglobalresources: number of global resources
    """
    threading.Thread.__init__(self)
    self.blob_client = blob_client
    self.queue_client = queue_client
    self.table_client = table_client
    self.resource = resource
    self.msg_id = msg_id
    self.nglobalresources = nglobalresources
    with _DIRECTDL_LOCK:
        _DIRECTDL_DOWNLOADING.append(self.resource)
Example 17
def upload_resource_files(blob_client, config, files):
    # type: (azure.storage.blob.BlockBlobService, dict, List[tuple]) -> dict
    """Upload resource files to blob storage
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param dict config: configuration dict
    :param list files: files to upload
    :rtype: dict
    :return: sas url dict
    """
    sas_urls = {}
    for file in files:
        _check_file_and_upload(blob_client, file, 'blob_resourcefiles')
        sas_urls[file[0]] = 'https://{}.blob.{}/{}/{}?{}'.format(
            _STORAGEACCOUNT, _STORAGEACCOUNTEP,
            _STORAGE_CONTAINERS['blob_resourcefiles'], file[0],
            blob_client.generate_blob_shared_access_signature(
                _STORAGE_CONTAINERS['blob_resourcefiles'], file[0],
                permission=azureblob.BlobPermissions.READ,
                expiry=datetime.datetime.utcnow() +
                datetime.timedelta(days=_DEFAULT_SAS_EXPIRY_DAYS)
            )
        )
    return sas_urls
Example 18
def upload_for_remotefs(blob_client, files):
    # type: (azure.storage.blob.BlockBlobService, List[tuple]) -> List[str]
    """Upload files to blob storage for remote fs
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param list files: files to upload
    :rtype: list
    :return: list of file urls
    """
    ret = []
    for file in files:
        _check_file_and_upload(blob_client, file, 'blob_remotefs')
        ret.append('https://{}.blob.{}/{}/{}'.format(
            _STORAGEACCOUNT, _STORAGEACCOUNTEP,
            _STORAGE_CONTAINERS['blob_remotefs'], file[0]))
    return ret
Example 19
def delete_storage_containers(
        blob_client, queue_client, table_client, config, skip_tables=False):
    # type: (azureblob.BlockBlobService, azurequeue.QueueService,
    #        azuretable.TableService, dict, bool) -> None
    """Delete storage containers
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param dict config: configuration dict
    :param bool skip_tables: skip deleting tables
    """
    for key in _STORAGE_CONTAINERS:
        if key.startswith('blob_'):
            if key != 'blob_remotefs':
                logger.debug('deleting container: {}'.format(
                    _STORAGE_CONTAINERS[key]))
                blob_client.delete_container(_STORAGE_CONTAINERS[key])
        elif not skip_tables and key.startswith('table_'):
            logger.debug('deleting table: {}'.format(_STORAGE_CONTAINERS[key]))
            table_client.delete_table(_STORAGE_CONTAINERS[key])
        elif key.startswith('queue_'):
            logger.debug('deleting queue: {}'.format(_STORAGE_CONTAINERS[key]))
            queue_client.delete_queue(_STORAGE_CONTAINERS[key])
Example 20
def clear_storage_containers(
        blob_client, queue_client, table_client, config, tables_only=False):
    # type: (azureblob.BlockBlobService, azurequeue.QueueService,
    #        azuretable.TableService, dict, bool) -> None
    """Clear storage containers
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param dict config: configuration dict
    :param bool tables_only: clear only tables
    """
    bs = settings.batch_shipyard_settings(config)
    for key in _STORAGE_CONTAINERS:
        if not tables_only and key.startswith('blob_'):
            if key != 'blob_remotefs':
                _clear_blobs(blob_client, _STORAGE_CONTAINERS[key])
        elif key.startswith('table_'):
            try:
                _clear_table(table_client, _STORAGE_CONTAINERS[key], config)
            except azure.common.AzureMissingResourceHttpError:
                if key != 'table_perf' or bs.store_timing_metrics:
                    raise
        elif not tables_only and key.startswith('queue_'):
            logger.info('clearing queue: {}'.format(_STORAGE_CONTAINERS[key]))
            queue_client.clear_messages(_STORAGE_CONTAINERS[key])
Example 21
def create_storage_containers(blob_client, queue_client, table_client, config):
    # type: (azureblob.BlockBlobService, azurequeue.QueueService,
    #        azuretable.TableService, dict) -> None
    """Create storage containers
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param dict config: configuration dict
    """
    bs = settings.batch_shipyard_settings(config)
    for key in _STORAGE_CONTAINERS:
        if key.startswith('blob_'):
            logger.info('creating container: {}'.format(
                _STORAGE_CONTAINERS[key]))
            blob_client.create_container(_STORAGE_CONTAINERS[key])
        elif key.startswith('table_'):
            if key == 'table_perf' and not bs.store_timing_metrics:
                continue
            logger.info('creating table: {}'.format(_STORAGE_CONTAINERS[key]))
            table_client.create_table(_STORAGE_CONTAINERS[key])
        elif key.startswith('queue_'):
            logger.info('creating queue: {}'.format(_STORAGE_CONTAINERS[key]))
            queue_client.create_queue(_STORAGE_CONTAINERS[key])
Example 22
def create_storage_clients():
    # type: (None) -> tuple
    """Create storage clients
    :rtype: tuple
    :return: blob_client, queue_client, table_client
    """
    account_name = storage.get_storageaccount()
    account_key = storage.get_storageaccount_key()
    endpoint_suffix = storage.get_storageaccount_endpoint()
    blob_client = azureblob.BlockBlobService(
        account_name=account_name,
        account_key=account_key,
        endpoint_suffix=endpoint_suffix,
    )
    queue_client = azurequeue.QueueService(
        account_name=account_name,
        account_key=account_key,
        endpoint_suffix=endpoint_suffix,
    )
    table_client = azuretable.TableService(
        account_name=account_name,
        account_key=account_key,
        endpoint_suffix=endpoint_suffix,
    )
    return blob_client, queue_client, table_client
Example 23
def _upload_file_to_azure_storage_account(
        self, client: BlockBlobService, source: str, destination: str, container: str = None
):
    """Upload the file to the specified Azure Storage Account.

    Assumption is that any cloud environment has access to a shared repository of artifacts.

    Args:
        client: Azure Storage Account client
        source: Local path of the file to upload
        destination: Name of the file
        container: Name of the container the file should be uploaded to
    """
    if not container:
        container = self.config["azure"]["common"]["artifacts_shared_storage_account_container_name"]
    logger.info(
        f"""uploading artifact
        | from {source}
        | to {destination}
        | in container {container}"""
    )
    client.create_blob_from_path(container_name=container, blob_name=destination, file_path=source)
Example 24
def __init__(self, settings: BlobStorageSettings):
    if settings.connection_string:
        client = BlockBlobService(connection_string=settings.connection_string)
    elif settings.account_name and settings.account_key:
        client = BlockBlobService(
            account_name=settings.account_name, account_key=settings.account_key
        )
    else:
        raise Exception(
            "A connection string must be provided when no account name and key are set"
        )
    self.client = client
    self.settings = settings
Example 25
def create_client(stage_info, use_accelerate_endpoint: bool = False):
    """Creates a client object with a stage credential.

    Args:
        stage_info: Information about the stage.
        use_accelerate_endpoint: Not used for Azure client.

    Returns:
        The client to communicate with Azure Blob storage.
    """
    stage_credentials = stage_info['creds']
    sas_token = stage_credentials['AZURE_SAS_TOKEN']
    if sas_token and sas_token.startswith('?'):
        sas_token = sas_token[1:]
    end_point = stage_info['endPoint']
    if end_point.startswith('blob.'):
        end_point = end_point[len('blob.'):]
    if use_new_azure_api:
        client = BlobServiceClient(
            account_url="https://{}.blob.{}".format(
                stage_info['storageAccount'],
                end_point
            ),
            credential=sas_token)
        client._config.retry_policy = ExponentialRetry(
            initial_backoff=1,
            increment_base=2,
            max_attempts=60,
            random_jitter_range=2
        )
    else:
        client = BlockBlobService(account_name=stage_info['storageAccount'],
                                  sas_token=sas_token,
                                  endpoint_suffix=end_point)
        client._httpclient = RawBodyReadingClient(session=requests.session(), protocol="https", timeout=2000)
        client.retry = ExponentialRetry(
            initial_backoff=1, increment_base=2, max_attempts=60, random_jitter_range=2).retry
    return client
Example 26
def _create_azure_connection(self):
    """Return an instance of BlockBlobService."""
    return BlockBlobService(account_name=self._account_name, account_key=self._account_key)
Example 27
def __init__(self, spec):
    super().__init__(spec)
    self.__container = spec['container']
    self.__account = spec['account']
    self.__key = spec.get('key')
    self.__sasToken = spec.get('sasToken')
    try:
        from azure.storage.blob import BlockBlobService
    except ImportError:
        raise BuildError("azure-storage-blob Python3 library not installed!")
    self.__service = BlockBlobService(
        account_name=self.__account, account_key=self.__key,
        sas_token=self.__sasToken, socket_timeout=6000)
Example 28
def scriptGetService(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('account')
    parser.add_argument('container')
    parser.add_argument('buildid')
    parser.add_argument('suffix')
    parser.add_argument('file')
    parser.add_argument('--key')
    parser.add_argument('--sas-token')
    args = parser.parse_args(args)
    try:
        from azure.storage.blob import BlockBlobService
    except ImportError:
        raise BuildError("azure-storage-blob Python3 library not installed!")
    service = BlockBlobService(account_name=args.account, account_key=args.key,
                               sas_token=args.sas_token, socket_timeout=6000)
    try:
        with open(args.buildid, 'rb') as f:
            remoteBlob = AzureArchive.__makeBlobName(f.read(), args.suffix)
    except OSError as e:
        raise BuildError(str(e))
    return (service, args.container, remoteBlob, args.file)
Example 29
def upload_file_to_container(block_blob_client, container_name, file_path, duration_hours=24):
    """
    Uploads a local file to an Azure Blob storage container.
    :param block_blob_client: A blob service client.
    :type block_blob_client: `azure.storage.blob.BlockBlobService`
    :param str container_name: The name of the Azure Blob storage container.
    :param str file_path: The local path to the file.
    :param int duration_hours: The number of hours the SAS URL stays valid.
    :rtype: `azure.batch.models.ResourceFile`
    :return: A ResourceFile initialized with a SAS URL appropriate for Batch
        tasks.
    """
    blob_name = os.path.basename(file_path)
    print("Uploading file {} to container [{}]...".format(file_path, container_name))
    block_blob_client.create_blob_from_path(container_name, blob_name, file_path)
    sas_token = block_blob_client.generate_blob_shared_access_signature(
        container_name,
        blob_name,
        permission=azureblob.BlobPermissions.READ,
        expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=duration_hours),
    )
    sas_url = block_blob_client.make_blob_url(
        container_name, blob_name, sas_token=sas_token
    )
    return models.ResourceFile(http_url=sas_url, file_path=_CONTAINER_INPUT_FILE)
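A usage sketch for the Batch workflow this helper targets: attach the returned ResourceFile to a task so the compute node stages the blob before the command runs. batch_client, the job id, and the file names are hypothetical; models is azure.batch.models:

# Upload a local file and hand the resulting ResourceFile to a Batch task.
input_file = upload_file_to_container(blob_client, 'input', '/tmp/data.csv')
task = models.TaskAddParameter(
    id='task1',
    command_line='/bin/bash -c "wc -l {}"'.format(input_file.file_path),
    resource_files=[input_file])
batch_client.task.add(job_id='job1', task=task)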