
[Rename] rename max_connections to max_concurrency (Azure#7606)
xiafu-msft authored Oct 4, 2019
1 parent 637535e commit 61b6388
Showing 11 changed files with 72 additions and 72 deletions.
8 changes: 4 additions & 4 deletions sdk/storage/azure-storage-blob/azure/storage/blob/models.py
@@ -244,6 +244,10 @@ def __init__(self, allowed_origins, allowed_methods, **kwargs):
 class ContainerProperties(DictMixin):
     """Blob container's properties class.
+    Returned ``ContainerProperties`` instances expose these values through a
+    dictionary interface, for example: ``container_props["last_modified"]``.
+    Additionally, the container name is available as ``container_props["name"]``.
     :param datetime last_modified:
         A datetime object representing the last time the container was modified.
     :param str etag:
@@ -259,10 +263,6 @@ class ContainerProperties(DictMixin):
         Represents whether the container has a legal hold.
     :param dict metadata: A dict with name-value pairs to associate with the
         container as metadata.
-    Returned ``ContainerProperties`` instances expose these values through a
-    dictionary interface, for example: ``container_props["last_modified"]``.
-    Additionally, the container name is available as ``container_props["name"]``.
     """

     def __init__(self, **kwargs):
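
This hunk only moves the ``DictMixin`` paragraph to the top of the docstring. A minimal sketch of the dictionary interface it documents; the connection string and container name are placeholders, and the container is assumed to exist:

# Sketch of the documented dictionary interface on ContainerProperties.
from azure.storage.blob import BlobServiceClient

service = BlobServiceClient.from_connection_string("<connection-string>")
container_client = service.get_container_client("mycontainer")
props = container_client.get_container_properties()

# DictMixin exposes the same values through attributes and dict-style keys
print(props.last_modified)
print(props["last_modified"])
print(props["name"])  # the container name is also available as a key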
@@ -49,7 +49,7 @@ def upload_data_chunks(
         uploader_class=None,
         total_size=None,
         chunk_size=None,
-        max_connections=None,
+        max_concurrency=None,
         stream=None,
         validate_content=None,
         encryption_options=None,
@@ -63,7 +63,7 @@ def upload_data_chunks(
         kwargs['encryptor'] = encryptor
         kwargs['padder'] = padder

-    parallel = max_connections > 1
+    parallel = max_concurrency > 1
     if parallel and 'modified_access_conditions' in kwargs:
         # Access conditions do not work with parallelism
         kwargs['modified_access_conditions'] = None
@@ -77,11 +77,11 @@ def upload_data_chunks(
         validate_content=validate_content,
         **kwargs)
     if parallel:
-        executor = futures.ThreadPoolExecutor(max_connections)
+        executor = futures.ThreadPoolExecutor(max_concurrency)
         upload_tasks = uploader.get_chunk_streams()
         running_futures = [
             executor.submit(with_current_context(uploader.process_chunk), u)
-            for u in islice(upload_tasks, 0, max_connections)
+            for u in islice(upload_tasks, 0, max_concurrency)
         ]
         range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures)
     else:
@@ -96,10 +96,10 @@ def upload_substream_blocks(
         uploader_class=None,
         total_size=None,
         chunk_size=None,
-        max_connections=None,
+        max_concurrency=None,
         stream=None,
         **kwargs):
-    parallel = max_connections > 1
+    parallel = max_concurrency > 1
     if parallel and 'modified_access_conditions' in kwargs:
         # Access conditions do not work with parallelism
         kwargs['modified_access_conditions'] = None
@@ -112,11 +112,11 @@ def upload_substream_blocks(
         **kwargs)

     if parallel:
-        executor = futures.ThreadPoolExecutor(max_connections)
+        executor = futures.ThreadPoolExecutor(max_concurrency)
         upload_tasks = uploader.get_substream_blocks()
         running_futures = [
             executor.submit(with_current_context(uploader.process_substream_block), u)
-            for u in islice(upload_tasks, 0, max_connections)
+            for u in islice(upload_tasks, 0, max_concurrency)
         ]
         range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures)
     else:
@@ -420,7 +420,7 @@ def read(self, n):
         # or read in just enough data for the current block/sub stream
         current_max_buffer_size = min(self._max_buffer_size, self._length - self._position)

-        # lock is only defined if max_connections > 1 (parallel uploads)
+        # lock is only defined if max_concurrency > 1 (parallel uploads)
         if self._lock:
             with self._lock:
                 # reposition the underlying stream to match the start of the data to read
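
In both parallel branches above, the executor is primed with the first ``max_concurrency`` tasks via ``islice`` and the window is then topped up as futures complete. A self-contained sketch of that windowed-submission pattern, not the SDK's internal ``_parallel_uploads`` (whose refill logic is assumed here):

from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait
from itertools import islice

def parallel_map(func, tasks, max_concurrency):
    """Run func over tasks, keeping at most max_concurrency in flight."""
    tasks = iter(tasks)
    results = []  # collected in completion order, not submission order
    with ThreadPoolExecutor(max_concurrency) as executor:
        # Prime the window, as the SDK does with islice(upload_tasks, 0, max_concurrency)
        running = {executor.submit(func, t) for t in islice(tasks, max_concurrency)}
        while running:
            done, running = wait(running, return_when=FIRST_COMPLETED)
            for future in done:
                results.append(future.result())
            # Refill: one new submission per completed future
            running.update(executor.submit(func, t) for t in islice(tasks, len(done)))
    return results

if __name__ == "__main__":
    print(parallel_map(lambda x: x * x, range(10), max_concurrency=3))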
@@ -50,7 +50,7 @@ async def upload_data_chunks(
         uploader_class=None,
         total_size=None,
         chunk_size=None,
-        max_connections=None,
+        max_concurrency=None,
         stream=None,
         encryption_options=None,
         **kwargs):
@@ -63,7 +63,7 @@ async def upload_data_chunks(
         kwargs['encryptor'] = encryptor
         kwargs['padder'] = padder

-    parallel = max_connections > 1
+    parallel = max_concurrency > 1
     if parallel and 'modified_access_conditions' in kwargs:
         # Access conditions do not work with parallelism
         kwargs['modified_access_conditions'] = None
@@ -80,7 +80,7 @@ async def upload_data_chunks(
         upload_tasks = uploader.get_chunk_streams()
         running_futures = [
             asyncio.ensure_future(uploader.process_chunk(u))
-            for u in islice(upload_tasks, 0, max_connections)
+            for u in islice(upload_tasks, 0, max_concurrency)
         ]
         range_ids = await _parallel_uploads(uploader.process_chunk, upload_tasks, running_futures)
     else:
@@ -98,10 +98,10 @@ async def upload_substream_blocks(
         uploader_class=None,
         total_size=None,
         chunk_size=None,
-        max_connections=None,
+        max_concurrency=None,
         stream=None,
         **kwargs):
-    parallel = max_connections > 1
+    parallel = max_concurrency > 1
     if parallel and 'modified_access_conditions' in kwargs:
         # Access conditions do not work with parallelism
         kwargs['modified_access_conditions'] = None
@@ -117,7 +117,7 @@ async def upload_substream_blocks(
         upload_tasks = uploader.get_substream_blocks()
         running_futures = [
             asyncio.ensure_future(uploader.process_substream_block(u))
-            for u in islice(upload_tasks, 0, max_connections)
+            for u in islice(upload_tasks, 0, max_concurrency)
         ]
         range_ids = await _parallel_uploads(uploader.process_substream_block, upload_tasks, running_futures)
     else:
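
The async path mirrors the same window with ``asyncio.ensure_future`` in place of an executor. A minimal asyncio analogue of the pattern, under the same assumption about the refill logic:

import asyncio
from itertools import islice

async def parallel_map_async(coro_func, tasks, max_concurrency):
    """Async analogue: at most max_concurrency coroutines in flight."""
    tasks = iter(tasks)
    results = []
    # Prime the window, mirroring asyncio.ensure_future over islice(...)
    running = {asyncio.ensure_future(coro_func(t))
               for t in islice(tasks, max_concurrency)}
    while running:
        done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            results.append(task.result())
        # Refill: one new task per completed one
        running.update(asyncio.ensure_future(coro_func(t))
                       for t in islice(tasks, len(done)))
    return results

async def _demo():
    async def square(x):
        await asyncio.sleep(0.01)
        return x * x
    print(await parallel_map_async(square, range(10), max_concurrency=3))

asyncio.run(_demo())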
@@ -477,7 +477,7 @@ async def upload_file(
             metadata=None,  # type: Optional[Dict[str, str]]
             content_settings=None,  # type: Optional[ContentSettings]
             validate_content=False,  # type: bool
-            max_connections=1,  # type: Optional[int]
+            max_concurrency=1,  # type: Optional[int]
             timeout=None,  # type: Optional[int]
             encoding='UTF-8',  # type: str
             **kwargs  # type: Any
@@ -504,7 +504,7 @@ async def upload_file(
             the wire if using http instead of https as https (the default) will
             already validate. Note that this MD5 hash is not stored with the
             file.
-        :param int max_connections:
+        :param int max_concurrency:
             Maximum number of parallel connections to use.
         :param int timeout:
             The timeout parameter is expressed in seconds.
@@ -529,7 +529,7 @@ async def upload_file(
             metadata=metadata,
             content_settings=content_settings,
             validate_content=validate_content,
-            max_connections=max_connections,
+            max_concurrency=max_concurrency,
             timeout=timeout,
             encoding=encoding,
             **kwargs)
@@ -45,7 +45,7 @@ async def _upload_file_helper(
         content_settings,
         validate_content,
         timeout,
-        max_connections,
+        max_concurrency,
         file_settings,
         file_attributes="none",
         file_creation_time="now",
@@ -76,7 +76,7 @@ async def _upload_file_helper(
             total_size=size,
             chunk_size=file_settings.max_range_size,
             stream=stream,
-            max_connections=max_connections,
+            max_concurrency=max_concurrency,
             validate_content=validate_content,
             timeout=timeout,
             **kwargs
@@ -245,7 +245,7 @@ async def upload_file(
             metadata=None,  # type: Optional[Dict[str, str]]
             content_settings=None,  # type: Optional[ContentSettings]
             validate_content=False,  # type: bool
-            max_connections=1,  # type: Optional[int]
+            max_concurrency=1,  # type: Optional[int]
             file_attributes="none",  # type: Union[str, NTFSAttributes]
             file_creation_time="now",  # type: Union[str, datetime]
             file_last_write_time="now",  # type: Union[str, datetime]
@@ -274,7 +274,7 @@ async def upload_file(
             the wire if using http instead of https as https (the default) will
             already validate. Note that this MD5 hash is not stored with the
             file.
-        :param int max_connections:
+        :param int max_concurrency:
             Maximum number of parallel connections to use.
         :param int timeout:
             The timeout parameter is expressed in seconds.
@@ -342,7 +342,7 @@ async def upload_file(
             content_settings,
             validate_content,
             timeout,
-            max_connections,
+            max_concurrency,
             self._config,
             file_attributes=file_attributes,
             file_creation_time=file_creation_time,
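
A hypothetical end-to-end call of the renamed parameter on the async client; the import path and ``from_connection_string`` arguments are assumptions that may vary across preview versions of azure-storage-file:

import asyncio

# Assumed import path for the 2019 preview of azure-storage-file
from azure.storage.file.aio import FileClient

async def main():
    file_client = FileClient.from_connection_string(
        "<connection-string>", share_name="myshare", file_path="myfile.bin")
    data = b"x" * (8 * 1024 * 1024)
    # Upload up to four ranges concurrently instead of the default of one
    await file_client.upload_file(data, max_concurrency=4)

asyncio.run(main())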
@@ -550,7 +550,7 @@ def upload_file(
             metadata=None,  # type: Optional[Dict[str, str]]
             content_settings=None,  # type: Optional[ContentSettings]
             validate_content=False,  # type: bool
-            max_connections=1,  # type: Optional[int]
+            max_concurrency=1,  # type: Optional[int]
             timeout=None,  # type: Optional[int]
             encoding='UTF-8',  # type: str
             **kwargs  # type: Any
@@ -577,7 +577,7 @@ def upload_file(
             the wire if using http instead of https as https (the default) will
             already validate. Note that this MD5 hash is not stored with the
             file.
-        :param int max_connections:
+        :param int max_concurrency:
             Maximum number of parallel connections to use.
         :param int timeout:
             The timeout parameter is expressed in seconds.
@@ -602,7 +602,7 @@ def upload_file(
             metadata=metadata,
             content_settings=content_settings,
             validate_content=validate_content,
-            max_connections=max_connections,
+            max_concurrency=max_concurrency,
             timeout=timeout,
             encoding=encoding,
             **kwargs)
10 changes: 5 additions & 5 deletions sdk/storage/azure-storage-file/azure/storage/file/file_client.py
@@ -51,7 +51,7 @@ def _upload_file_helper(
         content_settings,
         validate_content,
         timeout,
-        max_connections,
+        max_concurrency,
         file_settings,
         file_attributes="none",
         file_creation_time="now",
@@ -83,7 +83,7 @@ def _upload_file_helper(
             total_size=size,
             chunk_size=file_settings.max_range_size,
             stream=stream,
-            max_connections=max_connections,
+            max_concurrency=max_concurrency,
             validate_content=validate_content,
             timeout=timeout,
             **kwargs
@@ -433,7 +433,7 @@ def upload_file(
             metadata=None,  # type: Optional[Dict[str, str]]
             content_settings=None,  # type: Optional[ContentSettings]
             validate_content=False,  # type: bool
-            max_connections=1,  # type: Optional[int]
+            max_concurrency=1,  # type: Optional[int]
             file_attributes="none",  # type: Union[str, NTFSAttributes]
             file_creation_time="now",  # type: Union[str, datetime]
             file_last_write_time="now",  # type: Union[str, datetime]
@@ -462,7 +462,7 @@ def upload_file(
             the wire if using http instead of https as https (the default) will
             already validate. Note that this MD5 hash is not stored with the
             file.
-        :param int max_connections:
+        :param int max_concurrency:
             Maximum number of parallel connections to use.
         :param int timeout:
             The timeout parameter is expressed in seconds.
@@ -529,7 +529,7 @@ def upload_file(
             content_settings,
             validate_content,
             timeout,
-            max_connections,
+            max_concurrency,
             self._config,
             file_attributes=file_attributes,
             file_creation_time=file_creation_time,
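
A hypothetical synchronous counterpart; again, the connection string, share, and path are placeholders. Note from the tests below that a non-seekable stream is only uploaded with ``max_concurrency=1``, since parallel range uploads must seek within the source:

from azure.storage.file import FileClient  # assumed import path for this preview

file_client = FileClient.from_connection_string(
    "<connection-string>", share_name="myshare", file_path="myfile.bin")

with open("local_file.bin", "rb") as stream:
    # Seekable stream, so parallel range uploads are allowed
    file_client.upload_file(stream, max_concurrency=2)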
22 changes: 11 additions & 11 deletions sdk/storage/azure-storage-file/tests/test_file.py
@@ -1020,7 +1020,7 @@ def callback(response):
             if current is not None:
                 progress.append((current, total))

-        response = file_client.upload_file(data, max_connections=2, raw_response_hook=callback)
+        response = file_client.upload_file(data, max_concurrency=2, raw_response_hook=callback)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1045,7 +1045,7 @@ def test_create_file_from_bytes_with_index(self):
             max_range_size=4 * 1024)

         # Act
-        response = file_client.upload_file(data[index:], max_connections=2)
+        response = file_client.upload_file(data[index:], max_concurrency=2)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1071,7 +1071,7 @@ def test_create_file_from_bytes_with_index_and_count(self):
             max_range_size=4 * 1024)

         # Act
-        response = file_client.upload_file(data[index:], length=count, max_connections=2)
+        response = file_client.upload_file(data[index:], length=count, max_concurrency=2)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1098,7 +1098,7 @@ def test_create_file_from_path(self):

         # Act
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            response = file_client.upload_file(stream, max_connections=2)
+            response = file_client.upload_file(stream, max_concurrency=2)
             assert isinstance(response, dict)
             assert 'last_modified' in response
             assert 'etag' in response
@@ -1132,7 +1132,7 @@ def callback(response):
                 progress.append((current, total))

         with open(INPUT_FILE_PATH, 'rb') as stream:
-            response = file_client.upload_file(stream, max_connections=2, raw_response_hook=callback)
+            response = file_client.upload_file(stream, max_concurrency=2, raw_response_hook=callback)
             assert isinstance(response, dict)
             assert 'last_modified' in response
             assert 'etag' in response
@@ -1164,7 +1164,7 @@ def test_create_file_from_stream(self):
         # Act
         file_size = len(data)
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            response = file_client.upload_file(stream, max_connections=2)
+            response = file_client.upload_file(stream, max_concurrency=2)
             assert isinstance(response, dict)
             assert 'last_modified' in response
             assert 'etag' in response
@@ -1193,7 +1193,7 @@ def test_create_file_from_stream_non_seekable(self):
         file_size = len(data)
         with open(INPUT_FILE_PATH, 'rb') as stream:
             non_seekable_file = StorageFileTest.NonSeekableFile(stream)
-            file_client.upload_file(non_seekable_file, length=file_size, max_connections=1)
+            file_client.upload_file(non_seekable_file, length=file_size, max_concurrency=1)

         # Assert
         self.assertFileEqual(file_client, data[:file_size])
@@ -1225,7 +1225,7 @@ def callback(response):

         file_size = len(data)
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            file_client.upload_file(stream, max_connections=2, raw_response_hook=callback)
+            file_client.upload_file(stream, max_concurrency=2, raw_response_hook=callback)

         # Assert
         self.assertFileEqual(file_client, data[:file_size])
@@ -1254,7 +1254,7 @@ def test_create_file_from_stream_truncated(self):
         # Act
         file_size = len(data) - 512
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            file_client.upload_file(stream, length=file_size, max_connections=2)
+            file_client.upload_file(stream, length=file_size, max_concurrency=2)

         # Assert
         self.assertFileEqual(file_client, data[:file_size])
@@ -1286,7 +1286,7 @@ def callback(response):

         file_size = len(data) - 5
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            file_client.upload_file(stream, length=file_size, max_connections=2, raw_response_hook=callback)
+            file_client.upload_file(stream, length=file_size, max_concurrency=2, raw_response_hook=callback)


         # Assert
@@ -1389,7 +1389,7 @@ def test_create_file_with_md5_large(self):
             max_range_size=4 * 1024)

         # Act
-        file_client.upload_file(data, validate_content=True, max_connections=2)
+        file_client.upload_file(data, validate_content=True, max_concurrency=2)

         # Assert

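
The ``raw_response_hook=callback`` tests above show only the tail of each callback. A sketch of the full progress-tracking pattern; the ``response.context`` keys are assumptions inferred from similar storage tests, not confirmed by this diff:

progress = []

def callback(response):
    # Assumed context keys; the diff only shows the None check and the append
    current = response.context['upload_stream_current']
    total = response.context['data_stream_total']
    if current is not None:
        progress.append((current, total))

# Passed through to every pipeline response during the upload, e.g.:
# file_client.upload_file(data, max_concurrency=2, raw_response_hook=callback)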