Skip to content
This repository was archived by the owner on Mar 31, 2026. It is now read-only.

tests.system.test_fileio: test_blobwriter_and_blobreader failed #963

@flaky-bot

Description

@flaky-bot

Note: #928 was also filed for this test, but it was closed more than 10 days ago, so I didn't mark this issue as flaky.


commit: 1d384bf
buildURL: Build Status, Sponge
status: failed

Test output
shared_bucket = <Bucket: gcp-systest-1673285927243>, blobs_to_delete = []
file_data = {'big': {'hash': b'cEome4a+NYd7YIXzXQnR5Q==', 'path': '/tmpfs/src/github/python-storage/tests/data/five-point-one-mb-f...g'}, 'simple': {'hash': b'3Hkwjv2WvCnKjNR6Z3CboA==', 'path': '/tmpfs/src/github/python-storage/tests/data/simple.txt'}}
service_account = <google.oauth2.service_account.Credentials object at 0x7ffa6cb459a0>
def test_blobwriter_and_blobreader(
    shared_bucket,
    blobs_to_delete,
    file_data,
    service_account,
):
    blob = shared_bucket.blob("LargeFile")

    # Test BlobWriter works.
    info = file_data["big"]
    with open(info["path"], "rb") as file_obj:
        with blob.open("wb", chunk_size=256 * 1024) as writer:
            writer.write(file_obj.read(100))
          writer.write(file_obj.read(256 * 1024))

tests/system/test_fileio.py:33:


self = <google.cloud.storage.fileio.BlobWriter object at 0x7ffa6b0136d0>
b = b'J\xdc\xd2!\x97\x0f\x18\xf3%\tE\xe6\x1e\xa8\x18\x99\x08\t\x92C\xb1\xbcaf\xea|\x1e\xec\xc4\x07\x02!tnV\xed\x0eQ:\x94L...xbe\x08M\xb8q\x1e\xa6\x98\xfd\x89>x`6\x88M\x04D\xf2#\x16\xc3q\x89nx\xe0\x0e9\x12\x88\xb0\xe3#\xbaNJ%\xa6\x17\x7f}\xeeX'

def write(self, b):
    self._checkClosed()  # Raises ValueError if closed.

    pos = self._buffer.write(b)

    # If there is enough content, upload chunks.
    num_chunks = len(self._buffer) // self._chunk_size
    if num_chunks:
      self._upload_chunks_from_buffer(num_chunks)

google/cloud/storage/fileio.py:357:


self = <google.cloud.storage.fileio.BlobWriter object at 0x7ffa6b0136d0>
num_chunks = 1

def _upload_chunks_from_buffer(self, num_chunks):
    """Upload a specified number of chunks."""

    # Initialize the upload if necessary.
    if not self._upload_and_transport:
      self._initiate_upload()

google/cloud/storage/fileio.py:405:


self = <google.cloud.storage.fileio.BlobWriter object at 0x7ffa6b0136d0>

def _initiate_upload(self):
    # num_retries is only supported for backwards-compatibility reasons.
    num_retries = self._upload_kwargs.pop("num_retries", None)
    retry = self._retry
    content_type = self._upload_kwargs.pop("content_type", None)

    if num_retries is not None:
        warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2)
        # num_retries and retry are mutually exclusive. If num_retries is
        # set and retry is exactly the default, then nullify retry for
        # backwards compatibility.
        if retry is DEFAULT_RETRY_IF_GENERATION_SPECIFIED:
            retry = None

    # Handle ConditionalRetryPolicy.
    if isinstance(retry, ConditionalRetryPolicy):
        # Conditional retries are designed for non-media calls, which change
        # arguments into query_params dictionaries. Media operations work
        # differently, so here we make a "fake" query_params to feed to the
        # ConditionalRetryPolicy.
        query_params = {
            "ifGenerationMatch": self._upload_kwargs.get("if_generation_match"),
            "ifMetagenerationMatch": self._upload_kwargs.get(
                "if_metageneration_match"
            ),
        }
        retry = retry.get_retry_policy_if_conditions_met(query_params=query_params)
  self._upload_and_transport = self._blob._initiate_resumable_upload(
        self._blob.bucket.client,
        self._buffer,
        content_type,
        None,
        num_retries,
        chunk_size=self._chunk_size,
        retry=retry,
        **self._upload_kwargs,
    )

google/cloud/storage/fileio.py:389:


self = <Blob: gcp-systest-1673285927243, LargeFile, None>
client = <google.cloud.storage.client.Client object at 0x7ffa6cb81070>
stream = <google.cloud.storage.fileio.SlidingBuffer object at 0x7ffa6b013b80>
content_type = 'application/octet-stream', size = None, num_retries = None
predefined_acl = None, extra_headers = None, chunk_size = 262144
if_generation_match = None, if_generation_not_match = None
if_metageneration_match = None, if_metageneration_not_match = None, timeout = 60
checksum = None, retry = None

def _initiate_resumable_upload(
    self,
    client,
    stream,
    content_type,
    size,
    num_retries,
    predefined_acl=None,
    extra_headers=None,
    chunk_size=None,
    if_generation_match=None,
    if_generation_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    timeout=_DEFAULT_TIMEOUT,
    checksum=None,
    retry=None,
):
    """Initiate a resumable upload.

    The content type of the upload will be determined in order
    of precedence:

    - The value passed in to this method (if not :data:`None`)
    - The value stored on the current blob
    - The default value ('application/octet-stream')

    :type client: :class:`~google.cloud.storage.client.Client`
    :param client:
        (Optional) The client to use.  If not passed, falls back to the
        ``client`` stored on the blob's bucket.

    :type stream: IO[bytes]
    :param stream: A bytes IO object open for reading.

    :type content_type: str
    :param content_type: Type of content being uploaded (or :data:`None`).

    :type size: int
    :param size:
        The number of bytes to be uploaded (which will be read from
        ``stream``). If not provided, the upload will be concluded once
        ``stream`` is exhausted (or :data:`None`).

    :type predefined_acl: str
    :param predefined_acl: (Optional) Predefined access control list

    :type num_retries: int
    :param num_retries:
        Number of upload retries. By default, only uploads with
        if_generation_match set will be retried, as uploads without the
        argument are not guaranteed to be idempotent. Setting num_retries
        will override this default behavior and guarantee retries even when
        if_generation_match is not set.  (Deprecated: This argument
        will be removed in a future release.)

    :type extra_headers: dict
    :param extra_headers:
        (Optional) Extra headers to add to standard headers.

    :type chunk_size: int
    :param chunk_size:
        (Optional) Chunk size to use when creating a
        :class:`~google.resumable_media.requests.ResumableUpload`.
        If not passed, will fall back to the chunk size on the
        current blob, if the chunk size of a current blob is also
        `None`, will set the default value.
        The default value of ``chunk_size`` is 100 MB.

    :type if_generation_match: long
    :param if_generation_match:
        (Optional) See :ref:`using-if-generation-match`

    :type if_generation_not_match: long
    :param if_generation_not_match:
        (Optional) See :ref:`using-if-generation-not-match`

    :type if_metageneration_match: long
    :param if_metageneration_match:
        (Optional) See :ref:`using-if-metageneration-match`

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match:
        (Optional) See :ref:`using-if-metageneration-not-match`

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response.  See: :ref:`configuring_timeouts`

    :type checksum: str
    :param checksum:
        (Optional) The type of checksum to compute to verify
        the integrity of the object. After the upload is complete, the
        server-computed checksum of the resulting object will be checked
        and google.resumable_media.common.DataCorruption will be raised on
        a mismatch. On a validation failure, the client will attempt to
        delete the uploaded object automatically. Supported values
        are "md5", "crc32c" and None. The default is None.

    :type retry: google.api_core.retry.Retry
    :param retry: (Optional) How to retry the RPC. A None value will disable
        retries. A google.api_core.retry.Retry value will enable retries,
        and the object will configure backoff and timeout options. Custom
        predicates (customizable error codes) are not supported for media
        operations such as this one.

        This private method does not accept ConditionalRetryPolicy values
        because the information necessary to evaluate the policy is instead
        evaluated in blob._do_upload().

        See the retry.py source code and docstrings in this package
        (google.cloud.storage.retry) for information on retry types and how
        to configure them.

    :rtype: tuple
    :returns:
        Pair of

        * The :class:`~google.resumable_media.requests.ResumableUpload`
          that was created
        * The ``transport`` used to initiate the upload.
    """
    client = self._require_client(client)
    if chunk_size is None:
        chunk_size = self.chunk_size
        if chunk_size is None:
            chunk_size = _DEFAULT_CHUNKSIZE

    transport = self._get_transport(client)
    if "metadata" in self._properties and "metadata" not in self._changes:
        self._changes.add("metadata")
    info = self._get_upload_arguments(client, content_type)
    headers, object_metadata, content_type = info
    if extra_headers is not None:
        headers.update(extra_headers)

    hostname = _get_host_name(client._connection)
    base_url = _RESUMABLE_URL_TEMPLATE.format(
        hostname=hostname, bucket_path=self.bucket.path, api_version=_API_VERSION
    )
    name_value_pairs = []

    if self.user_project is not None:
        name_value_pairs.append(("userProject", self.user_project))

    # When a Customer Managed Encryption Key is used to encrypt Cloud Storage object
    # at rest, object resource metadata will store the version of the Key Management
    # Service cryptographic material. If a Blob instance with KMS Key metadata set is
    # used to upload a new version of the object then the existing kmsKeyName version
    # value can't be used in the upload request and the client instead ignores it.
    if (
        self.kms_key_name is not None
        and "cryptoKeyVersions" not in self.kms_key_name
    ):
        name_value_pairs.append(("kmsKeyName", self.kms_key_name))

    if predefined_acl is not None:
        name_value_pairs.append(("predefinedAcl", predefined_acl))

    if if_generation_match is not None:
        name_value_pairs.append(("ifGenerationMatch", if_generation_match))

    if if_generation_not_match is not None:
        name_value_pairs.append(("ifGenerationNotMatch", if_generation_not_match))

    if if_metageneration_match is not None:
        name_value_pairs.append(("ifMetagenerationMatch", if_metageneration_match))

    if if_metageneration_not_match is not None:
        name_value_pairs.append(
            ("ifMetaGenerationNotMatch", if_metageneration_not_match)
        )

    upload_url = _add_query_parameters(base_url, name_value_pairs)
    upload = ResumableUpload(
        upload_url, chunk_size, headers=headers, checksum=checksum
    )

    upload._retry_strategy = _api_core_retry_to_resumable_media_retry(
        retry, num_retries
    )
  upload.initiate(
        transport,
        stream,
        object_metadata,
        content_type,
        total_bytes=size,
        stream_final=False,
        timeout=timeout,
    )

google/cloud/storage/blob.py:2078:


self = <google.resumable_media.requests.upload.ResumableUpload object at 0x7ffa6b0132b0>
transport = <google.auth.transport.requests.AuthorizedSession object at 0x7ffa6ccc82b0>
stream = <google.cloud.storage.fileio.SlidingBuffer object at 0x7ffa6b013b80>
metadata = {'name': 'LargeFile'}, content_type = 'application/octet-stream'
total_bytes = None, stream_final = False, timeout = 60

def initiate(
    self,
    transport,
    stream,
    metadata,
    content_type,
    total_bytes=None,
    stream_final=True,
    timeout=(
        _request_helpers._DEFAULT_CONNECT_TIMEOUT,
        _request_helpers._DEFAULT_READ_TIMEOUT,
    ),
):
    """Initiate a resumable upload.

    By default, this method assumes your ``stream`` is in a "final"
    state ready to transmit. However, ``stream_final=False`` can be used
    to indicate that the size of the resource is not known. This can happen
    if bytes are being dynamically fed into ``stream``, e.g. if the stream
    is attached to application logs.

    If ``stream_final=False`` is used, :attr:`chunk_size` bytes will be
    read from the stream every time :meth:`transmit_next_chunk` is called.
    If one of those reads produces strictly fewer bytes than the chunk
    size, the upload will be concluded.

    Args:
        transport (~requests.Session): A ``requests`` object which can
            make authenticated requests.
        stream (IO[bytes]): The stream (i.e. file-like object) that will
            be uploaded. The stream **must** be at the beginning (i.e.
            ``stream.tell() == 0``).
        metadata (Mapping[str, str]): The resource metadata, such as an
            ACL list.
        content_type (str): The content type of the resource, e.g. a JPEG
            image has content type ``image/jpeg``.
        total_bytes (Optional[int]): The total number of bytes to be
            uploaded. If specified, the upload size **will not** be
            determined from the stream (even if ``stream_final=True``).
        stream_final (Optional[bool]): Indicates if the ``stream`` is
            "final" (i.e. no more bytes will be added to it). In this case
            we determine the upload size from the size of the stream. If
            ``total_bytes`` is passed, this argument will be ignored.
        timeout (Optional[Union[float, Tuple[float, float]]]):
            The number of seconds to wait for the server response.
            Depending on the retry strategy, a request may be repeated
            several times using the same timeout each time.

            Can also be passed as a tuple (connect_timeout, read_timeout).
            See :meth:`requests.Session.request` documentation for details.

    Returns:
        ~requests.Response: The HTTP response returned by ``transport``.
    """
    method, url, payload, headers = self._prepare_initiate_request(
        stream,
        metadata,
        content_type,
        total_bytes=total_bytes,
        stream_final=stream_final,
    )

    # Wrap the request business logic in a function to be retried.
    def retriable_request():
        result = transport.request(
            method, url, data=payload, headers=headers, timeout=timeout
        )

        self._process_initiate_response(result)

        return result
  return _request_helpers.wait_and_retry(
        retriable_request, self._get_status_code, self._retry_strategy
    )

.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/requests/upload.py:420:


func = <function ResumableUpload.initiate..retriable_request at 0x7ffa6afcd820>
get_status_code = <function RequestsMixin._get_status_code at 0x7ffa6d0f24c0>
retry_strategy = <google.resumable_media.common.RetryStrategy object at 0x7ffa6b0132e0>

def wait_and_retry(func, get_status_code, retry_strategy):
    """Attempts to retry a call to ``func`` until success.

    Expects ``func`` to return an HTTP response and uses ``get_status_code``
    to check if the response is retry-able.

    ``func`` is expected to raise a failure status code as a
    common.InvalidResponse, at which point this method will check the code
    against the common.RETRIABLE list of retriable status codes.

    Will retry until :meth:`~.RetryStrategy.retry_allowed` (on the current
    ``retry_strategy``) returns :data:`False`. Uses
    :func:`_helpers.calculate_retry_wait` to double the wait time (with jitter)
    after each attempt.

    Args:
        func (Callable): A callable that takes no arguments and produces
            an HTTP response which will be checked as retry-able.
        get_status_code (Callable[Any, int]): Helper to get a status code
            from a response.
        retry_strategy (~google.resumable_media.common.RetryStrategy): The
            strategy to use if the request fails and must be retried.

    Returns:
        object: The return value of ``func``.
    """
    total_sleep = 0.0
    num_retries = 0
    # base_wait will be multiplied by the multiplier on the first retry.
    base_wait = float(retry_strategy.initial_delay) / retry_strategy.multiplier

    # Set the retriable_exception_type if possible. We expect requests to be
    # present here and the transport to be using requests.exceptions errors,
    # but due to loose coupling with the transport layer we can't guarantee it.

    while True:  # return on success or when retries exhausted.
        error = None
        try:
            response = func()
        except _CONNECTION_ERROR_CLASSES as e:
            error = e  # Fall through to retry, if there are retries left.
        except common.InvalidResponse as e:
            # An InvalidResponse is only retriable if its status code matches.
            # The `process_response()` method on a Download or Upload method
            # will convert the status code into an exception.
            if get_status_code(e.response) in common.RETRYABLE:
                error = e  # Fall through to retry, if there are retries left.
            else:
                raise  # If the status code is not retriable, raise w/o retry.
        else:
            return response

        base_wait, wait_time = _helpers.calculate_retry_wait(
            base_wait, retry_strategy.max_sleep, retry_strategy.multiplier
        )
        num_retries += 1
        total_sleep += wait_time

        # Check if (another) retry is allowed. If retries are exhausted and
        # no acceptable response was received, raise the retriable error.
        if not retry_strategy.retry_allowed(total_sleep, num_retries):
          raise error

.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/requests/_request_helpers.py:171:


func = <function ResumableUpload.initiate..retriable_request at 0x7ffa6afcd820>
get_status_code = <function RequestsMixin._get_status_code at 0x7ffa6d0f24c0>
retry_strategy = <google.resumable_media.common.RetryStrategy object at 0x7ffa6b0132e0>

def wait_and_retry(func, get_status_code, retry_strategy):
    """Attempts to retry a call to ``func`` until success.

    Expects ``func`` to return an HTTP response and uses ``get_status_code``
    to check if the response is retry-able.

    ``func`` is expected to raise a failure status code as a
    common.InvalidResponse, at which point this method will check the code
    against the common.RETRIABLE list of retriable status codes.

    Will retry until :meth:`~.RetryStrategy.retry_allowed` (on the current
    ``retry_strategy``) returns :data:`False`. Uses
    :func:`_helpers.calculate_retry_wait` to double the wait time (with jitter)
    after each attempt.

    Args:
        func (Callable): A callable that takes no arguments and produces
            an HTTP response which will be checked as retry-able.
        get_status_code (Callable[Any, int]): Helper to get a status code
            from a response.
        retry_strategy (~google.resumable_media.common.RetryStrategy): The
            strategy to use if the request fails and must be retried.

    Returns:
        object: The return value of ``func``.
    """
    total_sleep = 0.0
    num_retries = 0
    # base_wait will be multiplied by the multiplier on the first retry.
    base_wait = float(retry_strategy.initial_delay) / retry_strategy.multiplier

    # Set the retriable_exception_type if possible. We expect requests to be
    # present here and the transport to be using requests.exceptions errors,
    # but due to loose coupling with the transport layer we can't guarantee it.

    while True:  # return on success or when retries exhausted.
        error = None
        try:
          response = func()

.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/requests/_request_helpers.py:148:


def retriable_request():
    result = transport.request(
        method, url, data=payload, headers=headers, timeout=timeout
    )
  self._process_initiate_response(result)

.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/requests/upload.py:416:


self = <google.resumable_media.requests.upload.ResumableUpload object at 0x7ffa6b0132b0>
response = <Response [503]>

def _process_initiate_response(self, response):
    """Process the response from an HTTP request that initiated upload.

    This is everything that must be done after a request that doesn't
    require network I/O (or other I/O). This is based on the `sans-I/O`_
    philosophy.

    This method takes the URL from the ``Location`` header and stores it
    for future use. Within that URL, we assume the ``upload_id`` query
    parameter has been included, but we do not check.

    Args:
        response (object): The HTTP response object (need headers).

    .. _sans-I/O: https://sans-io.readthedocs.io/
    """
  _helpers.require_status_code(
        response,
        (http.client.OK, http.client.CREATED),
        self._get_status_code,
        callback=self._make_invalid,
    )

.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/_upload.py:509:


response = <Response [503]>
status_codes = (<HTTPStatus.OK: 200>, <HTTPStatus.CREATED: 201>)
get_status_code = <function RequestsMixin._get_status_code at 0x7ffa6d0f24c0>
callback = <bound method ResumableUpload._make_invalid of <google.resumable_media.requests.upload.ResumableUpload object at 0x7ffa6b0132b0>>

def require_status_code(response, status_codes, get_status_code, callback=do_nothing):
    """Require a response has a status code among a list.

    Args:
        response (object): The HTTP response object.
        status_codes (tuple): The acceptable status codes.
        get_status_code (Callable[Any, int]): Helper to get a status code
            from a response.
        callback (Optional[Callable]): A callback that takes no arguments,
            to be executed when an exception is being raised.

    Returns:
        int: The status code.

    Raises:
        ~google.resumable_media.common.InvalidResponse: If the status code
            is not one of the values in ``status_codes``.
    """
    status_code = get_status_code(response)
    if status_code not in status_codes:
        if status_code not in common.RETRYABLE:
            callback()
      raise common.InvalidResponse(
            response,
            "Request failed with status code",
            status_code,
            "Expected one of",
            *status_codes
        )

E google.resumable_media.common.InvalidResponse: ('Request failed with status code', 503, 'Expected one of', <HTTPStatus.OK: 200>, <HTTPStatus.CREATED: 201>)

.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/_helpers.py:108: InvalidResponse

During handling of the above exception, another exception occurred:

shared_bucket = <Bucket: gcp-systest-1673285927243>, blobs_to_delete = []
file_data = {'big': {'hash': b'cEome4a+NYd7YIXzXQnR5Q==', 'path': '/tmpfs/src/github/python-storage/tests/data/five-point-one-mb-f...g'}, 'simple': {'hash': b'3Hkwjv2WvCnKjNR6Z3CboA==', 'path': '/tmpfs/src/github/python-storage/tests/data/simple.txt'}}
service_account = <google.oauth2.service_account.Credentials object at 0x7ffa6cb459a0>

def test_blobwriter_and_blobreader(
    shared_bucket,
    blobs_to_delete,
    file_data,
    service_account,
):
    blob = shared_bucket.blob("LargeFile")

    # Test BlobWriter works.
    info = file_data["big"]
    with open(info["path"], "rb") as file_obj:
        with blob.open("wb", chunk_size=256 * 1024) as writer:
            writer.write(file_obj.read(100))
            writer.write(file_obj.read(256 * 1024))
          writer.write(file_obj.read())

tests/system/test_fileio.py:34:


google/cloud/storage/fileio.py:431: in close
self._upload_chunks_from_buffer(1)
google/cloud/storage/fileio.py:405: in _upload_chunks_from_buffer
self._initiate_upload()
google/cloud/storage/fileio.py:389: in _initiate_upload
self._upload_and_transport = self._blob._initiate_resumable_upload(
google/cloud/storage/blob.py:2078: in _initiate_resumable_upload
upload.initiate(
.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/requests/upload.py:420: in initiate
return _request_helpers.wait_and_retry(
.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/requests/_request_helpers.py:171: in wait_and_retry
raise error
.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/requests/_request_helpers.py:148: in wait_and_retry
response = func()
.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/requests/upload.py:416: in retriable_request
self._process_initiate_response(result)
.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/_upload.py:509: in _process_initiate_response
_helpers.require_status_code(


response = <Response [503]>
status_codes = (<HTTPStatus.OK: 200>, <HTTPStatus.CREATED: 201>)
get_status_code = <function RequestsMixin._get_status_code at 0x7ffa6d0f24c0>
callback = <bound method ResumableUpload._make_invalid of <google.resumable_media.requests.upload.ResumableUpload object at 0x7ffa6b013280>>

def require_status_code(response, status_codes, get_status_code, callback=do_nothing):
    """Require a response has a status code among a list.

    Args:
        response (object): The HTTP response object.
        status_codes (tuple): The acceptable status codes.
        get_status_code (Callable[Any, int]): Helper to get a status code
            from a response.
        callback (Optional[Callable]): A callback that takes no arguments,
            to be executed when an exception is being raised.

    Returns:
        int: The status code.

    Raises:
        ~google.resumable_media.common.InvalidResponse: If the status code
            is not one of the values in ``status_codes``.
    """
    status_code = get_status_code(response)
    if status_code not in status_codes:
        if status_code not in common.RETRYABLE:
            callback()
      raise common.InvalidResponse(
            response,
            "Request failed with status code",
            status_code,
            "Expected one of",
            *status_codes
        )

E google.resumable_media.common.InvalidResponse: ('Request failed with status code', 503, 'Expected one of', <HTTPStatus.OK: 200>, <HTTPStatus.CREATED: 201>)

.nox/system-3-8/lib/python3.8/site-packages/google/resumable_media/_helpers.py:108: InvalidResponse

Metadata

Metadata

Assignees

No one assigned

    Labels

    api: storage — Issues related to the googleapis/python-storage API.
    flakybot: flaky — Tells the Flaky Bot not to close or comment on this issue.
    flakybot: issue — An issue filed by the Flaky Bot. Should not be added manually.
    priority: p2 — Moderately-important priority. Fix may not be included in next release.
    type: bug — Error or flaw in code with unintended results or allowing sub-optimal usage patterns.

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions