Skip to content
This repository has been archived by the owner on Jan 19, 2024. It is now read-only.

Fix Bug#131: 0 rows returned when the server returns 429 on the first page of results. #132

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 4 additions & 10 deletions azure/cosmos/execution_context/base_execution_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,13 +135,7 @@ def _fetch_items_helper_no_retries(self, fetch_function):
if fetched_items:
break
return fetched_items

def _fetch_items_helper_with_retries(self, fetch_function):
def callback():
return self._fetch_items_helper_no_retries(fetch_function)

return retry_utility._Execute(self._client, self._client._global_endpoint_manager, callback)



class _DefaultQueryExecutionContext(_QueryExecutionContextBase):
"""
Expand All @@ -166,7 +160,7 @@ def __init__(self, client, options, fetch_function):

def _fetch_next_block(self):
while super(_DefaultQueryExecutionContext, self)._has_more_pages() and len(self._buffer) == 0:
return self._fetch_items_helper_with_retries(self._fetch_function)
return self._fetch_items_helper_no_retries(self._fetch_function)

class _MultiCollectionQueryExecutionContext(_QueryExecutionContextBase):
"""
Expand Down Expand Up @@ -235,7 +229,7 @@ def _fetch_next_block(self):
:rtype: list
"""
# Fetch next block of results by executing the query against the current document collection
fetched_items = self._fetch_items_helper_with_retries(self._fetch_function)
fetched_items = self._fetch_items_helper_no_retries(self._fetch_function)

# If there are multiple document collections to query for(in case of partitioning), keep looping through each one of them,
# creating separate feed queries for each collection and fetching the items
Expand All @@ -255,7 +249,7 @@ def fetch_fn(options):

self._fetch_function = fetch_fn

fetched_items = self._fetch_items_helper_with_retries(self._fetch_function)
fetched_items = self._fetch_items_helper_no_retries(self._fetch_function)
self._current_collection_index += 1
else:
break
Expand Down
4 changes: 2 additions & 2 deletions azure/cosmos/retry_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ class RetryOptions(object):
:ivar int MaxWaitTimeInSeconds:
Max wait time in seconds to wait for a request while the retries are happening. Default value 30 seconds.
"""
def __init__(self, max_retry_attempt_count = 9, fixed_retry_interval_in_milliseconds = None, max_wait_time_in_seconds = 30):
def __init__(self, max_retry_attempt_count = 17, fixed_retry_interval_in_milliseconds = None, max_wait_time_in_seconds = 60):
self._max_retry_attempt_count = max_retry_attempt_count
self._fixed_retry_interval_in_milliseconds = fixed_retry_interval_in_milliseconds
self._max_wait_time_in_seconds = max_wait_time_in_seconds
Expand All @@ -47,4 +47,4 @@ def FixedRetryIntervalInMilliseconds(self):

@property
def MaxWaitTimeInSeconds(self):
return self._max_wait_time_in_seconds
return self._max_wait_time_in_seconds
36 changes: 34 additions & 2 deletions test/retry_policy_tests.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#The MIT License (MIT)
#The MIT License (MIT)
#Copyright (c) 2014 Microsoft Corporation

#Permission is hereby granted, free of charge, to any person obtaining a copy
Expand Down Expand Up @@ -222,7 +222,7 @@ def test_default_retry_policy_for_query(self):
result_docs = list(docs)
self.assertEqual(result_docs[0]['id'], 'doc1')
self.assertEqual(result_docs[1]['id'], 'doc2')
self.assertEqual(self.counter, 12)
self.assertEqual(self.counter, 6)
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This test counts how many retries were done. Before the fix, 12 HTTP requests were generated; now 6 are. This is by design. Updating the test to match.


self.counter = 0
retry_utility._ExecuteFunction = self.OriginalExecuteFunction
Expand Down Expand Up @@ -278,6 +278,38 @@ def test_default_retry_policy_for_create(self):

retry_utility._ExecuteFunction = self.OriginalExecuteFunction

def test_429_on_first_page(self):
client = document_client.DocumentClient(Test_retry_policy_tests.host, {'masterKey': Test_retry_policy_tests.masterKey})

document_definition = { 'id': 'doc429',
'name': 'sample document',
'key': 'value'}

created_document = client.CreateDocument(self.created_collection['_self'], document_definition)

# Mock an overloaded server which always returns 429 Too Many
# Requests, by hooking the client's POST method.
original_post_function = client._DocumentClient__Post
client._DocumentClient__Post = self._MockPost429

# Test: query for the document. Expect the mock overloaded server
# to raise a 429 exception.
try:
query = client.QueryDocuments(self.created_collection['_self'], "SELECT * FROM c")
docs = list(query) # force execution now
self.assertFalse(True, 'function should raise HTTPFailure.')

except errors.HTTPFailure as ex:
self.assertEqual(ex.status_code, StatusCodes.TOO_MANY_REQUESTS)

client._DocumentClient__Post = original_post_function
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Mocking out the server returning a 429 error by overriding the client's __Post method. It needs to be done this low down because mocking at the _Execute level (as in the rest of this file) misses the lower level of retries.

client.DeleteDocument(created_document['_self'])

def _MockPost429(self, url, path, body, headers):
raise errors.HTTPFailure(StatusCodes.TOO_MANY_REQUESTS,
"Request rate is too large",
{HttpHeaders.RetryAfterInMilliseconds: 500})

def _MockExecuteFunction(self, function, *args, **kwargs):
raise errors.HTTPFailure(StatusCodes.TOO_MANY_REQUESTS, "Request rate is too large", {HttpHeaders.RetryAfterInMilliseconds: self.retry_after_in_milliseconds})

Expand Down