From b1d2ac5797ab0e747ff12a2a004c4bb6e4a4c5d3 Mon Sep 17 00:00:00 2001
From: Alejandro Casanovas
Date: Mon, 11 Nov 2019 12:15:03 +0100
Subject: [PATCH 1/3] should_refresh_token now accepts an optional connection
 param (#350)

It now returns a tri-state value with different meanings (True, False or None).
Updated Connection._internal_request to check for the tri-state value.
Connection now updates the session token when the refresh was performed by
another instance.
Updated the token_backends.py example.
---
 O365/connection.py         | 21 ++++++++++++---------
 O365/utils/token.py        | 14 +++++++++++---
 examples/token_backends.py |  4 ++--
 3 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/O365/connection.py b/O365/connection.py
index 28e1c7436851..ad7190128f95 100644
--- a/O365/connection.py
+++ b/O365/connection.py
@@ -595,7 +595,7 @@ def refresh_token(self):
             raise RuntimeError('Token not found.')
 
         if token.is_long_lived or self.auth_flow_type == 'credentials':
-
+            log.info('Refreshing token')
             if self.auth_flow_type == 'authorization':
                 client_id, client_secret = self.auth
                 self.token_backend.token = Token(
@@ -609,7 +609,7 @@ def refresh_token(self):
                 if self.request_token(None, store_token=False) is False:
                     log.error('Refresh for Client Credentials Grant Flow failed.')
                     return False
-
+                log.info('New oauth token fetched by refresh method')
         else:
             log.error('You can not refresh an access token that has no "refreh_token" available.'
                       'Include "offline_access" scope when authenticating to get a "refresh_token"')
@@ -675,23 +675,26 @@ def _internal_request(self, request_obj, url, method, **kwargs):
                 return response
             except TokenExpiredError as e:
                 # Token has expired, try to refresh the token and try again on the next loop
+                log.info('Oauth Token is expired')
                 if self.token_backend.token.is_long_lived is False and self.auth_flow_type == 'authorization':
                     raise e
                 if token_refreshed:
                     # Refresh token done but still TokenExpiredError raise
                     raise RuntimeError('Token Refresh Operation not working')
-                if self.token_backend.should_refresh_token():
+                should_rt = self.token_backend.should_refresh_token(self)
+                if should_rt is True:
                     # The backend has checked that we can refresh the token
-                    log.info('Oauth Token is expired, fetching a new token')
                     if self.refresh_token() is False:
                         raise RuntimeError('Token Refresh Operation not working')
-                    log.info('New oauth token fetched')
                     token_refreshed = True
+                elif should_rt is False:
+                    # the token was refreshed by another instance and updated into
+                    # this instance, so: update the session token and
+                    # go back to the loop and try the request again.
+                    request_obj.token = self.token_backend.token
                 else:
-                    # the token was refreshed by another token and updated into
-                    # this instance, so: go back to the loop and try the request
-                    # again.
-                    pass
+                    # the refresh was performed by the token backend.
+                    token_refreshed = True
 
             except (ConnectionError, ProxyError, SSLError, Timeout) as e:
                 # We couldn't connect to the target url, raise error
diff --git a/O365/utils/token.py b/O365/utils/token.py
index 81b2b9a73364..bb191145334b 100644
--- a/O365/utils/token.py
+++ b/O365/utils/token.py
@@ -89,7 +89,7 @@ def check_token(self):
         """ Optional Abstract method to check for the token existence """
         raise NotImplementedError
 
-    def should_refresh_token(self):
+    def should_refresh_token(self, con=None):
         """
         This method is intended to be implemented for environments where
         multiple Connection instances are running on paralel.
@@ -117,13 +117,21 @@ def should_refresh_token(self):
 
         If this returns True, then the Connection will refresh the token.
         If this returns False, then the Connection will NOT refresh the token.
+        If this returns None, then this method already executed the refresh and therefore
+        the Connection does not have to.
 
         By default this always returns True
 
         There is an example of this in the examples folder.
 
-        :rtype: bool
-        :return: True if the Connection can refresh the token false otherwise.
+        :param Connection con: the connection that calls this method. This
+         is passed because the locking mechanism may need to refresh the
+         token within the lock applied in this method.
+        :rtype: bool or None
+        :return: True if the Connection can refresh the token
+         False if the Connection should not refresh the token
+         None if the token was refreshed and therefore the
+         Connection should do nothing.
         """
         return True
 
diff --git a/examples/token_backends.py b/examples/token_backends.py
index d71d76a5f900..c426836ff7c4 100644
--- a/examples/token_backends.py
+++ b/examples/token_backends.py
@@ -10,7 +10,7 @@
 
 
 # This is an implementation of the 'should_refresh_token' method
-class MyFirestoreBackend(FirestoreBackend):
+class LockableFirestoreBackend(FirestoreBackend):
     """ A firestore backend that can answer to
     'should_refresh_token'.
     Synchronous.
@@ -48,7 +48,7 @@ def _check_refresh_flag(self):
             return token
         return None
 
-    def should_refresh_token(self):
+    def should_refresh_token(self, con=None):
        # 1) check if the token is already a new one:
        new_token = self.load_token()
        if new_token and new_token.get('access_token') != self.token.get('access_token'):

From 7e193cb1f4ed7cd450099c26f75d83c0a6ea12b9 Mon Sep 17 00:00:00 2001
From: Alejandro Casanovas
Date: Mon, 11 Nov 2019 16:26:37 +0100
Subject: [PATCH 2/3] Drive: the `upload_file` method can now force the upload
 to be done in chunks

---
 O365/drive.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/O365/drive.py b/O365/drive.py
index 71340953c0e7..36d6d46fa0e7 100644
--- a/O365/drive.py
+++ b/O365/drive.py
@@ -1144,7 +1144,8 @@ def search(self, search_text, limit=None, *, query=None, order_by=None,
         else:
             return items
 
-    def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE):
+    def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE,
+                    upload_in_chunks=False):
         """ Uploads a file
 
         :param item: path to the item you want to upload
         :type item: str or Path
         :param chunk_size: Only applies if file is bigger than 4MB.
          Chunk size for uploads. Must be a multiple of 327.680 bytes
+        :param upload_in_chunks: force the method to upload the file in chunks
         :return: uploaded file
         :rtype: DriveItem
         """
@@ -1168,7 +1170,7 @@ def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE
 
         file_size = item.stat().st_size
 
-        if file_size <= UPLOAD_SIZE_LIMIT_SIMPLE:
+        if not upload_in_chunks and file_size <= UPLOAD_SIZE_LIMIT_SIMPLE:
             # Simple Upload
             url = self.build_url(
                 self._endpoints.get('simple_upload').format(id=self.object_id,

From cd46e6cb9cba0f35a784fcbf448c6adeaa94e633 Mon Sep 17 00:00:00 2001
From: Alejandro Casanovas
Date: Mon, 11 Nov 2019 17:16:00 +0100
Subject: [PATCH 3/3] Query: Fixed bug when filtering on None. None is now
 treated as null.

Drive: Added `get_child_folders` method on Drives and Folders
---
 O365/drive.py       | 60 ++++++++++++++++++++++++++++++++++++++-------
 O365/utils/utils.py |  2 ++
 2 files changed, 53 insertions(+), 9 deletions(-)

diff --git a/O365/drive.py b/O365/drive.py
index 36d6d46fa0e7..64206b9aa7a3 100644
--- a/O365/drive.py
+++ b/O365/drive.py
@@ -1007,10 +1007,10 @@ def get_items(self, limit=None, *, query=None, order_by=None, batch=None):
             params['$orderby'] = order_by
 
         if query:
-            if query.has_filters:
-                warnings.warn('Filters are not allowed by the '
-                              'Api Provider in this method')
-                query.clear_filters()
+            # if query.has_filters:
+            #     warnings.warn('Filters are not allowed by the '
+            #                   'Api Provider in this method')
+            #     query.clear_filters()
             if isinstance(query, str):
                 params['$filter'] = query
             else:
@@ -1034,6 +1034,27 @@ def get_items(self, limit=None, *, query=None, order_by=None, batch=None):
         else:
             return items
 
+    def get_child_folders(self, limit=None, *, query=None, order_by=None, batch=None):
+        """ Returns all the folders inside this folder
+
+        :param int limit: max no. of folders to get. Over 999 uses batch.
+        :param query: applies an OData filter to the request
+        :type query: Query or str
+        :param order_by: orders the result set based on this condition
+        :type order_by: Query or str
+        :param int batch: batch size, retrieves items in
+         batches allowing the retrieval of more items than the limit.
+        :return: list of items in this folder
+        :rtype: list[DriveItem] or Pagination
+        """
+
+        if query:
+            query = query.on_attribute('folder').unequal(None)
+        else:
+            query = self.q('folder').unequal(None)
+
+        return self.get_items(limit=limit, query=query, order_by=order_by, batch=batch)
+
     def create_child_folder(self, name, description=None):
         """ Creates a Child Folder
 
@@ -1364,11 +1385,11 @@ def _base_get_list(self, url, limit=None, *, query=None, order_by=None,
             params['$orderby'] = order_by
 
         if query:
-            if query.has_filters:
-                warnings.warn(
-                    'Filters are not allowed by the Api Provider '
-                    'in this method')
-                query.clear_filters()
+            # if query.has_filters:
+            #     warnings.warn(
+            #         'Filters are not allowed by the Api Provider '
+            #         'in this method')
+            #     query.clear_filters()
             if isinstance(query, str):
                 params['$filter'] = query
             else:
@@ -1417,6 +1438,27 @@ def get_items(self, limit=None, *, query=None, order_by=None, batch=None):
 
         return self._base_get_list(url, limit=limit, query=query,
                                    order_by=order_by, batch=batch)
 
+    def get_child_folders(self, limit=None, *, query=None, order_by=None, batch=None):
+        """ Returns all the folders inside this folder
+
+        :param int limit: max no. of folders to get. Over 999 uses batch.
+        :param query: applies an OData filter to the request
+        :type query: Query or str
+        :param order_by: orders the result set based on this condition
+        :type order_by: Query or str
+        :param int batch: batch size, retrieves items in
+         batches allowing the retrieval of more items than the limit.
+        :return: list of items in this folder
+        :rtype: list[DriveItem] or Pagination
+        """
+
+        if query:
+            query = query.on_attribute('folder').unequal(None)
+        else:
+            query = self.q('folder').unequal(None)
+
+        return self.get_items(limit=limit, query=query, order_by=order_by, batch=batch)
+
     def get_recent(self, limit=None, *, query=None, order_by=None, batch=None):
         """ Returns a collection of recently used DriveItems
diff --git a/O365/utils/utils.py b/O365/utils/utils.py
index c95d0a1c3300..206e9ccba78a 100644
--- a/O365/utils/utils.py
+++ b/O365/utils/utils.py
@@ -921,6 +921,8 @@ def _parse_filter_word(self, word):
                 word.isoformat())  # convert datetime to isoformat
         elif isinstance(word, bool):
             word = str(word).lower()
+        elif word is None:
+            word = 'null'
         return word
 
     @staticmethod
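
A minimal sketch of how a custom backend could implement the new tri-state
'should_refresh_token' contract from PATCH 1/3. It is illustrative only and not
part of this series: LockableFileSystemTokenBackend, lock_path and max_wait are
made-up names, and a real multi-instance deployment would use a proper shared
lock (the bundled example uses Firestore instead of a lock file).

import os
import time

from O365.utils import FileSystemTokenBackend


class LockableFileSystemTokenBackend(FileSystemTokenBackend):
    """Hypothetical backend: only one instance refreshes the token at a time."""

    def __init__(self, *args, lock_path='o365_refresh.lock', max_wait=30, **kwargs):
        self.lock_path = lock_path
        self.max_wait = max_wait  # seconds to wait for another instance's refresh
        super().__init__(*args, **kwargs)

    def should_refresh_token(self, con=None):
        # 1) Another instance may already have stored a fresh token: reload it
        #    and return False so the Connection only updates its session token.
        new_token = self.load_token()
        if new_token and new_token.get('access_token') != self.token.get('access_token'):
            self.token = new_token
            return False

        # 2) Try to become the refreshing instance by creating the lock file.
        try:
            fd = os.open(self.lock_path, os.O_CREAT | os.O_EXCL)
        except FileExistsError:
            # 3) Someone else is refreshing: wait for the lock to disappear,
            #    then reload the token that the other instance stored.
            waited = 0
            while os.path.exists(self.lock_path) and waited < self.max_wait:
                time.sleep(1)
                waited += 1
            self.token = self.load_token()
            return False

        # 4) This instance owns the lock: refresh inside the lock using the
        #    Connection passed in, then return None so the Connection knows
        #    the refresh was already performed by the backend.
        try:
            os.close(fd)
            if con is not None:
                con.refresh_token()
        finally:
            os.remove(self.lock_path)
        return None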
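
An illustrative usage sketch for PATCH 2/3 and PATCH 3/3. The credentials, the
'/Documents/reports' path and the file name are placeholders, and it assumes an
already authenticated account.

from O365 import Account

account = Account(('my_client_id', 'my_client_secret'))
drive = account.storage().get_default_drive()
folder = drive.get_item_by_path('/Documents/reports')

# PATCH 2/3: force a chunked upload session even for files below the 4MB
# simple-upload limit.
folder.upload_file('big_report.xlsx', upload_in_chunks=True)

# PATCH 3/3: list only the sub folders of this folder. Internally this adds a
# "folder ne null" filter, which works now that None is rendered as 'null'.
for child_folder in folder.get_child_folders(limit=25):
    print(child_folder.name)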