Skip to content

Commit

Permalink
Merge pull request googleapis#1 from O365/master
Browse files Browse the repository at this point in the history
merge janscas latest changes
  • Loading branch information
riptusk331 authored Nov 11, 2019
2 parents 1e69ef3 + cd46e6c commit 8f0d99a
Show file tree
Hide file tree
Showing 5 changed files with 82 additions and 25 deletions.
21 changes: 12 additions & 9 deletions O365/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -595,7 +595,7 @@ def refresh_token(self):
raise RuntimeError('Token not found.')

if token.is_long_lived or self.auth_flow_type == 'credentials':

log.info('Refreshing token')
if self.auth_flow_type == 'authorization':
client_id, client_secret = self.auth
self.token_backend.token = Token(
Expand All @@ -609,7 +609,7 @@ def refresh_token(self):
if self.request_token(None, store_token=False) is False:
log.error('Refresh for Client Credentials Grant Flow failed.')
return False

log.info('New oauth token fetched by refresh method')
else:
log.error('You can not refresh an access token that has no "refresh_token" available.'
          ' Include "offline_access" scope when authenticating to get a "refresh_token"')
Expand Down Expand Up @@ -675,23 +675,26 @@ def _internal_request(self, request_obj, url, method, **kwargs):
return response
except TokenExpiredError as e:
# Token has expired, try to refresh the token and try again on the next loop
log.info('Oauth Token is expired')
if self.token_backend.token.is_long_lived is False and self.auth_flow_type == 'authorization':
raise e
if token_refreshed:
# Refresh already performed but TokenExpiredError was raised again
raise RuntimeError('Token Refresh Operation not working')
if self.token_backend.should_refresh_token():
should_rt = self.token_backend.should_refresh_token(self)
if should_rt is True:
# The backend has checked that we can refresh the token
log.info('Oauth Token is expired, fetching a new token')
if self.refresh_token() is False:
raise RuntimeError('Token Refresh Operation not working')
log.info('New oauth token fetched')
token_refreshed = True
elif should_rt is False:
# the token was refreshed by another instance and updated into
# this instance, so: update the session token and
# go back to the loop and try the request again.
request_obj.token = self.token_backend.token
else:
# the token was refreshed by another instance and updated into
# this instance, so: go back to the loop and try the request
# again.
pass
# the refresh was performed by the token backend.
token_refreshed = True

except (ConnectionError, ProxyError, SSLError, Timeout) as e:
# We couldn't connect to the target url, raise error
Expand Down
66 changes: 55 additions & 11 deletions O365/drive.py
Original file line number Diff line number Diff line change
Expand Up @@ -1007,10 +1007,10 @@ def get_items(self, limit=None, *, query=None, order_by=None, batch=None):
params['$orderby'] = order_by

if query:
if query.has_filters:
warnings.warn('Filters are not allowed by the '
'Api Provider in this method')
query.clear_filters()
# if query.has_filters:
# warnings.warn('Filters are not allowed by the '
# 'Api Provider in this method')
# query.clear_filters()
if isinstance(query, str):
params['$filter'] = query
else:
Expand All @@ -1034,6 +1034,27 @@ def get_items(self, limit=None, *, query=None, order_by=None, batch=None):
else:
return items

def get_child_folders(self, limit=None, *, query=None, order_by=None, batch=None):
    """ Returns the folders contained in this folder.

    Filters the item listing down to items that expose the
    'folder' facet (i.e. excludes plain files).

    :param int limit: max no. of folders to get. Over 999 uses batch.
    :param query: applies an OData filter to the request
    :type query: Query or str
    :param order_by: orders the result set based on this condition
    :type order_by: Query or str
    :param int batch: batch size, retrieves items in
     batches allowing to retrieve more items than the limit.
    :return: child folders of this folder
    :rtype: list[DriveItem] or Pagination
    """
    # Only drive items with a non-null 'folder' facet are folders.
    folder_filter = (query.on_attribute('folder') if query
                     else self.q('folder')).unequal(None)

    return self.get_items(limit=limit, query=folder_filter,
                          order_by=order_by, batch=batch)

def create_child_folder(self, name, description=None):
""" Creates a Child Folder
Expand Down Expand Up @@ -1144,7 +1165,8 @@ def search(self, search_text, limit=None, *, query=None, order_by=None,
else:
return items

def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE):
def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE,
upload_in_chunks=False):
""" Uploads a file
:param item: path to the item you want to upload
Expand All @@ -1153,6 +1175,7 @@ def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE
:type item: str or Path
:param chunk_size: Only applies if file is bigger than 4MB.
Chunk size for uploads. Must be a multiple of 327,680 bytes (320 KiB)
:param upload_in_chunks: force the method to upload the file in chunks
:return: uploaded file
:rtype: DriveItem
"""
Expand All @@ -1168,7 +1191,7 @@ def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE

file_size = item.stat().st_size

if file_size <= UPLOAD_SIZE_LIMIT_SIMPLE:
if not upload_in_chunks and file_size <= UPLOAD_SIZE_LIMIT_SIMPLE:
# Simple Upload
url = self.build_url(
self._endpoints.get('simple_upload').format(id=self.object_id,
Expand Down Expand Up @@ -1362,11 +1385,11 @@ def _base_get_list(self, url, limit=None, *, query=None, order_by=None,
params['$orderby'] = order_by

if query:
if query.has_filters:
warnings.warn(
'Filters are not allowed by the Api Provider '
'in this method')
query.clear_filters()
# if query.has_filters:
# warnings.warn(
# 'Filters are not allowed by the Api Provider '
# 'in this method')
# query.clear_filters()
if isinstance(query, str):
params['$filter'] = query
else:
Expand Down Expand Up @@ -1415,6 +1438,27 @@ def get_items(self, limit=None, *, query=None, order_by=None, batch=None):
return self._base_get_list(url, limit=limit, query=query,
order_by=order_by, batch=batch)

def get_child_folders(self, limit=None, *, query=None, order_by=None, batch=None):
    """ Returns the folders contained in this folder.

    Narrows the generic item listing to drive items carrying the
    'folder' facet, so files are excluded from the result set.

    :param int limit: max no. of folders to get. Over 999 uses batch.
    :param query: applies an OData filter to the request
    :type query: Query or str
    :param order_by: orders the result set based on this condition
    :type order_by: Query or str
    :param int batch: batch size, retrieves items in
     batches allowing to retrieve more items than the limit.
    :return: child folders of this folder
    :rtype: list[DriveItem] or Pagination
    """
    if query:
        # Extend the caller-provided query with the folder restriction.
        child_query = query.on_attribute('folder')
    else:
        # No caller query: build one targeting the 'folder' attribute.
        child_query = self.q('folder')

    return self.get_items(limit=limit, query=child_query.unequal(None),
                          order_by=order_by, batch=batch)

def get_recent(self, limit=None, *, query=None, order_by=None, batch=None):
""" Returns a collection of recently used DriveItems
Expand Down
14 changes: 11 additions & 3 deletions O365/utils/token.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def check_token(self):
""" Optional Abstract method to check for the token existence """
raise NotImplementedError

def should_refresh_token(self):
def should_refresh_token(self, con=None):
"""
This method is intended to be implemented for environments
where multiple Connection instances are running in parallel.
Expand Down Expand Up @@ -117,13 +117,21 @@ def should_refresh_token(self):
If this returns True, then the Connection will refresh the token.
If this returns False, then the Connection will NOT refresh the token.
If this returns None, then this method already executed the refresh and therefore
the Connection does not have to.
By default this always returns True
There is an example of this in the examples folder.
:rtype: bool
:return: True if the Connection can refresh the token false otherwise.
:param Connection con: the connection that calls this method. It is
 passed because the locking mechanism may need to refresh the
 token while holding the lock acquired inside this method.
:rtype: bool or None
:return: True if the Connection can refresh the token
False if the Connection should not refresh the token
None if the token was refreshed and therefore the
Connection should do nothing.
"""
return True

Expand Down
2 changes: 2 additions & 0 deletions O365/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -921,6 +921,8 @@ def _parse_filter_word(self, word):
word.isoformat()) # convert datetime to isoformat
elif isinstance(word, bool):
word = str(word).lower()
elif word is None:
word = 'null'
return word

@staticmethod
Expand Down
4 changes: 2 additions & 2 deletions examples/token_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
# This is an implementation of the 'should_refresh_token' method


class MyFirestoreBackend(FirestoreBackend):
class LockableFirestoreBackend(FirestoreBackend):
"""
A firestore backend that can answer to
'should_refresh_token'. Synchronous.
Expand Down Expand Up @@ -48,7 +48,7 @@ def _check_refresh_flag(self):
return token
return None

def should_refresh_token(self):
def should_refresh_token(self, con=None):
# 1) check if the token is already a new one:
new_token = self.load_token()
if new_token and new_token.get('access_token') != self.token.get('access_token'):
Expand Down

0 comments on commit 8f0d99a

Please sign in to comment.