added wait_for_connection function

Sam Schott 2018-11-29 01:29:59 +00:00
parent fd2abf7548
commit cac510c726
3 changed files with 102 additions and 21 deletions


@@ -19,11 +19,34 @@ logger = logging.getLogger(__name__)
SESSION = dropbox.dropbox.create_session()
def megabytes_to_bytes(size_mb):
def tobytes(value, unit, bsize=1024):
"""
Convert size in bytes to megabytes
Convert a size from the given unit to bytes.
:param value: Value in the given unit.
:param str unit: Unit to convert from. 'KB' to 'EB' are supported.
:param int bsize: Conversion factor between one unit and the next larger unit.
:return: Converted value in bytes.
:rtype: float
"""
return size_mb * 1024 * 1024
a = {'KB': 1, 'MB': 2, 'GB': 3, 'TB': 4, 'PB': 5, 'EB': 6}
return float(value) * bsize**a[unit.upper()]
def bytesto(value, unit, bsize=1024):
"""
Convert a size from bytes to the given unit.
:param value: Value in bytes.
:param str unit: Unit to convert to. 'KB' to 'EB' are supported.
:param int bsize: Conversion factor between one unit and the next larger unit.
:return: Converted value in the given unit.
:rtype: float
"""
a = {'KB': 1, 'MB': 2, 'GB': 3, 'TB': 4, 'PB': 5, 'EB': 6}
return float(value) / bsize**a[unit.upper()]
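# Example: tobytes(230, 'MB') == 230 * 1024**2 == 241172480.0 and
# bytesto(241172480, 'MB') == 230.0, so the two helpers are inverses of each other.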
class OAuth2Session(object):
@@ -268,6 +291,42 @@ class SisyphosClient(object):
with open(self.rev_file, 'wb+') as f:
pickle.dump(self.rev_dict, f, pickle.HIGHEST_PROTOCOL)
def get_account_info(self):
"""
Gets current account information.
:return: :class:`dropbox.users.FullAccount` instance or `None` if failed.
"""
try:
res = self.dbx.users_get_current_account()
except dropbox.exceptions.ApiError as err:
logger.info("Failed to get account info: %s", err)
res = None
return res
def get_space_usage(self):
"""
Gets current account space usage.
:return: :class:`dropbox.users.SpaceUsage` instance or `None` if failed.
"""
try:
res = self.dbx.users_get_space_usage()
except dropbox.exceptions.ApiError as err:
logger.debug("Failed to get space usage: %s", err)
return None
if res.allocation.is_team():
# team accounts share the team's allocation
used = res.allocation.get_team().used
allocated = res.allocation.get_team().allocated
elif res.allocation.is_individual():
# IndividualSpaceAllocation has no 'used' field, take it from the SpaceUsage itself
used = res.used
allocated = res.allocation.get_individual().allocated
else:
return res  # unknown allocation type, skip the usage summary
percent = used / allocated * 100
alloc_gb = bytesto(allocated, 'GB')
logger.info("{:.1f}% of {:,}GB used".format(percent, alloc_gb))
return res
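# The logged allocation uses binary prefixes: bytesto(allocated, 'GB') divides
# by 1024**3, so the reported figure is effectively a GiB value.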
def unlink(self):
"""
Unlinks the Dropbox account.
@@ -376,7 +435,7 @@ class SisyphosClient(object):
"""
file_size = osp.getsize(local_path)
chunk_size = megabytes_to_bytes(chunk_size)
chunk_size = tobytes(chunk_size, 'MB')
pb = tqdm(total=file_size, unit="B", unit_scale=True,
desc=osp.basename(local_path), miniters=1,


@@ -6,12 +6,13 @@ __author__ = "Sam Schott"
import os
import os.path as osp
import time
import requests
import shutil
import functools
from dropbox import files
from sisyphosdbx.client import SisyphosClient
from sisyphosdbx.monitor import LocalMonitor, RemoteMonitor
from sisyphosdbx.monitor import LocalMonitor, RemoteMonitor, wait_for_connection
from sisyphosdbx.config.main import CONF
from sisyphosdbx.config.base import get_home_dir
@@ -52,11 +53,10 @@ def repeat_on_connection_error(f):
while True:
try:
ret = f(self, *args, **kwargs)
break
except ConnectionError:
logger.info("Connecting...")
time.sleep(1)
return ret
return ret
except requests.exceptions.ConnectionError:
wait_for_connection()
return wrapper
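# Methods wrapped with repeat_on_connection_error are retried in a loop: the
# result is returned on success, and on a requests ConnectionError the call
# blocks in wait_for_connection() before trying again.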


@@ -2,6 +2,7 @@ import os.path as osp
import logging
import time
import threading
import requests
from queue import Queue
import dropbox
@@ -14,14 +15,37 @@ from watchdog.events import (DirModifiedEvent, FileModifiedEvent,
DirDeletedEvent, FileDeletedEvent)
from watchdog.utils.dirsnapshot import DirectorySnapshot
from sisyphosdbx.client import SESSION
from sisyphosdbx.client import SisyphosClient
from sisyphosdbx.config.main import CONF, SUBFOLDER
from sisyphosdbx.config.base import get_conf_path
configurationDirectory = get_conf_path(SUBFOLDER)
logger = logging.getLogger(__name__)
lock = threading.Lock()
lock = threading.Lock() # lock to prevent simultaneous calls to Dropbox
client = SisyphosClient() # global client for connection checking etc
def wait_for_connection(timeout=None):
"""
Helper function which blocks until Dropbox can be reached.
:param timeout: Timeout in seconds after which a TimeoutError is raised.
Defaults to `None`, in which case the function blocks indefinitely.
:raises TimeoutError: If the connection could not be established within the
given timeout.
"""
t0 = time.time()
while not timeout or (time.time() - t0 < timeout):
try:
# use an inexpensive call to space usage to test connection
client.get_space_usage()
return # return if successful
except requests.exceptions.ConnectionError:
logger.info("Connecting...")
time.sleep(1)
raise TimeoutError("Timeout of %s sec exceeded." % timeout)
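# Example: wait_for_connection() blocks until a space-usage request to Dropbox
# goes through, polling roughly once per second; wait_for_connection(timeout=30)
# would raise TimeoutError after about 30 seconds without connectivity.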
class TimedQueue(Queue):
@@ -243,10 +267,9 @@ class GetRemoteChangesThread(threading.Thread):
else:
logger.info("Up to date")
except ConnectionError: # TODO: determine correct exc to catch
except requests.exceptions.ConnectionError:
logger.debug("Connection lost")
logger.info("Connecting...") # TODO: handle lost connection
# block until reconnect
wait_for_connection()
def pause(self):
self.pause_event.set()
@@ -275,7 +298,7 @@ class ProcessLocalChangesThread(threading.Thread):
super(self.__class__, self).__init__()
self.dbx_handler = dbx_handler
self.event_q = event_q
self.delay = 0.5
self.delay = 0.1
def run(self):
while not self.stop_event.is_set():
@@ -335,12 +358,11 @@
elif event.event_type is EVENT_TYPE_MODIFIED:
self.dbx_handler.on_modified(event)
logger.info("Up to date")
except ConnectionError: # TODO: determine correct exc to catch
except requests.exceptions.ConnectionError:
logger.debug("Connection lost")
logger.info("Connecting...")
# TODO: handle lost connection
# block until reconnect
# upon reconnect, call upload_local_changes_after_inactive
# TODO: handle lost connection, stop Observer?
wait_for_connection()
# TODO: upon reconnect, call upload_local_changes_after_inactive
def pause(self):
self.pause_event.set()