mirror of
https://github.com/samschott/maestral.git
synced 2024-11-13 05:40:53 +03:00
Initial commit
This commit is contained in:
commit
8d9a893e3a
2
.gitattributes
vendored
Normal file
2
.gitattributes
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
# Auto detect text files and perform LF normalization
|
||||
* text=auto
|
114
.gitignore
vendored
Normal file
114
.gitignore
vendored
Normal file
@ -0,0 +1,114 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
21
LICENSE
Normal file
21
LICENSE
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2018 Sam Schott
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
21
LICENSE.txt
Normal file
21
LICENSE.txt
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2018 Sam Schott
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
14
README.md
Normal file
14
README.md
Normal file
@ -0,0 +1,14 @@
|
||||
SisyphosDBX
|
||||
Open-source Dropbox client for macOS and Linux
|
||||
|
||||
---
|
||||
|
||||
---
|
||||
SisyphosDBX is an open-source Dropbox client written in Python. The project's main goal is to provide an open-source desktop Dropbox client for platforms that aren't officially supported. SisyphosDBX is script-based, which makes it platform-independent. It's written using the Python SDK for Dropbox API v2.
|
||||
|
||||
[!] IMPORTANT [!]
|
||||
SisyphosDBX doesn't have production status yet, so only 100 accounts can use my API keys.
|
||||
|
||||
Installing dependencies:
|
||||
---
|
||||
Run "pip install -r requirements.txt" from command prompt/BASH shell.
|
132
cmdline_client.py
Normal file
132
cmdline_client.py
Normal file
@ -0,0 +1,132 @@
|
||||
import sys
|
||||
import os.path as osp
|
||||
|
||||
|
||||
def run_daemon():
    """Start the sync daemon: run first-time setup if needed, fetch the
    remote Dropbox, then launch the local and remote monitors."""
    from sysiphusdbx import SisyphosClient, LocalMonitor, RemoteMonitor
    from config.main import CONF

    import logging

    # Send all INFO-and-above records from the root logger to stderr.
    root_logger = logging.getLogger()
    root_logger.addHandler(logging.StreamHandler())
    root_logger.setLevel(logging.INFO)

    client = SisyphosClient()

    # First sync is required when we have never synced, have no server
    # cursor, or the configured Dropbox folder no longer exists locally.
    first_sync = (
        not CONF.get('internal', 'lastsync')
        or CONF.get('internal', 'cursor') == ''
        or not osp.isdir(CONF.get('sysiphusdbx', 'path'))
    )

    if first_sync:
        from sysiphusdbx import Configure
        configure = Configure(client)
        configure.set_dropbox_directory()
        configure.ask_for_excluded_folders()
        CONF.set('internal', 'cursor', '')
        CONF.set('internal', 'lastsync', None)

    # Keep retrying the initial download until it reports success.
    while True:
        if client.get_remote_dropbox():
            break

    remote = RemoteMonitor(client)
    local = LocalMonitor(client, remote)

    # Push anything that changed locally while the daemon was not running.
    local.upload_local_changes_after_inactive()

    remote.start()
    local.start()
|
||||
|
||||
|
||||
# Command-line dispatch: the first argument selects the action.
if len(sys.argv) > 1:
    wtd = sys.argv[1]
else:
    wtd = "brick"

if wtd == "--client":
    # Start a bare API client session (runs the OAuth flow if needed).
    from sysiphusdbx import client

    print("""SisyphosDBX
(c) Sam Schott, 2018
made with Dropbox SDK from https://www.dropbox.com/developers/reference/sdk \n""")
    client.SisyphosClient()

elif wtd == "--help":
    print("""
Syntax: sisyphosdbx [OPTION]

--help - displays this text
--configuration - runs configuration wizard
--client - runs SysiphusDBX API Client
syntax: sisyphosdbx [OPTION]""")

elif wtd == "--configuration":
    # Run the configuration wizard only (no sync started).
    from sysiphusdbx import SisyphosClient, Configure

    client = SisyphosClient()
    configure = Configure(client)
    configure.set_dropbox_directory()
    configure.ask_for_excluded_folders()

elif wtd == "":
    # Reached only when an explicitly empty first argument is passed.
    from sysiphusdbx import SisyphosClient, LocalMonitor, RemoteMonitor
    from config.main import CONF

    if CONF.get('sysiphusdbx', 'firstsync'):
        from sysiphusdbx import Configure
        configure = Configure()
        configure.set_dropbox_directory()
        configure.ask_for_excluded_folders()

    client = SisyphosClient()

    local = LocalMonitor(client)
    remote = RemoteMonitor(client)

    local.start()
    remote.start()

    run_daemon()

else:
    # Fixed: message previously said "orphilia" (the project this code was
    # adapted from) and misspelled "information".
    print("Invalid syntax. Type sisyphosdbx --help for more information")


if __name__ == '__main__':
    # The code that used to be duplicated here was an exact copy of
    # run_daemon(); call it instead of repeating it.
    run_daemon()
|
BIN
config/.DS_Store
vendored
Normal file
BIN
config/.DS_Store
vendored
Normal file
Binary file not shown.
0
config/__init__.py
Normal file
0
config/__init__.py
Normal file
80
config/base.py
Normal file
80
config/base.py
Normal file
@ -0,0 +1,80 @@
|
||||
"""
|
||||
Base configuration management
|
||||
|
||||
This file only deals with non-GUI configuration features
|
||||
(in other words, we won't import any PyQt object here, avoiding any
|
||||
sip API incompatibility issue in spyder's non-gui modules)
|
||||
"""
|
||||
|
||||
from __future__ import division, absolute_import
|
||||
import os.path as osp
|
||||
import os
|
||||
import shutil
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Configuration paths
|
||||
# =============================================================================
|
||||
|
||||
def get_home_dir():
    """Return the current user's home directory.

    Uses ``os.path.expanduser`` first; when that does not yield an existing
    directory, probes the HOME, USERPROFILE and TMP environment variables in
    order. Raises RuntimeError when no usable directory is found.
    """
    try:
        home = osp.expanduser('~')
    except Exception:
        home = ''

    if osp.isdir(home):
        return home

    # expanduser() failed or pointed nowhere -- try common fallbacks.
    for env_var in ('HOME', 'USERPROFILE', 'TMP'):
        candidate = os.environ.get(env_var, '')
        if osp.isdir(candidate):
            return candidate

    raise RuntimeError('Please set the environment variable HOME to '
                       'your user/home directory path so CustomXepr '
                       'can start properly.')
|
||||
|
||||
|
||||
def get_conf_path(subfolder=None, filename=None):
    """Return absolute path to the config file with the specified filename.

    Parameters
    ----------
    subfolder : str or None
        Name of a subfolder of the user home directory holding the
        configuration. When None, the home directory itself is used
        (the previous implementation raised TypeError on None because it
        passed None straight to ``osp.join``).
    filename : str or None
        Optional file name to append to the configuration directory.
    """
    # Define conf_dir, tolerating the documented default subfolder=None.
    if subfolder is None:
        conf_dir = get_home_dir()
    else:
        conf_dir = osp.join(get_home_dir(), subfolder)

    # Create conf_dir (including any missing parents -- os.mkdir would
    # fail on a nested subfolder path).
    if not osp.isdir(conf_dir):
        os.makedirs(conf_dir)

    if filename is None:
        return conf_dir
    else:
        return osp.join(conf_dir, filename)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Reset config files
|
||||
# =============================================================================
|
||||
|
||||
def reset_config_files(subfolder, saved_config_files):
    """Delete every saved configuration file or directory in *subfolder*."""
    print("*** Reset CustomXepr settings to defaults ***")
    for entry in saved_config_files:
        target = get_conf_path(subfolder, entry)
        if osp.isfile(target) or osp.islink(target):
            os.remove(target)
        elif osp.isdir(target):
            shutil.rmtree(target)
        else:
            # Nothing on disk for this entry -- skip the log line as well.
            continue
        print("removing:", target)
|
60
config/main.py
Normal file
60
config/main.py
Normal file
@ -0,0 +1,60 @@
|
||||
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)

"""
Package configuration options (machinery adapted from Spyder/CustomXepr).

Note: Leave this file free of Qt related imports, so that it can be used to
quickly load a user config file
"""

# Local import
from config.user import UserConfig

PACKAGE_NAME = 'Orphilia'
# Config lives in a hidden folder of the user's home directory.
SUBFOLDER = '.%s' % PACKAGE_NAME


# =============================================================================
# Defaults
# =============================================================================
DEFAULTS = [
    ('main',
     {
         # Local path of the synced Dropbox folder (empty until configured).
         'path': '',
         'excluded_folders': [],
         # NOTE(review): key name is misspelled ("exlcuded") -- other modules
         # presumably read it under this exact spelling, so renaming it would
         # need a config version bump; verify against callers before fixing.
         # "icon\r" is the macOS custom folder icon file.
         'exlcuded_files': ["desktop.ini", "thumbs.db", ".DS_Store",
                            "icon\r", ".dropbox", ".dropbox.attr"],
     }),
    ('internal',
     {
         # Dropbox delta cursor; empty string means "no sync state yet".
         'cursor': '',
         # Timestamp of the last completed sync; None means never synced.
         'lastsync': None,
     }),
]


# =============================================================================
# Config instance
# =============================================================================
# IMPORTANT NOTES:
# 1. If you want to *change* the default value of a current option, you need to
#    do a MINOR update in config version, e.g. from 3.0.0 to 3.1.0
# 2. If you want to *remove* options that are no longer needed in our codebase,
#    or if you want to *rename* options, then you need to do a MAJOR update in
#    version, e.g. from 3.0.0 to 4.0.0
# 3. You don't need to touch this value if you're just adding a new option
CONF_VERSION = '2.0.0'

# Main configuration instance
# If loading the existing .ini file fails for any reason, fall back to a
# fresh instance built from DEFAULTS only (load=False).
try:
    CONF = UserConfig(PACKAGE_NAME, defaults=DEFAULTS, load=True,
                      version=CONF_VERSION, subfolder=SUBFOLDER, backup=True,
                      raw_mode=True)
except Exception:
    CONF = UserConfig(PACKAGE_NAME, defaults=DEFAULTS, load=False,
                      version=CONF_VERSION, subfolder=SUBFOLDER, backup=True,
                      raw_mode=True)
|
474
config/user.py
Normal file
474
config/user.py
Normal file
@ -0,0 +1,474 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module provides user configuration file management features.
|
||||
|
||||
It's based on the ConfigParser module (present in the standard library).
|
||||
"""
|
||||
|
||||
# Std imports
|
||||
import ast
|
||||
import os
|
||||
import os.path as osp
|
||||
import sys
|
||||
import re
|
||||
import shutil
|
||||
import time
|
||||
import codecs
|
||||
import configparser as cp
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
# Local imports
|
||||
from config.base import get_conf_path, get_home_dir
|
||||
|
||||
# True when running under Python 2; version_info is the canonical check
# (comparing the first character of sys.version is fragile).
PY2 = sys.version_info[0] == 2
|
||||
|
||||
|
||||
def is_text_string(obj):
    """Return True when *obj* is a text string.

    Matches ``basestring`` on Python 2 and ``str`` on Python 3, so binary
    data (Python 3 ``bytes``) never counts as text.
    """
    if PY2:
        return isinstance(obj, basestring)
    return isinstance(obj, str)
|
||||
|
||||
|
||||
def is_stable_version(version):
    """
    Return True when *version* denotes a stable release.

    A stable version has no letters in its final component, only numbers.

    Stable examples: 1.2, 1.3.4, 1.0.5
    Not stable: 1.2alpha, 1.3.4beta, 0.1.0rc1, 3.0.0dev
    """
    parts = version if isinstance(version, tuple) else version.split('.')
    return re.search(r'[a-zA-Z]', parts[-1]) is None
|
||||
|
||||
|
||||
def check_version(actver, version, cmp_op):
    """
    Check version string of an active module against a required version.

    If dev/prerelease tags result in TypeError for string-number comparison,
    it is assumed that the dependency is satisfied.
    Users on dev branches are responsible for keeping their own packages up to
    date.

    Copyright (C) 2013 The IPython Development Team

    Distributed under the terms of the BSD License.
    """
    if isinstance(actver, tuple):
        actver = '.'.join(str(part) for part in actver)

    # Hacks needed so that LooseVersion understands that (for example)
    # version = '3.0.0' is in fact bigger than actver = '3.0.0rc1': pad the
    # stable one with 'zz' so it sorts after the prerelease tag.
    stable_req = is_stable_version(version)
    stable_act = is_stable_version(actver)
    if (stable_req and not stable_act
            and actver.startswith(version) and version != actver):
        version += 'zz'
    elif (stable_act and not stable_req
            and version.startswith(actver) and version != actver):
        actver += 'zz'

    comparators = {
        '>': lambda a, b: a > b,
        '>=': lambda a, b: a >= b,
        '=': lambda a, b: a == b,
        '<': lambda a, b: a < b,
        '<=': lambda a, b: a <= b,
    }
    compare = comparators.get(cmp_op)
    if compare is None:
        # Unknown comparison operator.
        return False
    try:
        return compare(LooseVersion(actver), LooseVersion(version))
    except TypeError:
        # Mixed string/number components (dev tags): assume satisfied.
        return True
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Auxiliary classes
|
||||
# =============================================================================
|
||||
|
||||
class NoDefault:
    """Sentinel type marking the absence of a default value."""
    pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Defaults class
|
||||
# =============================================================================
|
||||
|
||||
class DefaultsConfig(cp.ConfigParser):
    """
    Class used to save defaults to a file and as base class for
    UserConfig
    """
    def __init__(self, name, subfolder):
        # interpolation=None disables '%'-interpolation on Python 3 so raw
        # values round-trip unchanged; the Python 2 ConfigParser has no such
        # keyword.
        if PY2:
            cp.ConfigParser.__init__(self)
        else:
            cp.ConfigParser.__init__(self, interpolation=None)

        self.name = name  # base name of the .ini file (without extension)
        self.subfolder = subfolder  # optional folder under the home directory

        # Preserve option-name case instead of the default lower-casing.
        self.optionxform = str

    def _set(self, section, option, value, verbose):
        """
        Private set method
        """
        if not self.has_section(section):
            self.add_section(section)
        # Non-string values are stored via repr(); UserConfig.get() parses
        # them back with ast.literal_eval.
        if not is_text_string(value):
            value = repr(value)
        if verbose:
            print('%s[ %s ] = %s' % (section, option, value))
        cp.ConfigParser.set(self, section, option, value)

    def _save(self):
        """
        Save config into the associated .ini file
        """
        # See Issue 1086 and 1242 for background on why this
        # method contains all the exception handling.
        fname = self.filename()

        def _write_file(fname):
            if PY2:
                # Python 2
                with codecs.open(fname, 'w', encoding='utf-8') as configfile:
                    self.write(configfile)
            else:
                # Python 3
                with open(fname, 'w', encoding='utf-8') as configfile:
                    self.write(configfile)

        try:  # the "easy" way
            _write_file(fname)
        except IOError:
            try:  # the "delete and sleep" way
                if osp.isfile(fname):
                    os.remove(fname)
                time.sleep(0.05)
                _write_file(fname)
            except Exception as e:
                print("Failed to write user configuration file.")
                print("Please submit a bug report.")
                raise(e)

    def filename(self):
        """Create a .ini filename located in user home directory.
        This .ini files stores the global package preferences.
        """
        if self.subfolder is None:
            config_file = osp.join(get_home_dir(), '.%s.ini' % self.name)
            return config_file
        else:
            folder = get_conf_path(self.subfolder)
            # Save defaults in a "defaults" dir of subfolder to not pollute it
            if 'defaults' in self.name:
                folder = osp.join(folder, 'defaults')
            if not osp.isdir(folder):
                os.mkdir(folder)
            config_file = osp.join(folder, '%s.ini' % self.name)
            return config_file

    def set_defaults(self, defaults):
        # defaults is a list of (section, {option: value}) tuples; values
        # are written without verbose logging.
        for section, options in defaults:
            for option in options:
                new_value = options[option]
                self._set(section, option, new_value, False)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# User config class
|
||||
# =============================================================================
|
||||
|
||||
class UserConfig(DefaultsConfig):
    """
    UserConfig class, based on ConfigParser
    name: name of the config
    defaults: dictionary containing options
              *or* list of tuples (section_name, options)
    version: version of the configuration file (X.Y.Z format)
    subfolder: configuration file will be saved in %home%/subfolder/%name%.ini

    Note that 'get' and 'set' arguments number and type
    differ from the overridden methods
    """
    # Section used when the caller passes section=None.
    DEFAULT_SECTION_NAME = 'main'

    def __init__(self, name, defaults=None, load=True, version=None,
                 subfolder=None, backup=False, raw_mode=False,
                 remove_obsolete=False):
        DefaultsConfig.__init__(self, name, subfolder)
        self.raw = 1 if raw_mode else 0
        if (version is not None and
                re.match(r'^(\d+).(\d+).(\d+)$', version) is None):
            raise ValueError("Version number %r is incorrect - must be in X.Y.Z format" % version)
        # Normalize a plain dict of defaults into the list-of-tuples form.
        if isinstance(defaults, dict):
            defaults = [(self.DEFAULT_SECTION_NAME, defaults)]
        self.defaults = defaults
        if defaults is not None:
            self.reset_to_defaults(save=False)
        fname = self.filename()
        if backup:
            try:
                shutil.copyfile(fname, "%s.bak" % fname)
            except IOError:
                pass
        if load:
            # If config file already exists, it overrides Default options:
            self.load_from_ini()
            old_ver = self.get_version(version)
            # Helpers extracting the major ('X') and major.minor ('X.Y')
            # prefixes of an 'X.Y.Z' version string.
            _major = lambda _t: _t[:_t.find('.')]
            _minor = lambda _t: _t[:_t.rfind('.')]
            # Save new defaults
            self._save_new_defaults(defaults, version, subfolder)
            # Updating defaults only if major/minor version is different
            if _minor(version) != _minor(old_ver):
                if backup:
                    try:
                        shutil.copyfile(fname, "%s-%s.bak" % (fname, old_ver))
                    except IOError:
                        pass
                if check_version(old_ver, '2.4.0', '<'):
                    self.reset_to_defaults(save=False)
                else:
                    self._update_defaults(defaults, old_ver)
                # Remove deprecated options if major version has changed
                if remove_obsolete or _major(version) != _major(old_ver):
                    self._remove_deprecated_options(old_ver)
                # Set new version number
                self.set_version(version, save=False)
            if defaults is None:
                # If no defaults are defined, set .ini file settings as default
                self.set_as_defaults()

    def get_version(self, version='0.0.0'):
        """Return configuration (not application!) version"""
        return self.get(self.DEFAULT_SECTION_NAME, 'version', version)

    def set_version(self, version='0.0.0', save=True):
        """Set configuration (not application!) version"""
        self.set(self.DEFAULT_SECTION_NAME, 'version', version, save=save)

    def load_from_ini(self):
        """
        Load config from the associated .ini file
        """
        try:
            fname = self.filename()
            if osp.isfile(fname):
                try:
                    with codecs.open(fname, encoding='utf-8') as configfile:
                        self.readfp(configfile)
                except IOError:
                    print("Failed reading file", fname)
        except cp.MissingSectionHeaderError:
            print("Warning: File contains no section headers.")

    def _load_old_defaults(self, old_version):
        """Read old defaults"""
        old_defaults = cp.ConfigParser()
        path = osp.dirname(self.filename())
        path = osp.join(path, 'defaults')
        old_defaults.read(osp.join(path, 'defaults-'+old_version+'.ini'))
        return old_defaults

    def _save_new_defaults(self, defaults, new_version, subfolder):
        """Save new defaults"""
        new_defaults = DefaultsConfig(name='defaults-'+new_version,
                                      subfolder=subfolder)
        # Only write the defaults file once per version.
        if not osp.isfile(new_defaults.filename()):
            new_defaults.set_defaults(defaults)
            new_defaults._save()

    def _update_defaults(self, defaults, old_version, verbose=False):
        """Update defaults after a change in version"""
        old_defaults = self._load_old_defaults(old_version)
        for section, options in defaults:
            for option in options:
                new_value = options[option]
                try:
                    old_value = old_defaults.get(section, option)
                except (cp.NoSectionError, cp.NoOptionError):
                    old_value = None
                # Overwrite only options whose default actually changed.
                if old_value is None or str(new_value) != old_value:
                    self._set(section, option, new_value, verbose)

    def _remove_deprecated_options(self, old_version):
        """
        Remove options which are present in the .ini file but not in defaults
        """
        old_defaults = self._load_old_defaults(old_version)
        for section in old_defaults.sections():
            for option, _ in old_defaults.items(section, raw=self.raw):
                if self.get_default(section, option) is NoDefault:
                    try:
                        self.remove_option(section, option)
                        # Drop the whole section once it becomes empty.
                        if len(self.items(section, raw=self.raw)) == 0:
                            self.remove_section(section)
                    except cp.NoSectionError:
                        self.remove_section(section)

    def cleanup(self):
        """
        Remove .ini file associated to config
        """
        os.remove(self.filename())

    def set_as_defaults(self):
        """
        Set defaults from the current config
        """
        self.defaults = []
        for section in self.sections():
            secdict = {}
            for option, value in self.items(section, raw=self.raw):
                secdict[option] = value
            self.defaults.append((section, secdict))

    def reset_to_defaults(self, save=True, verbose=False, section=None):
        """
        Reset config to Default values
        """
        for sec, options in self.defaults:
            if section is None or section == sec:
                for option in options:
                    value = options[option]
                    self._set(sec, option, value, verbose)
        if save:
            self._save()

    def _check_section_option(self, section, option):
        """
        Private method to check section and option types
        """
        if section is None:
            section = self.DEFAULT_SECTION_NAME
        elif not is_text_string(section):
            raise RuntimeError("Argument 'section' must be a string")
        if not is_text_string(option):
            raise RuntimeError("Argument 'option' must be a string")
        return section

    def get_default(self, section, option):
        """
        Get Default value for a given (section, option)
        -> useful for type checking in 'get' method
        """
        section = self._check_section_option(section, option)
        for sec, options in self.defaults:
            if sec == section:
                if option in options:
                    return options[option]
        else:
            # for/else: reached when the loop did not return -- no default.
            return NoDefault

    def get(self, section, option, default=NoDefault):
        """
        Get an option
        section=None: attribute a default section name
        default: default value (if not specified, an exception
        will be raised if option doesn't exist)
        """
        section = self._check_section_option(section, option)

        if not self.has_section(section):
            if default is NoDefault:
                raise cp.NoSectionError(section)
            else:
                self.add_section(section)

        if not self.has_option(section, option):
            if default is NoDefault:
                raise cp.NoOptionError(option, section)
            else:
                # Persist the supplied default so it exists next time.
                self.set(section, option, default)
                return default

        value = cp.ConfigParser.get(self, section, option, raw=self.raw)
        # Use type of default_value to parse value correctly
        default_value = self.get_default(section, option)
        if isinstance(default_value, bool):
            value = ast.literal_eval(value)
        elif isinstance(default_value, float):
            value = float(value)
        elif isinstance(default_value, int):
            value = int(value)
        elif is_text_string(default_value):
            if PY2:
                try:
                    value = value.decode('utf-8')
                    try:
                        # Some str config values expect to be eval after
                        # decoding
                        new_value = ast.literal_eval(value)
                        if is_text_string(new_value):
                            value = new_value
                    except (SyntaxError, ValueError):
                        pass
                except (UnicodeEncodeError, UnicodeDecodeError):
                    pass
        else:
            try:
                # lists, tuples, ...
                value = ast.literal_eval(value)
            except (SyntaxError, ValueError):
                pass
        return value

    def set_default(self, section, option, default_value):
        """
        Set Default value for a given (section, option)
        -> called when a new (section, option) is set and no default exists
        """
        section = self._check_section_option(section, option)
        for sec, options in self.defaults:
            if sec == section:
                options[option] = default_value

    def set(self, section, option, value, verbose=False, save=True):
        """
        Set an option
        section=None: attribute a default section name
        """
        section = self._check_section_option(section, option)
        default_value = self.get_default(section, option)
        if default_value is NoDefault:
            # This let us save correctly string value options with
            # no config default that contain non-ascii chars in
            # Python 2
            if is_text_string(value):
                value = repr(value)
            default_value = value
            self.set_default(section, option, default_value)
        # Coerce the new value to the type of the stored default.
        if isinstance(default_value, bool):
            value = bool(value)
        elif isinstance(default_value, float):
            value = float(value)
        elif isinstance(default_value, int):
            value = int(value)
        elif not is_text_string(default_value):
            value = repr(value)
        self._set(section, option, value, verbose)
        if save:
            self._save()

    def remove_section(self, section):
        # Persist immediately after removal.
        cp.ConfigParser.remove_section(self, section)
        self._save()

    def remove_option(self, section, option):
        # Persist immediately after removal.
        cp.ConfigParser.remove_option(self, section, option)
        self._save()
|
20
notify/cli-notify.py
Normal file
20
notify/cli-notify.py
Normal file
@ -0,0 +1,20 @@
|
||||
#!python
"""Print a human-readable notification for a sync event.

Usage: cli-notify.py EVENT MESSAGE
where EVENT is one of: add, rm, upd, link.
"""

import os
import sys

# Guard against missing arguments instead of crashing with IndexError.
if len(sys.argv) < 3:
    sys.exit("usage: cli-notify.py {add|rm|upd|link} MESSAGE")

# Renamed from 'type', which shadowed the builtin.
event_type = sys.argv[1]
message = sys.argv[2]

if event_type == "add":
    print("File " + message + " has been added to your Dropbox.")

elif event_type == "rm":
    print("File " + message + " has been removed from your Dropbox.")

elif event_type == "upd":
    print("File " + message + " has been updated.")

elif event_type == "link":
    print('Link publicly:')
    print(message)
|
5
requirements.txt
Normal file
5
requirements.txt
Normal file
@ -0,0 +1,5 @@
|
||||
dropbox
|
||||
oauth
|
||||
simplejson
|
||||
watchdog
|
||||
python-dateutil
|
BIN
sysiphusdbx/.DS_Store
vendored
Normal file
BIN
sysiphusdbx/.DS_Store
vendored
Normal file
Binary file not shown.
6
sysiphusdbx/__init__.py
Normal file
6
sysiphusdbx/__init__.py
Normal file
@ -0,0 +1,6 @@
|
||||
from __future__ import absolute_import
|
||||
from . import client, configure, monitor
|
||||
from .configure import Configure
|
||||
from .client import SisyphosClient
|
||||
from .monitor import LocalMonitor, RemoteMonitor
|
||||
from .main import SisyphosDBX
|
616
sysiphusdbx/client.py
Normal file
616
sysiphusdbx/client.py
Normal file
@ -0,0 +1,616 @@
|
||||
import os
|
||||
import os.path as osp
|
||||
import time
|
||||
import datetime
|
||||
import logging
|
||||
import pickle
|
||||
from tqdm import tqdm
|
||||
import shutil
|
||||
import dropbox
|
||||
from dropbox import DropboxOAuth2FlowNoRedirect
|
||||
from dropbox import files
|
||||
from watchdog.utils.dirsnapshot import DirectorySnapshot
|
||||
from watchdog.events import (DirModifiedEvent, FileModifiedEvent,
|
||||
DirCreatedEvent, FileCreatedEvent,
|
||||
DirDeletedEvent, FileDeletedEvent)
|
||||
|
||||
from config.main import CONF, SUBFOLDER
|
||||
from config.base import get_conf_path
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def megabytes_to_bytes(size_mb):
    """
    Convert a size given in megabytes to bytes.

    (The original docstring said "bytes to megabytes", but the code
    multiplies, i.e. converts MB -> bytes.)

    :param size_mb: size in megabytes
    :returns: equivalent size in bytes
    """
    return size_mb * 1024 * 1024
|
||||
|
||||
|
||||
class OAuth2Session:
    """
    Provides OAuth2 login and token store.

    Credentials are cached on disk in TOKEN_FILE as the string
    "access_token|account_id|user_id".
    """
    TOKEN_FILE = osp.join(get_conf_path(SUBFOLDER), "o2_store.txt")

    auth_flow = None
    oAuth2FlowResult = None
    access_token = ""
    account_id = ""
    user_id = ""

    def __init__(self, app_key="", app_secret=""):
        """Prepare the auth flow and load cached credentials (or start linking)."""
        # prepare auth flow
        self.auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        self.load_creds()

    def link(self):
        """Run the interactive console OAuth2 flow and store the credentials."""
        authorize_url = self.auth_flow.start()
        print("1. Go to: " + authorize_url)
        print("2. Click \"Allow\" (you might have to log in first).")
        print("3. Copy the authorization code.")
        auth_code = input("Enter the authorization code here: ").strip()

        try:
            self.oAuth2FlowResult = self.auth_flow.finish(auth_code)
            self.access_token = self.oAuth2FlowResult.access_token
            self.account_id = self.oAuth2FlowResult.account_id
            self.user_id = self.oAuth2FlowResult.user_id
        except Exception as e:
            # on any failure the session is simply left unlinked
            logger.error(e)
            return

        self.write_creds()

    def load_creds(self):
        """Read cached credentials from TOKEN_FILE; start a new link on failure."""
        print(" > Loading access token..."),
        try:
            with open(self.TOKEN_FILE) as f:
                stored_creds = f.read()
            self.access_token, self.account_id, self.user_id = stored_creds.split('|')
            print(" [OK]")
        except IOError:
            # no token file yet -> begin the interactive linking flow
            print(" [FAILED]")
            print(" x Access token not found. Beginning new session.")
            self.link()

    def write_creds(self):
        """Write the current credentials to TOKEN_FILE ('|'-separated)."""
        with open(self.TOKEN_FILE, 'w+') as f:
            f.write("|".join([self.access_token, self.account_id, self.user_id]))

        print(" > Credentials written.")

    def delete_creds(self):
        """Delete TOKEN_FILE from disk."""
        os.unlink(self.TOKEN_FILE)
        print(" > Credentials removed.")

    def unlink(self):
        """Remove the stored credentials (the app itself stays authorized)."""
        self.delete_creds()
        # I can't unlink the app yet properly (API limitation), so let's just remove the token
|
||||
|
||||
|
||||
class SisyphosClient:
    """Dropbox client with local/remote path translation and rev tracking.

    Wraps the Dropbox v2 SDK: handles OAuth via :class:`OAuth2Session`, maps
    Dropbox paths to paths inside the local Dropbox folder, keeps a pickled
    rev dictionary in ``<dropbox_path>/.dropbox``, and implements download,
    upload, delete, move, and change detection.
    """

    APP_KEY = '2jmbq42w7vof78h'
    APP_SECRET = 'lrsxo47dvuulex5'
    SDK_VERSION = "2.0"

    # NOTE(review): 'exlcuded_files' is misspelled, but it must match the
    # 'exlcuded_files' key used in the config file -- rename both together.
    exlcuded_files = CONF.get('main', 'exlcuded_files')
    excluded_folders = CONF.get('main', 'excluded_folders')
    last_cursor = CONF.get('internal', 'cursor')

    dropbox = None
    session = None

    def __init__(self):
        """Authenticate, create the SDK client, and load the local rev cache."""
        # check if the app_key and app_secret have been filled in
        if self.APP_KEY == '' or self.APP_SECRET == '':
            exit(' x You need to set your APP_KEY and APP_SECRET!')

        # get Dropbox session
        self.session = OAuth2Session(self.APP_KEY, self.APP_SECRET)
        self.last_longpoll = None
        self.backoff = 0

        # initialize API client
        self.dbx = dropbox.Dropbox(self.session.access_token)
        logger.info(' > SisyphusClient is ready.')

        # get correct directories
        self.dropbox_path = CONF.get('main', 'path')
        self.rev_file = osp.join(self.dropbox_path, '.dropbox')
        # try to load revisions dictionary
        try:
            with open(self.rev_file, 'rb') as f:
                self._rev_dict = pickle.load(f)
        except FileNotFoundError:
            self._rev_dict = {}

    def to_dbx_path(self, local_path):
        """Returns a relative version of a path, relative to the Dropbox folder.

        :param local_path: absolute path inside the local Dropbox folder
        :returns: '/'-separated path starting with '/'
        :raises ValueError: if no path is given, or the path equals the
            Dropbox folder itself
        """
        if not local_path:
            raise ValueError("No path specified.")

        start_list = osp.normpath(self.dropbox_path).split(osp.sep)
        path_list = osp.normpath(local_path).split(osp.sep)

        # Work out how much of the filepath is shared by start and path.
        i = len(osp.commonprefix([start_list, path_list]))

        rel_list = [osp.pardir] * (len(start_list) - i) + path_list[i:]
        if not rel_list:
            raise ValueError("Specified 'path' is not in Dropbox directory.")

        return '/' + '/'.join(rel_list)

    def to_local_path(self, dbx_path):
        """Converts a Dropbox path to the corresponding local path."""
        path = dbx_path.replace('/', osp.sep)
        path = osp.normpath(path)

        return osp.join(self.dropbox_path, path.lstrip(osp.sep))

    def get_local_rev(self, dbx_path):
        """Gets local rev.

        Re-reads the pickled rev dictionary from disk and returns the
        revision stored for the given file.

        :param dbx_path: Dropbox file path
        :returns: revision str, or None if no local revision number is saved
        """
        try:
            with open(self.rev_file, 'rb') as f:
                self._rev_dict = pickle.load(f)
        except FileNotFoundError:
            self._rev_dict = {}

        try:
            rev = self._rev_dict[dbx_path]
        except KeyError:
            rev = None

        return rev

    def set_local_rev(self, dbx_path, rev):
        """Sets local rev.

        Saves the revision number for a local file and persists the rev
        dictionary. If ``rev is None``, the entry for the file is removed.

        :param dbx_path: Dropbox file path
        :param rev: revision str, or None to delete the entry
        """
        if rev is None:
            self._rev_dict.pop(dbx_path, None)
        else:
            self._rev_dict[dbx_path] = rev

        with open(self.rev_file, 'wb+') as f:
            pickle.dump(self._rev_dict, f, pickle.HIGHEST_PROTOCOL)

    def unlink(self):
        """
        Kills the current Dropbox session. Returns nothing.
        """
        self.dbx.unlink()

    def list_folder(self, folder, **kwargs):
        """List a folder.

        :param folder: Path of folder on Dropbox.
        :param kwargs: keyword arguments for Dropbox SDK files_list_folder
        :returns: a dict mapping unicode filenames to
            FileMetadata|FolderMetadata entries (empty dict on API error)
        """
        path = osp.normpath(folder)

        try:
            res = self.dbx.files_list_folder(path, **kwargs)
        except dropbox.exceptions.ApiError as err:
            print('Folder listing failed for', path, '-- assumed empty:', err)
            return {}
        else:
            rv = {}
            for entry in res.entries:
                rv[entry.name] = entry
            return rv

    def download(self, dbx_path, **kwargs):
        """Downloads a file from Dropbox to the local folder.

        Checks for sync conflicts first and renames the local copy if one is
        found.

        :param dbx_path: path to file on Dropbox
        :param kwargs: keyword arguments for Dropbox SDK files_download_to_file
        :returns: metadata on success, False on failure, None if the local
            file is already up to date
        """
        # generate local path from dropbox_path and given path parameter
        dst_path = self.to_local_path(dbx_path)
        dst_path_directory = osp.dirname(dst_path)

        if not osp.exists(dst_path_directory):
            os.makedirs(dst_path_directory)

        try:
            conflict = self._is_local_conflict(dbx_path)
        except dropbox.exceptions.ApiError as exc:
            # '{1}' fixed: the original used '{2}' with only two format
            # arguments, which raised IndexError while building the message
            msg = ("An error occurred while getting metadata of file '{0}': "
                   "{1}.".format(dbx_path, exc.error if hasattr(exc, 'error') else exc))
            logger.warning(msg)
            return False

        if conflict == 0:  # no conflict
            pass
        elif conflict == 1:  # conflict! rename the local file out of the way
            parts = osp.splitext(dst_path)
            new_local_file = parts[0] + ' (Dropbox conflicting copy)' + parts[1]
            os.rename(dst_path, new_local_file)
        elif conflict == 2:  # Dropbox file corresponds to local file, nothing to do
            return None

        try:
            md = self.dbx.files_download_to_file(dst_path, dbx_path, **kwargs)
        except (dropbox.exceptions.ApiError, IOError, OSError) as exc:
            msg = ("An error occurred while downloading '{0}' file as '{1}': "
                   "{2}.".format(
                       dbx_path, dst_path,
                       exc.error if hasattr(exc, 'error') else exc))
            logger.warning(msg)
            return False

        msg = ("File '{0}' (rev={1}) from '{2}' was successfully downloaded as '{3}'.\n".format(
            md.name, md.rev, md.path_display, dst_path))

        self.set_local_rev(md.path_display, md.rev)  # save revision metadata
        logger.info(msg)

        return md

    def upload(self, file_src, path, chunk_size=2, **kwargs):
        """
        Uploads a local file to Dropbox.

        :param file_src: path of the local file to upload
        :param path: path to file on Dropbox
        :param kwargs: keyword arguments for Dropbox SDK files_upload
        :param chunk_size: Maximum size for individual uploads in MB. If the
            file size exceeds the chunk_size, an upload session is created
            instead.
        :returns: metadata or False
        """
        file_size = osp.getsize(file_src)
        chunk_size = megabytes_to_bytes(chunk_size)

        # console progress bar for the upload
        pb = tqdm(total=file_size, unit="B", unit_scale=True,
                  desc=osp.basename(file_src), miniters=1,
                  ncols=80, mininterval=1)
        mtime = os.path.getmtime(file_src)
        mtime_dt = datetime.datetime(*time.gmtime(mtime)[:6])

        try:
            with open(file_src, 'rb') as f:
                if file_size <= chunk_size:
                    md = self.dbx.files_upload(
                        f.read(), path, client_modified=mtime_dt, **kwargs)
                else:
                    session_start = self.dbx.files_upload_session_start(
                        f.read(chunk_size))
                    cursor = files.UploadSessionCursor(
                        session_id=session_start.session_id, offset=f.tell())
                    # client_modified must be a datetime; the original passed
                    # the raw float timestamp 'mtime' here
                    commit = files.CommitInfo(
                        path=path, client_modified=mtime_dt, **kwargs)
                    while f.tell() < file_size:
                        pb.update(chunk_size)
                        if file_size - f.tell() <= chunk_size:
                            # last chunk: finish the session and get metadata
                            pb.update(file_size - f.tell())
                            md = self.dbx.files_upload_session_finish(
                                f.read(chunk_size), cursor, commit)
                        else:
                            self.dbx.files_upload_session_append_v2(
                                f.read(chunk_size), cursor)
                            cursor.offset = f.tell()
        except dropbox.exceptions.ApiError as exc:
            msg = "An error occurred while uploading '{0}': {1}.".format(
                file_src, exc.error.get_path().reason)
            logger.warning(msg)
            return False
        finally:
            pb.close()

        self.set_local_rev(md.path_display, md.rev)  # save revision metadata
        logger.info("File uploaded properly.")
        return md

    def remove(self, path, **kwargs):
        """
        Removes a file from Dropbox.

        :param path: path to file on Dropbox
        :param kwargs: keyword arguments for Dropbox SDK files_delete
        :returns: metadata or False
        """
        try:
            # try to delete the file (response will be metadata, probably)
            md = self.dbx.files_delete(path, **kwargs)
        except dropbox.exceptions.HttpError as err:
            # '%s' added: logging cannot interpolate extra args without a
            # placeholder (the original call raised a formatting error)
            logger.warning(' x HTTP error %s', err)
            return False
        except dropbox.exceptions.ApiError as err:
            logger.warning(' x API error %s', err)
            return False

        # remove revision metadata
        self.set_local_rev(md.path_display, None)

        return md

    def move(self, path, new_path):
        """
        Moves/renames files or folders on Dropbox.

        :param path: path to file/folder on Dropbox
        :param new_path: new name/path
        :returns: metadata or False
        """
        try:
            # try to move the file (response will be metadata, probably)
            md = self.dbx.files_move(path, new_path, allow_shared_folder=True,
                                     autorename=True, allow_ownership_transfer=True)
        except dropbox.exceptions.HttpError as err:
            logger.warning(' x HTTP error %s', err)
            return False
        except dropbox.exceptions.ApiError as err:
            logger.warning(' x API error %s', err)
            return False

        # update local revs: drop the old path, record the new one
        self.set_local_rev(path, None)
        self.set_local_rev(new_path, md.rev)
        return md

    def make_dir(self, path, **kwargs):
        """
        Creates a folder on Dropbox.

        :param path: path of folder to create on Dropbox
        :param kwargs: keyword arguments for Dropbox SDK files_create_folder
        :returns: metadata or False
        """
        try:
            md = self.dbx.files_create_folder(path, **kwargs)
        except dropbox.exceptions.ApiError as err:
            logger.warning(' x API error %s', err)
            return False

        # folders carry the sentinel rev 'folder' in the local rev dict
        self.set_local_rev(path, 'folder')
        return md

    def get_remote_dropbox(self, path=""):
        """
        Gets all files/folders from Dropbox and writes them to the local folder.
        Call this method on the first run of the client. Indexing and
        downloading may take some time, depending on the size of the user's
        Dropbox folder.

        :param path: path to folder on Dropbox, defaults to root
        :returns: True on success, False otherwise
        """
        results = [0]  # list to store all results

        try:  # get metadata of all remote folders and files
            results[0] = self.dbx.files_list_folder(path, recursive=True,
                                                    include_deleted=False)
        except dropbox.exceptions.ApiError as exc:
            msg = "Cannot access '{0}': {1}".format(path, exc.error.get_path())
            logger.warning(msg)
            return False

        while results[-1].has_more:  # check if there are any more pages
            logger.info("Indexing %s" % len(results[-1].entries))
            more_results = self.dbx.files_list_folder_continue(results[-1].cursor)
            results.append(more_results)

        for result in results:
            for entry in result.entries:
                self._create_local_entry(entry)

            self.last_cursor = result.cursor
            CONF.set('internal', 'cursor', self.last_cursor)

        CONF.set('internal', 'lastsync', time.time())

        return True

    def get_local_changes(self):
        """Gets all local changes made while the app was not running.

        Call this method on startup of the client to upload all local changes.

        :returns: list of watchdog file events for everything that changed
            since the last sync (the original docstring claimed a dict)
        """
        changes = []
        snapshot = DirectorySnapshot(self.dropbox_path)

        # get paths of modified or added files / folders
        for path in snapshot.paths:
            # 'to_dbx_path' fixed: the original called the non-existent
            # method 'to_dbxd_path', an AttributeError at runtime
            dbx_path = self.to_dbx_path(path)
            if snapshot.mtime(path) > CONF.get('internal', 'lastsync'):
                if path in self._rev_dict:  # file is already tracked
                    if osp.isdir(path):
                        event = DirModifiedEvent(path)
                    else:
                        event = FileModifiedEvent(path)
                    changes.append(event)
                elif not self._is_excluded(dbx_path):
                    if osp.isdir(path):
                        event = DirCreatedEvent(path)
                    else:
                        event = FileCreatedEvent(path)
                    changes.append(event)

        # get deleted files / folders
        # NOTE(review): _rev_dict is keyed by Dropbox paths while
        # snapshot.paths holds local paths -- confirm this membership test
        # can ever match
        for path in self._rev_dict.keys():
            if path not in snapshot.paths:
                if path.endswith('/'):
                    event = DirDeletedEvent(path)
                else:
                    event = FileDeletedEvent(path)
                changes.append(event)

        return changes

    def wait_for_remote_changes(self, timeout=120):
        """Waits for remote changes since self.last_cursor.

        Call this method after starting the Dropbox client and periodically
        to get the latest updates.

        :param timeout: seconds to wait until timeout
        :returns: the SDK longpoll 'changes' flag, or False on API error
        """
        # honour the last request to back off
        if self.last_longpoll is not None:
            while time.time() - self.last_longpoll < self.backoff:
                time.sleep(1)

        try:  # longpoll Dropbox for changes after the stored cursor
            result = self.dbx.files_list_folder_longpoll(self.last_cursor, timeout=timeout)

        except dropbox.exceptions.ApiError:
            msg = "Cannot access Dropbox folder."
            logger.warning(msg)
            return False

        # keep track of the last long poll, back off if requested by the SDK
        if result.backoff:
            self.backoff = result.backoff + 5
        else:
            self.backoff = 0

        self.last_longpoll = time.time()

        return result.changes

    def get_remote_changes(self):
        """
        Applies remote changes since self.last_cursor.

        :returns: True on success
        """
        results = [0]

        results[0] = self.dbx.files_list_folder_continue(self.last_cursor)

        while results[-1].has_more:
            result = self.dbx.files_list_folder_continue(results[-1].cursor)
            results.append(result)

        for result in results:
            for entry in result.entries:
                self._create_local_entry(entry)

            self.last_cursor = result.cursor
            CONF.set('internal', 'cursor', self.last_cursor)

        CONF.set('internal', 'lastsync', time.time())

        return True

    def _create_local_entry(self, entry):
        """Creates the local file / folder for a remote entry.

        :param entry: Dropbox FileMetadata, FolderMetadata or DeletedMetadata
        """
        # refresh exclusion list in case it changed since startup
        self.excluded_folders = CONF.get('main', 'excluded_folders')

        if self._is_excluded(entry.path_display):
            return

        elif isinstance(entry, files.FileMetadata):
            # Store the new entry at the given path in your local state.
            # If the required parent folders don't exist yet, create them.
            # If there's already something else at the given path,
            # replace it and remove all its children.
            self.download(entry.path_display)

        elif isinstance(entry, files.FolderMetadata):
            # Store the new entry at the given path in your local state.
            # If the required parent folders don't exist yet, create them.
            # If there's already something else at the given path,
            # replace it but leave the children as they are.
            dst_path = self.to_local_path(entry.path_display)

            if not osp.isdir(dst_path):
                os.makedirs(dst_path)

            self.set_local_rev(entry.path_display, 'folder')

        elif isinstance(entry, files.DeletedMetadata):
            # If your local state has something at the given path,
            # remove it and all its children. If there's nothing at the
            # given path, ignore this entry.
            dst_path = self.to_local_path(entry.path_display)

            if osp.isdir(dst_path):
                shutil.rmtree(dst_path)
            elif osp.isfile(dst_path):
                os.remove(dst_path)

            self.set_local_rev(entry.path_display, None)

    def _is_excluded(self, path):
        """Check if a file is excluded from sync.

        :param path: Path of file or folder on Dropbox.
        :returns: True or False (bool)
        """
        excluded = False
        if os.path.basename(path) in self.exlcuded_files:
            excluded = True

        for excluded_folder in self.excluded_folders:
            # a non-trivial common path means 'path' lies inside the excluded
            # folder; NOTE(review): commonpath raises ValueError for a mix of
            # absolute and relative paths -- confirm inputs are consistent
            if not os.path.commonpath([path, excluded_folder]) in ["/", ""]:
                excluded = True

        return excluded

    def _is_local_conflict(self, dbx_path):
        """Check if the local copy conflicts with the remote file.

        :param dbx_path: Path of file on Dropbox.
        :returns: 0 if there is no conflict, 1 for a conflict, 2 if local and
            remote files are identical (the original docstring had the
            meanings of 0 and 1 swapped)
        """
        # get corresponding local path
        dst_path = self.to_local_path(dbx_path)

        # no conflict if the local file does not exist yet
        if not osp.exists(dst_path):
            logger.info("Local file '%s' does not exist. No conflict.", dbx_path)
            return 0

        # get metadata otherwise
        md = self.dbx.files_get_metadata(dbx_path)

        # check if the Dropbox rev is in the local dict
        local_rev = self.get_local_rev(dbx_path)
        if local_rev is None:
            # If not, we have a conflict: files with the same name have been
            # created on Dropbox and locally independently of each other.
            # If a file has been modified while the client was not running,
            # its entry from the rev dict is removed.
            logger.info("Conflicting local file without rev.")
            return 1
        # check if the remote and local versions have the same rev
        elif md.rev == local_rev:
            logger.info(
                "Local file is the same as on Dropbox (rev %s). No download necessary.",
                local_rev)
            return 2  # files are already the same

        elif not md.rev == local_rev:
            # different revisions: trust the Dropbox server version
            logger.info(
                "Local file has rev %s, file on Dropbox has rev %s. Getting file from Dropbox.",
                local_rev, md.rev)
            return 0
|
88
sysiphusdbx/configure.py
Normal file
88
sysiphusdbx/configure.py
Normal file
@ -0,0 +1,88 @@
|
||||
|
||||
"""
|
||||
1. Establish session, get token if necessary
|
||||
2. Configure local Dropbox folder, set firstsync = True if new folder is given
|
||||
3. Configure excluded foldes
|
||||
|
||||
"""
|
||||
import os
|
||||
import os.path as osp
|
||||
from dropbox import files
|
||||
from config.main import CONF
|
||||
from config.base import get_home_dir
|
||||
|
||||
|
||||
class Configure():
    """Interactive first-run configuration (Dropbox folder, excluded folders)."""

    def __init__(self, client):
        """:param client: client instance exposing the Dropbox SDK as ``dbx``"""
        self.client = client

    def set_dropbox_directory(self):
        """
        Configure dropbox directory. Will trigger a full indexing on next sync.
        """

        def ask_for_path():
            # recursive prompt helper: returns a validated absolute path
            dropbox_path = input("Dropbox folder location:").strip().strip("'")
            # Fall back to ~/Dropbox BEFORE abspath(): abspath("") returns
            # the cwd, so the original empty-input check could never fire.
            if dropbox_path == "":
                dropbox_path = osp.join(get_home_dir(), 'Dropbox')
            dropbox_path = osp.abspath(dropbox_path)

            if not osp.exists(dropbox_path):
                msg = "Dropbox folder does not exist. Should we create?"
                yes = yesno(msg, True)
                if yes:
                    os.makedirs(dropbox_path)
                else:
                    dropbox_path = ask_for_path()

            return dropbox_path

        CONF.set('main', 'path', ask_for_path())
        # 'lastsync' lives in the 'internal' section everywhere else in this
        # package; the original wrote it to 'main', which the first-sync
        # check never reads, so re-indexing was never triggered.
        CONF.set('internal', 'lastsync', False)

    def ask_for_excluded_folders(self):
        """Ask, folder by folder, which top-level Dropbox folders to exclude."""
        folders = []

        result = self.client.dbx.files_list_folder("", recursive=False)

        for entry in result.entries:
            if isinstance(entry, files.FolderMetadata):
                yes = yesno("Exclude '%s' from sync?" % entry.path_display, False)
                if yes:
                    folders.append(entry.path_display)

        CONF.set('main', 'excluded_folders', folders)
|
||||
|
||||
|
||||
def yesno(message, default):
    """Handy helper function to ask a yes/no question.

    A blank line returns the default, and answering y/yes or n/no returns
    True or False. Retry on unrecognized answer.
    Special answers:
    - q or quit exits the program
    - p or pdb invokes the debugger
    """
    suffix = ' [Y/n] ' if default else ' [N/y] '
    prompt = message + suffix
    replies = {'y': True, 'yes': True, 'n': False, 'no': False}
    while True:
        answer = input(prompt).strip().lower()
        if not answer:
            return default
        if answer in replies:
            return replies[answer]
        if answer in ('q', 'quit'):
            print('Exit')
            raise SystemExit(0)
        if answer in ('p', 'pdb'):
            import pdb
            pdb.set_trace()
        print('Please answer YES or NO.')
|
82
sysiphusdbx/main.py
Normal file
82
sysiphusdbx/main.py
Normal file
@ -0,0 +1,82 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os.path as osp
|
||||
import shutil
|
||||
from sysiphusdbx import SisyphusClient, LocalMonitor, RemoteMonitor, Configure
|
||||
from config.main import CONF
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger()
|
||||
logger.addHandler(logging.StreamHandler())
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
|
||||
class SisyphosDBX(object):
    """Ties the client and the local/remote monitors together.

    Handles first-run setup, starting/stopping the sync, and folder
    inclusion/exclusion.
    """

    # A full re-index is needed when there is no last-sync time, no stored
    # cursor, or the configured local Dropbox folder is missing.
    # 'main' fixed: the path is written to the 'main' section everywhere
    # else in this package; the original read a nonexistent 'sysiphusdbx'
    # section here.
    FIRST_SYNC = (not CONF.get('internal', 'lastsync') or
                  CONF.get('internal', 'cursor') == '' or
                  not osp.isdir(CONF.get('main', 'path')))

    def __init__(self):
        # NOTE(review): the client module defines 'SisyphosClient' (with an
        # 'o'); this name comes from the package import above -- confirm the
        # spelling matches what the package actually exports.
        self.client = SisyphusClient()

    def on_firstsync(self):
        """First run: choose excluded folders, reset the cursor, download all."""
        self.configure = Configure(self.client)
        self.configure.ask_for_excluded_folders()
        CONF.set('internal', 'cursor', '')
        CONF.set('internal', 'lastsync', None)

        result = False
        while not result:  # retry until the full download succeeds
            result = self.client.get_remote_dropbox()

    def start_sync(self):
        """Run first-time setup if needed, then start both monitors."""
        if self.FIRST_SYNC:
            self.on_firstsync()

        self.remote = RemoteMonitor(self.client)
        self.local = LocalMonitor(self.client, self.remote)

        # upload anything that changed while the app was not running
        self.local.upload_local_changes_after_inactive()

        self.remote.start()
        self.local.start()

    def stop_sync(self):
        """Stop both monitors."""
        self.remote.stop()
        self.local.stop()

    def exclude_folder(self, dbx_path):
        """Exclude a Dropbox folder from sync and delete its local copy.

        :param dbx_path: path of the folder on Dropbox
        """
        # add the folder's Dropbox path to the excluded list
        folders = CONF.get('main', 'excluded_folders')
        if dbx_path not in folders:
            folders.append(dbx_path)

        self.client.excluded_folders = folders
        CONF.set('main', 'excluded_folders', folders)

        # remove the folder from the local drive
        local_path = self.client.to_local_path(dbx_path)
        if osp.isdir(local_path):
            shutil.rmtree(local_path)

        # rev tracking lives on the client; the original called the
        # nonexistent 'self.set_local_rev'
        self.client.set_local_rev(dbx_path, None)

    def include_folder(self, dbx_path):
        """Re-include a previously excluded folder and download its contents.

        :param dbx_path: path of the folder on Dropbox
        """
        # remove the folder's Dropbox path from the excluded list
        folders = CONF.get('main', 'excluded_folders')
        if dbx_path in folders:
            # keep every entry EXCEPT the one being re-included; the
            # original '==' kept only the excluded folder instead of
            # removing it
            new_folders = [x for x in folders if osp.normpath(x) != dbx_path]

            self.client.excluded_folders = new_folders
            CONF.set('main', 'excluded_folders', new_folders)

        # download the folder and its contents from Dropbox
        self.client.get_remote_dropbox(path=dbx_path)
|
290
sysiphusdbx/monitor.py
Normal file
290
sysiphusdbx/monitor.py
Normal file
@ -0,0 +1,290 @@
|
||||
import os.path as osp
|
||||
import logging
|
||||
import time
|
||||
import threading
|
||||
from dropbox import files
|
||||
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
|
||||
from config.main import CONF, SUBFOLDER
|
||||
from config.base import get_conf_path
|
||||
|
||||
configurationDirectory = get_conf_path(SUBFOLDER)
|
||||
dropbox_path = CONF.get('main', 'path')
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
lock = threading.RLock()
|
||||
|
||||
EXCLUDED_FILES = CONF.get('main', 'exlcuded_files')
|
||||
EXCLUDED_FOLDERS = CONF.get('main', 'excluded_folders')
|
||||
|
||||
|
||||
def is_excluded(dbx_path):
    """Check if file is excluded from sync

    Checks if a file or folder has been excluded by the user, or if it is
    temporary and created only during a save event.
    :param dbx_path: string containing Dropbox path
    :returns: True if file excluded, False otherwise.
    """
    excluded = False
    if osp.basename(dbx_path) in EXCLUDED_FILES:
        excluded = True

    for excluded_folder in EXCLUDED_FOLDERS:
        # a non-trivial common path means dbx_path lies inside the excluded
        # folder; NOTE(review): osp.commonpath raises ValueError for a mix
        # of absolute and relative paths -- confirm inputs are consistent
        if not osp.commonpath([dbx_path, excluded_folder]) in [osp.sep, ""]:
            excluded = True

    # NOTE(review): this also excludes any legitimate file whose name
    # contains more than one period -- confirm that is acceptable
    if dbx_path.count('.') > 1:  # ignore ephemeral files on macOS
        excluded = True

    return excluded
|
||||
|
||||
|
||||
def local_sync(func):
    """Wrapper for event-handler methods that sync local file changes.

    - Aborts if the file or folder has been excluded by the user, or if the
      file is temporary and created only during a save event.
    - Pauses the remote monitor for the duration of the local sync / upload.
    - Updates the lastsync time in the config file.

    The decorated method is expected to be called as ``handler.method(event)``
    with a watchdog event, so ``args[0]`` is the handler and ``args[1]`` the
    event.
    """
    from functools import wraps

    @wraps(func)  # preserve the wrapped handler's name and docstring
    def wrapper(*args, **kwargs):
        if is_excluded(args[1].src_path):
            return

        # 'dest_path' fixed: watchdog move events expose 'dest_path' (as
        # on_moved itself uses); the original tested the nonexistent
        # 'dst_path', so moves into excluded folders were never skipped
        if hasattr(args[1], 'dest_path'):
            if is_excluded(args[1].dest_path):
                return

        print('syncing...')
        args[0].remote_monitor.stop()
        with lock:
            result = func(*args, **kwargs)
        args[0].remote_monitor.start()
        CONF.set('internal', 'lastsync', time.time())
        print('done')
        return result
    return wrapper
|
||||
|
||||
|
||||
class LoggingEventHandler(FileSystemEventHandler):
    """Logs all the events captured."""

    def on_moved(self, event):
        """Log a move from ``event.src_path`` to ``event.dest_path``."""
        super(LoggingEventHandler, self).on_moved(event)
        logger.info("Move detected: from '%s' to '%s'", event.src_path, event.dest_path)

    def on_created(self, event):
        """Log the creation of ``event.src_path``."""
        super(LoggingEventHandler, self).on_created(event)
        logger.info("Creation detected: '%s'", event.src_path)

    def on_deleted(self, event):
        """Log the deletion of ``event.src_path``."""
        super(LoggingEventHandler, self).on_deleted(event)
        logger.info("Deletion detected: '%s'", event.src_path)

    def on_modified(self, event):
        """Log a modification of ``event.src_path``."""
        super(LoggingEventHandler, self).on_modified(event)
        logger.info("Modification detected: '%s'", event.src_path)
|
||||
|
||||
|
||||
class DropboxEventHandler(LoggingEventHandler):
|
||||
"""Logs all the events captured."""
|
||||
|
||||
def __init__(self, client, remote_monitor):
|
||||
|
||||
self.client = client
|
||||
self.remote_monitor = remote_monitor
|
||||
|
||||
    @local_sync
    def on_moved(self, event):
        """Mirror a local move/rename to Dropbox."""
        # NOTE(review): super(LoggingEventHandler, self) bypasses
        # LoggingEventHandler.on_moved's log line -- confirm intentional
        super(LoggingEventHandler, self).on_moved(event)

        path = event.src_path
        path2 = event.dest_path

        dbx_path = self.client.to_dbx_path(path)
        dbx_path2 = self.client.to_dbx_path(path2)

        # If the file name contains multiple periods it is likely a temporary
        # file created during a saving event on macOS. Ignore such files.
        if osp.basename(path2).count('.') > 1:
            return

        self.client.move(dbx_path, dbx_path2)

        what = 'directory' if event.is_directory else 'file'
        logger.info("Moved %s: from %s to %s", what, dbx_path, dbx_path2)
|
||||
|
||||
    @local_sync
    def on_created(self, event):
        """Upload a newly created file or folder to Dropbox."""
        super(LoggingEventHandler, self).on_created(event)

        what = 'directory' if event.is_directory else 'file'

        path = event.src_path
        dbx_path = self.client.to_dbx_path(path)

        if what == 'file':

            if osp.isfile(path):
                while True:  # wait until file is fully created
                    size1 = osp.getsize(path)
                    time.sleep(0.5)
                    size2 = osp.getsize(path)
                    if size1 == size2:
                        break

            rev = self.client.get_local_rev(dbx_path)
            # if truly a new file
            if rev is None:
                mode = files.WriteMode('add')
            # or a 'false' new file event triggered by modification
            # e.g., some programs create backup files and then swap them
            # in to replace the files you are editing on the disk
            else:
                mode = files.WriteMode('update', rev)
            # NOTE(review): upload() returns False on failure, in which case
            # md.path_display below would raise AttributeError -- confirm
            md = self.client.upload(path, dbx_path, autorename=True, mode=mode)

            logger.info("Created %s: %s (rev %s)", what, md.path_display, md.rev)

        else:
            what = 'directory' if event.is_directory else 'file'
            if what == 'directory':
                md = self.client.make_dir(dbx_path, autorename=True)
                logger.info("Created %s: %s", what, md.path_display)
|
||||
|
||||
@local_sync
|
||||
def on_deleted(self, event):
|
||||
super(LoggingEventHandler, self).on_deleted(event)
|
||||
|
||||
path = event.src_path
|
||||
dbx_path = self.client.to_dbx_path(path)
|
||||
what = 'directory' if event.is_directory else 'file'
|
||||
md = self.client.remove(dbx_path)
|
||||
logger.info("Deleted %s.", what)
|
||||
|
||||
@local_sync
|
||||
def on_modified(self, event):
|
||||
super(LoggingEventHandler, self).on_modified(event)
|
||||
|
||||
what = 'directory' if event.is_directory else 'file'
|
||||
path = event.src_path
|
||||
dbx_path = self.client.to_dbx_path(path)
|
||||
|
||||
if what == "file":
|
||||
if osp.isfile(path):
|
||||
|
||||
while True: # wait until file is fully created
|
||||
size1 = osp.getsize(path)
|
||||
time.sleep(0.2)
|
||||
size2 = osp.getsize(path)
|
||||
if size1 == size2:
|
||||
break
|
||||
|
||||
rev = self.client.get_local_rev(dbx_path)
|
||||
mode = files.WriteMode('update', rev)
|
||||
md = self.client.upload(path, dbx_path, autorename=True, mode=mode)
|
||||
logger.info("Modified %s: %s (old rev: %s, new rev %s)", what,
|
||||
md.path_display, rev, md.rev)
|
||||
|
||||
|
||||
class GetRemoteChangesThread(threading.Thread):
    """Worker thread that long-polls Dropbox for remote changes.

    The thread repeatedly calls ``client.wait_for_remote_changes()`` and,
    when changes are reported, applies them via
    ``client.get_remote_changes()`` while holding the module-level `lock`.
    It can be paused/resumed (e.g. while a local change is being uploaded)
    and stopped for good.
    """

    def __init__(self, client):
        # fix: use the explicit class in super(); `super(self.__class__, ...)`
        # recurses infinitely if this class is ever subclassed
        super(GetRemoteChangesThread, self).__init__()
        self.client = client
        # fix: events are now per-instance; as class attributes they were
        # shared by ALL GetRemoteChangesThread instances, so pausing one
        # thread paused every thread
        self.pause_event = threading.Event()
        self.stop_event = threading.Event()

    def run(self):
        """Poll for remote changes until `stop_event` is set."""
        while not self.stop_event.is_set():
            # busy-wait while paused, before starting a new long-poll
            while self.pause_event.is_set():
                time.sleep(0.1)
            changes = self.client.wait_for_remote_changes()
            # re-check: we may have been paused while the long-poll returned
            while self.pause_event.is_set():
                time.sleep(0.1)

            if changes:
                with lock:
                    self.client.get_remote_changes()

    def pause(self):
        """Suspend processing of remote changes."""
        self.pause_event.set()

    def resume(self):
        """Resume processing of remote changes."""
        self.pause_event.clear()

    def stop(self):
        """Ask the thread to exit after the current iteration."""
        self.stop_event.set()
|
||||
|
||||
|
||||
class RemoteDummy(object):
    """No-op stand-in for a remote monitor.

    Provides the same ``start``/``stop`` interface so callers can treat
    "no remote monitoring" uniformly.
    """

    def start(self):
        """Do nothing."""
        return None

    def stop(self):
        """Do nothing."""
        return None
|
||||
|
||||
|
||||
class RemoteMonitor(object):
    """Drives a background thread that watches the remote Dropbox folder.

    The worker thread is created in a paused state and started
    immediately; :meth:`start` and :meth:`stop` merely resume/pause it.
    """

    def __init__(self, client):
        self.client = client
        # create the polling thread paused, then launch it
        self.thread = GetRemoteChangesThread(self.client)
        self.thread.pause()
        self.thread.start()

    def start(self):
        """Start observation of remote Dropbox folder."""
        self.thread.resume()

    def stop(self):
        """Stop observation of remote Dropbox folder."""
        self.thread.pause()

    def __del__(self):
        # best effort: `self.thread` may not exist if __init__ failed early
        try:
            self.thread.stop()
        except AttributeError:
            pass
|
||||
|
||||
|
||||
class LocalMonitor(object):
    """Watches the local Dropbox folder and syncs changes to Dropbox.

    File system events are handled by a :class:`DropboxEventHandler`;
    `remote_monitor` is paused/resumed around uploads by that handler.
    """

    def __init__(self, client, remote_monitor=None):
        # fix: the default used to be `remote_monitor=RemoteDummy()`, a
        # single instance created at def time and shared by every
        # LocalMonitor; use a None sentinel and create one per instance
        if remote_monitor is None:
            remote_monitor = RemoteDummy()

        self.client = client
        self.remote_monitor = remote_monitor

        self.event_handler = DropboxEventHandler(self.client, self.remote_monitor)

    def upload_local_changes_after_inactive(self):
        """Push changes while client has not been running to Dropbox."""

        events = self.client.get_local_changes()

        for event in events:
            # fix: compare with '==' -- 'is' on string literals relies on
            # CPython interning and is not guaranteed to work
            if event.event_type == 'created':
                self.event_handler.on_created(event)
            elif event.event_type == 'deleted':
                self.event_handler.on_deleted(event)
            elif event.event_type == 'modified':
                self.event_handler.on_modified(event)

    def start(self):
        """Start observation of local Dropbox folder."""
        self.observer = Observer()
        # NOTE(review): `dropbox_path` is read from module scope here --
        # confirm it is set before start() is called
        self.observer.schedule(self.event_handler, dropbox_path, recursive=True)
        self.observer.start()

    def stop(self):
        """Stop observation of local Dropbox folder."""
        self.observer.stop()
        self.observer.join()
|
Loading…
Reference in New Issue
Block a user