3 patches for repository http://tahoe-lafs.org/source/tahoe-lafs/trunk:
Wed Jul 27 01:05:41 BST 2011 david-sarah@jacaranda.org
* Drop upload frontend (updated), with more tests. Tests now pass on Windows. refs #1429
Wed Jul 27 03:30:03 BST 2011 david-sarah@jacaranda.org
* drop-upload: make counts visible on the statistics page, and disable some debugging. refs #1429
Sun Aug 7 20:22:50 BST 2011 david-sarah@jacaranda.org
* Prototype Windows implementation of drop-uploader. refs #1431
New patches:
[Drop upload frontend (updated), with more tests. Tests now pass on Windows. refs #1429
david-sarah@jacaranda.org**20110727000541
Ignore-this: d67c37a4db86c3d37a1c4b16ff299df5
] {
hunk ./src/allmydata/_auto_deps.py 22
"zope.interface == 3.3.1, == 3.5.3, == 3.6.1",
# On Windows we need at least Twisted 9.0 to avoid an indirect dependency on pywin32.
+ # On Linux we need at least Twisted 10.1.0 for inotify support used by the drop-upload
+ # frontend.
# We also need Twisted 10.1 for the FTP frontend in order for Twisted's FTP server to
# support asynchronous close.
"Twisted >= 10.1.0",
hunk ./src/allmydata/client.py 153
# ControlServer and Helper are attached after Tub startup
self.init_ftp_server()
self.init_sftp_server()
+ self.init_drop_uploader()
hotline_file = os.path.join(self.basedir,
self.SUICIDE_PREVENTION_HOTLINE_FILE)
hunk ./src/allmydata/client.py 425
sftp_portstr, pubkey_file, privkey_file)
s.setServiceParent(self)
+ def init_drop_uploader(self):
+ if self.get_config("drop_upload", "enabled", False, boolean=True):
+ upload_uri = self.get_config("drop_upload", "upload.uri", None)
+ local_dir_utf8 = self.get_config("drop_upload", "local.directory", None)
+
+ if upload_uri and local_dir_utf8:
+ try:
+ from allmydata.frontends import drop_upload
+ s = drop_upload.DropUploader(self, upload_uri, local_dir_utf8)
+ s.setServiceParent(self)
+ except Exception, e:
+ self.log("couldn't start drop-uploader: %r", args=(e,))
+ else:
+ self.log("couldn't start drop-uploader: upload.uri or local.directory not specified")
+
def _check_hotline(self, hotline_file):
if os.path.exists(hotline_file):
mtime = os.stat(hotline_file)[stat.ST_MTIME]
addfile ./src/allmydata/frontends/drop_upload.py
hunk ./src/allmydata/frontends/drop_upload.py 1
+
+import os, sys
+
+from twisted.internet import defer
+from twisted.python.filepath import FilePath
+from twisted.application import service
+
+from allmydata.interfaces import IDirectoryNode
+
+from allmydata.util.encodingutil import quote_output
+from allmydata.immutable.upload import FileName
+
+
+class DropUploader(service.MultiService):
+ def __init__(self, client, upload_uri, local_dir_utf8, inotify=None):
+ service.MultiService.__init__(self)
+
+ try:
+ local_dir = os.path.expanduser(local_dir_utf8.decode('utf-8').encode(sys.getfilesystemencoding()))
+ except (UnicodeEncodeError, UnicodeDecodeError):
+ raise AssertionError("The drop-upload path %r was not valid UTF-8 or could not be represented in the filesystem encoding."
+ % quote_output(local_dir_utf8))
+
+ self._client = client
+ self._convergence = client.convergence
+ self._local_path = FilePath(local_dir)
+ self.uploaded = 0
+ self.failed = 0
+ self.disappeared = 0
+
+ if inotify is None:
+ from twisted.internet import inotify
+ self._inotify = inotify
+
+ if not self._local_path.isdir():
+ raise AssertionError("The drop-upload local path %r was not an existing directory." % quote_output(local_dir))
+
+ # TODO: allow a path rather than a URI.
+ self._parent = self._client.create_node_from_uri(upload_uri)
+ if not IDirectoryNode.providedBy(self._parent):
+ raise AssertionError("The drop-upload remote URI is not a directory URI.")
+ if self._parent.is_unknown() or self._parent.is_readonly():
+ raise AssertionError("The drop-upload remote URI does not refer to a writeable directory.")
+
+ self._uploaded_callback = lambda ign: None
+
+ self._notifier = inotify.INotify()
+ self._notifier.startReading()
+
+ # We don't watch for IN_CREATE, because that would cause us to read and upload a
+ # possibly-incomplete file before the application has closed it. There should always
+ # be an IN_CLOSE_WRITE after an IN_CREATE (I think).
+ # TODO: what about IN_MOVE_SELF?
+ mask = inotify.IN_CLOSE_WRITE | inotify.IN_MOVED_TO
+ self._notifier.watch(self._local_path, mask=mask, callbacks=[self._notify])
+
+ def _notify(self, opaque, path, events_mask):
+ self._log("inotify event %r, %r, %r\n" % (opaque, path, ', '.join(self._inotify.humanReadableMask(events_mask))))
+
+ d = defer.succeed(None)
+
+ # FIXME: if this already exists as a mutable file, we replace the directory entry,
+ # but we should probably modify the file (as the SFTP frontend does).
+ def _add_file(ign):
+ name = path.basename().decode(sys.getfilesystemencoding())
+ u = FileName(path.path, self._convergence)
+ return self._parent.add_file(name, u)
+ d.addCallback(_add_file)
+
+ def _succeeded(ign):
+ self.uploaded += 1
+ def _failed(f):
+ if path.exists():
+ self._log("drop-upload: %r failed to upload due to %r" % (path.path, f))
+ self.failed += 1
+ return f
+ else:
+ self._log("drop-upload: notified file %r disappeared "
+ "(this is normal for temporary files): %r" % (path.path, f))
+ self.disappeared += 1
+ return None
+ d.addCallbacks(_succeeded, _failed)
+ d.addBoth(self._uploaded_callback)
+ return d
+
+ def set_uploaded_callback(self, callback):
+ """This sets a function that will be called after a file has been uploaded."""
+ self._uploaded_callback = callback
+
+ def finish(self):
+ self._notifier.stopReading()
+
+ def _log(self, msg):
+ self._client.log(msg)
+ open("events", "ab+").write(msg)
hunk ./src/allmydata/scripts/create_node.py 155
c.write("enabled = false\n")
c.write("\n")
+ c.write("[drop_upload]\n")
+ c.write("# Shall this node automatically upload files created or modified in a local directory?\n")
+ c.write("enabled = false\n")
+ c.write("# This must be an URI for a writeable directory.\n")
+ c.write("upload.uri =\n")
+ c.write("local.directory = ~/drop_upload\n")
+ c.write("\n")
+
c.close()
from allmydata.util import fileutil
addfile ./src/allmydata/test/fake_inotify.py
hunk ./src/allmydata/test/fake_inotify.py 1
+
+# Most of this is copied from Twisted 11.0. The reason for this hack is that
+# twisted.internet.inotify can't be imported when the platform does not support inotify.
+
+
+# from /usr/src/linux/include/linux/inotify.h
+
+IN_ACCESS = 0x00000001L # File was accessed
+IN_MODIFY = 0x00000002L # File was modified
+IN_ATTRIB = 0x00000004L # Metadata changed
+IN_CLOSE_WRITE = 0x00000008L # Writeable file was closed
+IN_CLOSE_NOWRITE = 0x00000010L # Unwriteable file closed
+IN_OPEN = 0x00000020L # File was opened
+IN_MOVED_FROM = 0x00000040L # File was moved from X
+IN_MOVED_TO = 0x00000080L # File was moved to Y
+IN_CREATE = 0x00000100L # Subfile was created
+IN_DELETE = 0x00000200L # Subfile was deleted
+IN_DELETE_SELF = 0x00000400L # Self was deleted
+IN_MOVE_SELF = 0x00000800L # Self was moved
+IN_UNMOUNT = 0x00002000L # Backing fs was unmounted
+IN_Q_OVERFLOW = 0x00004000L # Event queued overflowed
+IN_IGNORED = 0x00008000L # File was ignored
+
+IN_ONLYDIR = 0x01000000 # only watch the path if it is a directory
+IN_DONT_FOLLOW = 0x02000000 # don't follow a sym link
+IN_MASK_ADD = 0x20000000 # add to the mask of an already existing watch
+IN_ISDIR = 0x40000000 # event occurred against dir
+IN_ONESHOT = 0x80000000 # only send event once
+
+IN_CLOSE = IN_CLOSE_WRITE | IN_CLOSE_NOWRITE # closes
+IN_MOVED = IN_MOVED_FROM | IN_MOVED_TO # moves
+IN_CHANGED = IN_MODIFY | IN_ATTRIB # changes
+
+IN_WATCH_MASK = (IN_MODIFY | IN_ATTRIB |
+ IN_CREATE | IN_DELETE |
+ IN_DELETE_SELF | IN_MOVE_SELF |
+ IN_UNMOUNT | IN_MOVED_FROM | IN_MOVED_TO)
+
+
+_FLAG_TO_HUMAN = [
+ (IN_ACCESS, 'access'),
+ (IN_MODIFY, 'modify'),
+ (IN_ATTRIB, 'attrib'),
+ (IN_CLOSE_WRITE, 'close_write'),
+ (IN_CLOSE_NOWRITE, 'close_nowrite'),
+ (IN_OPEN, 'open'),
+ (IN_MOVED_FROM, 'moved_from'),
+ (IN_MOVED_TO, 'moved_to'),
+ (IN_CREATE, 'create'),
+ (IN_DELETE, 'delete'),
+ (IN_DELETE_SELF, 'delete_self'),
+ (IN_MOVE_SELF, 'move_self'),
+ (IN_UNMOUNT, 'unmount'),
+ (IN_Q_OVERFLOW, 'queue_overflow'),
+ (IN_IGNORED, 'ignored'),
+ (IN_ONLYDIR, 'only_dir'),
+ (IN_DONT_FOLLOW, 'dont_follow'),
+ (IN_MASK_ADD, 'mask_add'),
+ (IN_ISDIR, 'is_dir'),
+ (IN_ONESHOT, 'one_shot')
+]
+
+
+
+def humanReadableMask(mask):
+ """
+ Auxiliary function that converts an hexadecimal mask into a series
+ of human readable flags.
+ """
+ s = []
+ for k, v in _FLAG_TO_HUMAN:
+ if k & mask:
+ s.append(v)
+ return s
+
+
+# This class is not copied from Twisted; it acts as a mock.
+class INotify(object):
+ def startReading(self):
+ pass
+
+ def stopReading(self):
+ pass
+
+ def watch(self, filepath, mask=IN_WATCH_MASK, autoAdd=False, callbacks=None, recursive=False):
+ self.callbacks = callbacks
+
+ def event(self, filepath, mask):
+ for cb in self.callbacks:
+ cb(None, filepath, mask)
+
+
+__all__ = ["INotify", "humanReadableMask", "IN_WATCH_MASK", "IN_ACCESS",
+ "IN_MODIFY", "IN_ATTRIB", "IN_CLOSE_NOWRITE", "IN_CLOSE_WRITE",
+ "IN_OPEN", "IN_MOVED_FROM", "IN_MOVED_TO", "IN_CREATE",
+ "IN_DELETE", "IN_DELETE_SELF", "IN_MOVE_SELF", "IN_UNMOUNT",
+ "IN_Q_OVERFLOW", "IN_IGNORED", "IN_ONLYDIR", "IN_DONT_FOLLOW",
+ "IN_MASK_ADD", "IN_ISDIR", "IN_ONESHOT", "IN_CLOSE",
+ "IN_MOVED", "IN_CHANGED"]
addfile ./src/allmydata/test/test_drop_upload.py
hunk ./src/allmydata/test/test_drop_upload.py 1
+
+import os, sys, platform
+
+from twisted.trial import unittest
+from twisted.python import filepath, runtime
+from twisted.internet import defer, base
+
+from allmydata.interfaces import IDirectoryNode, NoSuchChildError
+
+from allmydata.util import fileutil
+from allmydata.util.consumer import download_to_data
+from allmydata.test.no_network import GridTestMixin
+from allmydata.test.common_util import ReallyEqualMixin
+from allmydata.test.common import ShouldFailMixin
+from allmydata.test import fake_inotify
+
+from allmydata.frontends.drop_upload import DropUploader
+
+
+class DropUploadTestMixin(GridTestMixin, ShouldFailMixin, ReallyEqualMixin):
+ """
+ These tests will be run both with a mock notifier, and (on platforms that support it)
+ with the real INotify.
+ """
+
+ def _test(self):
+ self.uploader = None
+ self.set_up_grid()
+ self.local_dir = os.path.join(self.basedir, "local_dir")
+ os.mkdir(self.local_dir)
+
+ self.client = self.g.clients[0]
+ d = self.client.create_dirnode()
+ def _made_upload_dir(n):
+ self.failUnless(IDirectoryNode.providedBy(n))
+ self.upload_dirnode = n
+ self.upload_uri = n.get_uri()
+ self.uploader = DropUploader(self.client, self.upload_uri, self.local_dir, inotify=self.inotify)
+ d.addCallback(_made_upload_dir)
+
+ # Write something short enough for a LIT file.
+ d.addCallback(lambda ign: self._test_file("short", "test"))
+
+ # Write to the same file again with different data.
+ d.addCallback(lambda ign: self._test_file("short", "different"))
+
+ # Test that temporary files are not uploaded.
+ d.addCallback(lambda ign: self._test_file("tempfile", "test", temporary=True))
+
+ # Test that we tolerate creation of a subdirectory.
+ d.addCallback(lambda ign: os.mkdir(os.path.join(self.local_dir, "directory")))
+
+ # Write something longer, and also try to test a Unicode name if the fs can represent it.
+ try:
+ name = u"l\u00F8ng".encode(sys.getfilesystemencoding())
+ except UnicodeEncodeError:
+ name = "long"
+ d.addCallback(lambda ign: self._test_file(name, "test"*100))
+
+ # TODO: test that causes an upload failure.
+ d.addCallback(lambda ign: self.failUnlessReallyEqual(self.uploader.failed, 0))
+
+ # Prevent unclean reactor errors.
+ def _cleanup(res):
+ if self.uploader is not None:
+ self.uploader.finish()
+ return res
+ d.addBoth(_cleanup)
+ return d
+
+ def _test_file(self, name, data, temporary=False):
+ previously_uploaded = self.uploader.uploaded
+ previously_disappeared = self.uploader.disappeared
+
+ d = defer.Deferred()
+
+ # Note: this relies on the fact that we only get one IN_CLOSE_WRITE notification per file
+ # (otherwise we would get a defer.AlreadyCalledError). Should we be relying on that?
+ self.uploader.set_uploaded_callback(d.callback)
+
+ path = filepath.FilePath(os.path.join(self.local_dir, name))
+ unicode_name = name.decode(sys.getfilesystemencoding())
+
+ f = open(path.path, "wb")
+ try:
+ if temporary and sys.platform != "win32":
+ os.unlink(path.path)
+ f.write(data)
+ finally:
+ f.close()
+ if temporary and sys.platform == "win32":
+ os.unlink(path.path)
+ self.notify_close_write(path)
+
+ if temporary:
+ d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, 'temp file not uploaded', None,
+ self.upload_dirnode.get, unicode_name))
+ d.addCallback(lambda ign: self.failUnlessReallyEqual(self.uploader.disappeared, previously_disappeared + 1))
+ else:
+ d.addCallback(lambda ign: self.upload_dirnode.get(unicode_name))
+ d.addCallback(download_to_data)
+ d.addCallback(lambda actual_data: self.failUnlessReallyEqual(actual_data, data))
+ d.addCallback(lambda ign: self.failUnlessReallyEqual(self.uploader.uploaded, previously_uploaded + 1))
+ return d
+
+
+class MockTest(DropUploadTestMixin, unittest.TestCase):
+ """This can run on any platform, and even if twisted.internet.inotify can't be imported."""
+
+ def test_errors(self):
+ self.basedir = "drop_upload.MockTest.test_errors"
+ self.set_up_grid()
+ errors_dir = os.path.join(self.basedir, "errors_dir")
+ os.mkdir(errors_dir)
+
+ client = self.g.clients[0]
+ d = client.create_dirnode()
+ def _made_upload_dir(n):
+ self.failUnless(IDirectoryNode.providedBy(n))
+ upload_uri = n.get_uri()
+ readonly_uri = n.get_readonly_uri()
+
+ self.shouldFail(AssertionError, 'invalid local dir', 'could not be represented',
+ DropUploader, client, upload_uri, '\xFF', inotify=fake_inotify)
+ self.shouldFail(AssertionError, 'non-existent local dir', 'not an existing directory',
+ DropUploader, client, upload_uri, os.path.join(self.basedir, "Laputa"), inotify=fake_inotify)
+
+ self.shouldFail(AssertionError, 'bad URI', 'not a directory URI',
+ DropUploader, client, 'bad', errors_dir, inotify=fake_inotify)
+ self.shouldFail(AssertionError, 'non-directory URI', 'not a directory URI',
+ DropUploader, client, 'URI:LIT:foo', errors_dir, inotify=fake_inotify)
+ self.shouldFail(AssertionError, 'readonly directory URI', 'does not refer to a writeable directory',
+ DropUploader, client, readonly_uri, errors_dir, inotify=fake_inotify)
+ d.addCallback(_made_upload_dir)
+ return d
+
+ def test_drop_upload(self):
+ self.inotify = fake_inotify
+ self.basedir = "drop_upload.MockTest.test_drop_upload"
+ return self._test()
+
+ def notify_close_write(self, path):
+ self.uploader._notifier.event(path, self.inotify.IN_CLOSE_WRITE)
+
+
+class RealTest(DropUploadTestMixin, unittest.TestCase):
+ """This is skipped unless both Twisted and the platform support inotify."""
+
+ def test_drop_upload(self):
+ # We should always have runtime.platform.supportsINotify, because we're using
+ # Twisted >= 10.1.
+ if not runtime.platform.supportsINotify():
+ raise unittest.SkipTest("Drop-upload support can only be tested for-real on an OS that supports inotify.")
+
+ self.inotify = None # use the real twisted.internet.inotify
+ self.basedir = "drop_upload.RealTest.test_drop_upload"
+ return self._test()
+
+ def notify_close_write(self, path):
+ # Writing to the file causes the notification.
+ pass
hunk ./src/allmydata/test/test_runner.py 256
self.failUnless(re.search(r"\n\[storage\]\n#.*\nenabled = true\n", content), content)
self.failUnless("\nreserved_space = 1G\n" in content)
+ self.failUnless(re.search(r"\n\[drop_upload\]\n#.*\nenabled = false\n", content), content)
+
# creating the node a second time should be rejected
rc, out, err = self.run_tahoe(argv)
self.failIfEqual(rc, 0, str((out, err, rc)))
}
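The frontend added above is driven by Twisted's inotify support: it watches the configured local directory for IN_CLOSE_WRITE and IN_MOVED_TO events and uploads each notified file into the target directory node. A minimal standalone sketch of that watch-and-callback pattern, assuming a scratch directory and a made-up callback name (an illustration only, not part of the patch):

    # A standalone sketch (not part of the patch) of the twisted.internet.inotify
    # pattern that DropUploader uses. The watched path and callback are made up.
    import os

    from twisted.internet import inotify, reactor
    from twisted.python import filepath

    def on_close_write(ignored, path, mask):
        # Called for each IN_CLOSE_WRITE or IN_MOVED_TO event; DropUploader
        # starts a Tahoe upload of 'path' at this point.
        print "event %s on %s" % (', '.join(inotify.humanReadableMask(mask)), path.path)

    watched = "/tmp/drop_upload_demo"   # illustrative; the directory must exist
    if not os.path.isdir(watched):
        os.mkdir(watched)

    notifier = inotify.INotify()
    notifier.startReading()
    notifier.watch(filepath.FilePath(watched),
                   mask=inotify.IN_CLOSE_WRITE | inotify.IN_MOVED_TO,
                   callbacks=[on_close_write])
    reactor.run()

DropUploader layers the Tahoe upload (FileName plus IDirectoryNode.add_file) on top of such a callback, which is why the tests only need to simulate the notification itself.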
[drop-upload: make counts visible on the statistics page, and disable some debugging. refs #1429
david-sarah@jacaranda.org**20110727023003
Ignore-this: 4e25022cca41d6012da067e96fadb1bf
] {
hunk ./src/allmydata/frontends/drop_upload.py 7
from twisted.internet import defer
from twisted.python.filepath import FilePath
from twisted.application import service
+from foolscap.api import eventually
from allmydata.interfaces import IDirectoryNode
hunk ./src/allmydata/frontends/drop_upload.py 26
% quote_output(local_dir_utf8))
self._client = client
+ self._stats_provider = client.stats_provider
self._convergence = client.convergence
self._local_path = FilePath(local_dir)
hunk ./src/allmydata/frontends/drop_upload.py 29
- self.uploaded = 0
- self.failed = 0
- self.disappeared = 0
if inotify is None:
from twisted.internet import inotify
hunk ./src/allmydata/frontends/drop_upload.py 48
self._notifier = inotify.INotify()
self._notifier.startReading()
+ self._stats_provider.count('drop_upload.dirs_monitored', 1)
# We don't watch for IN_CREATE, because that would cause us to read and upload a
# possibly-incomplete file before the application has closed it. There should always
hunk ./src/allmydata/frontends/drop_upload.py 60
def _notify(self, opaque, path, events_mask):
self._log("inotify event %r, %r, %r\n" % (opaque, path, ', '.join(self._inotify.humanReadableMask(events_mask))))
+ self._stats_provider.count('drop_upload.files_queued', 1)
+ eventually(self._process, opaque, path, events_mask)
+
+ def _process(self, opaque, path, events_mask):
d = defer.succeed(None)
# FIXME: if this already exists as a mutable file, we replace the directory entry,
hunk ./src/allmydata/frontends/drop_upload.py 75
d.addCallback(_add_file)
def _succeeded(ign):
- self.uploaded += 1
+ self._stats_provider.count('drop_upload.files_queued', -1)
+ self._stats_provider.count('drop_upload.files_uploaded', 1)
def _failed(f):
hunk ./src/allmydata/frontends/drop_upload.py 78
+ self._stats_provider.count('drop_upload.files_queued', -1)
if path.exists():
self._log("drop-upload: %r failed to upload due to %r" % (path.path, f))
hunk ./src/allmydata/frontends/drop_upload.py 81
- self.failed += 1
+ self._stats_provider.count('drop_upload.files_failed', 1)
return f
else:
self._log("drop-upload: notified file %r disappeared "
hunk ./src/allmydata/frontends/drop_upload.py 86
"(this is normal for temporary files): %r" % (path.path, f))
- self.disappeared += 1
+ self._stats_provider.count('drop_upload.files_disappeared', 1)
return None
d.addCallbacks(_succeeded, _failed)
d.addBoth(self._uploaded_callback)
hunk ./src/allmydata/frontends/drop_upload.py 98
def finish(self):
self._notifier.stopReading()
+ self._stats_provider.count('drop_upload.dirs_monitored', -1)
def _log(self, msg):
self._client.log(msg)
hunk ./src/allmydata/frontends/drop_upload.py 102
- open("events", "ab+").write(msg)
+ #open("events", "ab+").write(msg)
hunk ./src/allmydata/test/test_drop_upload.py 26
with the real INotify.
"""
+ def _get_count(self, name):
+ return self.stats_provider.get_stats()["counters"].get(name, 0)
+
def _test(self):
self.uploader = None
self.set_up_grid()
hunk ./src/allmydata/test/test_drop_upload.py 36
os.mkdir(self.local_dir)
self.client = self.g.clients[0]
+ self.stats_provider = self.client.stats_provider
+
d = self.client.create_dirnode()
def _made_upload_dir(n):
self.failUnless(IDirectoryNode.providedBy(n))
hunk ./src/allmydata/test/test_drop_upload.py 66
d.addCallback(lambda ign: self._test_file(name, "test"*100))
# TODO: test that causes an upload failure.
- d.addCallback(lambda ign: self.failUnlessReallyEqual(self.uploader.failed, 0))
+ d.addCallback(lambda ign: self.failUnlessReallyEqual(self._get_count('drop_upload.files_failed'), 0))
# Prevent unclean reactor errors.
def _cleanup(res):
hunk ./src/allmydata/test/test_drop_upload.py 77
return d
def _test_file(self, name, data, temporary=False):
- previously_uploaded = self.uploader.uploaded
- previously_disappeared = self.uploader.disappeared
+ previously_uploaded = self._get_count('drop_upload.files_uploaded')
+ previously_disappeared = self._get_count('drop_upload.files_disappeared')
d = defer.Deferred()
hunk ./src/allmydata/test/test_drop_upload.py 103
if temporary:
d.addCallback(lambda ign: self.shouldFail(NoSuchChildError, 'temp file not uploaded', None,
self.upload_dirnode.get, unicode_name))
- d.addCallback(lambda ign: self.failUnlessReallyEqual(self.uploader.disappeared, previously_disappeared + 1))
+ d.addCallback(lambda ign: self.failUnlessReallyEqual(self._get_count('drop_upload.files_disappeared'),
+ previously_disappeared + 1))
else:
d.addCallback(lambda ign: self.upload_dirnode.get(unicode_name))
d.addCallback(download_to_data)
hunk ./src/allmydata/test/test_drop_upload.py 109
d.addCallback(lambda actual_data: self.failUnlessReallyEqual(actual_data, data))
- d.addCallback(lambda ign: self.failUnlessReallyEqual(self.uploader.uploaded, previously_uploaded + 1))
+ d.addCallback(lambda ign: self.failUnlessReallyEqual(self._get_count('drop_upload.files_uploaded'),
+ previously_uploaded + 1))
+
+ d.addCallback(lambda ign: self.failUnlessReallyEqual(self._get_count('drop_upload.files_queued'), 0))
return d
hunk ./src/allmydata/web/statistics.xhtml 12
Node Statistics
+General
+
- Load Average:
- Peak Load:
hunk ./src/allmydata/web/statistics.xhtml 23
- Files Retrieved (mutable):
+Drop-Uploader
+
+
+ - Local Directories Monitored:
+ - Files Uploaded:
+ - File Changes Queued:
+ - Failed Uploads:
+
+
Raw Stats:
hunk ./src/allmydata/web/status.py 1293
return "%s files / %s bytes (%s)" % (files, bytes,
abbreviate_size(bytes))
+ def render_drop_monitored(self, ctx, data):
+ dirs = data["counters"].get("drop_upload.dirs_monitored", 0)
+ return "%s directories" % (dirs,)
+
+ def render_drop_uploads(self, ctx, data):
+ # TODO: bytes uploaded
+ files = data["counters"].get("drop_upload.files_uploaded", 0)
+ return "%s files" % (files,)
+
+ def render_drop_queued(self, ctx, data):
+ files = data["counters"].get("drop_upload.files_queued", 0)
+ return "%s files" % (files,)
+
+ def render_drop_failed(self, ctx, data):
+ files = data["counters"].get("drop_upload.files_failed", 0)
+ return "%s files" % (files,)
+
def render_raw(self, ctx, data):
raw = pprint.pformat(data)
return ctx.tag[raw]
}
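The patch above replaces the uploader's plain instance counters with counters kept by the client's stats provider, which is what both the tests' _get_count helper and the new statistics-page renderers read back. A rough sketch of that bookkeeping, using a stand-in object with the same count()/get_stats() shape (FakeStatsProvider here is illustrative, not Tahoe's real StatsProvider):

    # Stand-in with the same count()/get_stats() shape the patch uses; this is
    # not Tahoe's real StatsProvider, just an illustration of the bookkeeping.
    class FakeStatsProvider:
        def __init__(self):
            self.counters = {}
        def count(self, name, delta=1):
            self.counters[name] = self.counters.get(name, 0) + delta
        def get_stats(self):
            return {"counters": self.counters}

    stats = FakeStatsProvider()

    # _notify queues the file change...
    stats.count('drop_upload.files_queued', 1)
    # ...and _process's success path dequeues it and counts the upload.
    stats.count('drop_upload.files_queued', -1)
    stats.count('drop_upload.files_uploaded', 1)

    assert stats.get_stats()["counters"].get('drop_upload.files_queued', 0) == 0
    assert stats.get_stats()["counters"]['drop_upload.files_uploaded'] == 1

The failure and disappearance paths work the same way, decrementing files_queued and incrementing files_failed or files_disappeared respectively.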
[Prototype Windows implementation of drop-uploader. refs #1431
david-sarah@jacaranda.org**20110807192250
Ignore-this: 16937b2dd661d42056d528c08846f0bb
] {
move ./src/allmydata/test/fake_inotify.py ./src/allmydata/util/fake_inotify.py
hunk ./src/allmydata/client.py 435
from allmydata.frontends import drop_upload
s = drop_upload.DropUploader(self, upload_uri, local_dir_utf8)
s.setServiceParent(self)
+ s.start()
except Exception, e:
self.log("couldn't start drop-uploader: %r", args=(e,))
else:
hunk ./src/allmydata/frontends/drop_upload.py 16
class DropUploader(service.MultiService):
- def __init__(self, client, upload_uri, local_dir_utf8, inotify=None):
+ def __init__(self, client, upload_uri, local_dir_utf8, inotify=None, pending_delay=1.0):
service.MultiService.__init__(self)
try:
hunk ./src/allmydata/frontends/drop_upload.py 31
self._local_path = FilePath(local_dir)
if inotify is None:
- from twisted.internet import inotify
+ if sys.platform == "win32":
+ from allmydata.windows import inotify
+ else:
+ from twisted.internet import inotify
self._inotify = inotify
if not self._local_path.isdir():
hunk ./src/allmydata/frontends/drop_upload.py 50
self._uploaded_callback = lambda ign: None
self._notifier = inotify.INotify()
- self._notifier.startReading()
- self._stats_provider.count('drop_upload.dirs_monitored', 1)
+ if hasattr(self._notifier, 'set_pending_delay'):
+ self._notifier.set_pending_delay(pending_delay)
# We don't watch for IN_CREATE, because that would cause us to read and upload a
# possibly-incomplete file before the application has closed it. There should always
hunk ./src/allmydata/frontends/drop_upload.py 56
# be an IN_CLOSE_WRITE after an IN_CREATE (I think).
- # TODO: what about IN_MOVE_SELF?
- mask = inotify.IN_CLOSE_WRITE | inotify.IN_MOVED_TO
+ # TODO: what about IN_MOVE_SELF or IN_UNMOUNT?
+ mask = inotify.IN_CLOSE_WRITE | inotify.IN_MOVED_TO | inotify.IN_ONLYDIR
self._notifier.watch(self._local_path, mask=mask, callbacks=[self._notify])
hunk ./src/allmydata/frontends/drop_upload.py 60
+ def start(self):
+ d = self._notifier.startReading()
+ self._stats_provider.count('drop_upload.dirs_monitored', 1)
+ return d
+
def _notify(self, opaque, path, events_mask):
self._log("inotify event %r, %r, %r\n" % (opaque, path, ', '.join(self._inotify.humanReadableMask(events_mask))))
hunk ./src/allmydata/frontends/drop_upload.py 77
# FIXME: if this already exists as a mutable file, we replace the directory entry,
# but we should probably modify the file (as the SFTP frontend does).
def _add_file(ign):
- name = path.basename().decode(sys.getfilesystemencoding())
+ name = path.basename()
+ # on Windows the name is already Unicode
+ if not isinstance(name, unicode):
+ name = name.decode(sys.getfilesystemencoding())
+
u = FileName(path.path, self._convergence)
return self._parent.add_file(name, u)
d.addCallback(_add_file)
hunk ./src/allmydata/frontends/drop_upload.py 108
"""This sets a function that will be called after a file has been uploaded."""
self._uploaded_callback = callback
- def finish(self):
+ def finish(self, for_tests=False):
self._notifier.stopReading()
self._stats_provider.count('drop_upload.dirs_monitored', -1)
hunk ./src/allmydata/frontends/drop_upload.py 111
+ if for_tests and hasattr(self._notifier, 'wait_until_stopped'):
+ return self._notifier.wait_until_stopped()
+ else:
+ return defer.succeed(None)
def _log(self, msg):
self._client.log(msg)
hunk ./src/allmydata/test/test_drop_upload.py 10
from allmydata.interfaces import IDirectoryNode, NoSuchChildError
-from allmydata.util import fileutil
+from allmydata.util import fileutil, fake_inotify
from allmydata.util.consumer import download_to_data
from allmydata.test.no_network import GridTestMixin
from allmydata.test.common_util import ReallyEqualMixin
hunk ./src/allmydata/test/test_drop_upload.py 15
from allmydata.test.common import ShouldFailMixin
-from allmydata.test import fake_inotify
from allmydata.frontends.drop_upload import DropUploader
hunk ./src/allmydata/test/test_drop_upload.py 42
self.failUnless(IDirectoryNode.providedBy(n))
self.upload_dirnode = n
self.upload_uri = n.get_uri()
- self.uploader = DropUploader(self.client, self.upload_uri, self.local_dir, inotify=self.inotify)
+ self.uploader = DropUploader(self.client, self.upload_uri, self.local_dir,
+ inotify=self.inotify, pending_delay=0.2)
+ return self.uploader.start()
d.addCallback(_made_upload_dir)
# Write something short enough for a LIT file.
hunk ./src/allmydata/test/test_drop_upload.py 71
# Prevent unclean reactor errors.
def _cleanup(res):
+ d = defer.succeed(None)
if self.uploader is not None:
hunk ./src/allmydata/test/test_drop_upload.py 73
- self.uploader.finish()
- return res
+ d.addCallback(lambda ign: self.uploader.finish(for_tests=True))
+ d.addCallback(lambda ign: res)
+ return d
d.addBoth(_cleanup)
return d
hunk ./src/allmydata/test/test_drop_upload.py 101
f.close()
if temporary and sys.platform == "win32":
os.unlink(path.path)
+ fileutil.flush_volume(path.path)
self.notify_close_write(path)
if temporary:
hunk ./src/allmydata/test/test_drop_upload.py 165
def test_drop_upload(self):
# We should always have runtime.platform.supportsINotify, because we're using
# Twisted >= 10.1.
- if not runtime.platform.supportsINotify():
- raise unittest.SkipTest("Drop-upload support can only be tested for-real on an OS that supports inotify.")
+ if not sys.platform == "win32" and not runtime.platform.supportsINotify():
+ raise unittest.SkipTest("Drop-upload support can only be tested for-real on an OS that supports inotify or equivalent.")
hunk ./src/allmydata/test/test_drop_upload.py 168
- self.inotify = None # use the real twisted.internet.inotify
+ self.inotify = None # use the appropriate inotify for the platform
self.basedir = "drop_upload.RealTest.test_drop_upload"
return self._test()
hunk ./src/allmydata/util/fileutil.py 422
log.msg("OS call to get disk statistics failed")
return 0
+
+if sys.platform == "win32":
+ from ctypes import WINFUNCTYPE, windll, WinError
+ from ctypes.wintypes import BOOL, HANDLE, DWORD, LPCWSTR, LPVOID
+
+ # Windows API: CreateFileW
+ CreateFileW = WINFUNCTYPE(HANDLE, LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE) \
+ (("CreateFileW", windll.kernel32))
+
+ GENERIC_WRITE = 0x40000000
+ FILE_SHARE_READ = 0x00000001
+ FILE_SHARE_WRITE = 0x00000002
+ OPEN_EXISTING = 3
+ INVALID_HANDLE_VALUE = 0xFFFFFFFF
+
+ # Windows API: FlushFileBuffers
+ FlushFileBuffers = WINFUNCTYPE(BOOL, HANDLE)(("FlushFileBuffers", windll.kernel32))
+
+ # Windows API: CloseHandle
+ CloseHandle = WINFUNCTYPE(BOOL, HANDLE)(("CloseHandle", windll.kernel32))
+
+ # Flush the filesystem buffers of the volume containing 'path'.
+ def flush_volume(path):
+ drive = os.path.splitdrive(os.path.realpath(path))[0]
+
+ hVolume = CreateFileW(u"\\\\.\\" + drive,
+ GENERIC_WRITE,
+ FILE_SHARE_READ | FILE_SHARE_WRITE,
+ None,
+ OPEN_EXISTING,
+ 0,
+ None
+ )
+ if hVolume == INVALID_HANDLE_VALUE:
+ raise WinError()
+
+ if FlushFileBuffers(hVolume) == 0:
+ raise WinError()
+
+ CloseHandle(hVolume)
+else:
+ def flush_volume(path):
+ # use sync()?
+ pass
+
addfile ./src/allmydata/windows/inotify.py
hunk ./src/allmydata/windows/inotify.py 1
+
+# Windows near-equivalent to twisted.internet.inotify
+# This should only be imported on Windows.
+
+import os, sys
+
+from twisted.internet import reactor
+from twisted.internet.threads import deferToThread
+
+from allmydata.util.fake_inotify import *
+from allmydata.util.encodingutil import quote_output
+from allmydata.util import log, fileutil
+from allmydata.util.pollmixin import PollMixin
+
+from ctypes import WINFUNCTYPE, WinError, windll, POINTER, byref, create_string_buffer, addressof
+from ctypes.wintypes import BOOL, HANDLE, DWORD, LPCWSTR, LPVOID
+
+# Access right needed to open a directory handle for ReadDirectoryChangesW.
+FILE_LIST_DIRECTORY = 1
+
+# Windows API: CreateFileW
+CreateFileW = WINFUNCTYPE(HANDLE, LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE) \
+ (("CreateFileW", windll.kernel32))
+
+FILE_SHARE_READ = 0x00000001
+FILE_SHARE_WRITE = 0x00000002
+FILE_SHARE_DELETE = 0x00000004
+
+OPEN_EXISTING = 3
+
+FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
+
+# Windows API: CloseHandle
+CloseHandle = WINFUNCTYPE(BOOL, HANDLE)(("CloseHandle", windll.kernel32))
+
+# Windows API: ReadDirectoryChangesW
+ReadDirectoryChangesW = WINFUNCTYPE(BOOL, HANDLE, LPVOID, DWORD, BOOL, DWORD, POINTER(DWORD), LPVOID, LPVOID) \
+ (("ReadDirectoryChangesW", windll.kernel32))
+
+FILE_NOTIFY_CHANGE_FILE_NAME = 0x00000001
+FILE_NOTIFY_CHANGE_DIR_NAME = 0x00000002
+FILE_NOTIFY_CHANGE_ATTRIBUTES = 0x00000004
+#FILE_NOTIFY_CHANGE_SIZE = 0x00000008
+FILE_NOTIFY_CHANGE_LAST_WRITE = 0x00000010
+FILE_NOTIFY_CHANGE_LAST_ACCESS = 0x00000020
+#FILE_NOTIFY_CHANGE_CREATION = 0x00000040
+FILE_NOTIFY_CHANGE_SECURITY = 0x00000100
+
+# Values of the Action field of FILE_NOTIFY_INFORMATION.
+FILE_ACTION_ADDED = 0x00000001
+FILE_ACTION_REMOVED = 0x00000002
+FILE_ACTION_MODIFIED = 0x00000003
+FILE_ACTION_RENAMED_OLD_NAME = 0x00000004
+FILE_ACTION_RENAMED_NEW_NAME = 0x00000005
+
+_action_to_string = {
+ FILE_ACTION_ADDED : "FILE_ACTION_ADDED",
+ FILE_ACTION_REMOVED : "FILE_ACTION_REMOVED",
+ FILE_ACTION_MODIFIED : "FILE_ACTION_MODIFIED",
+ FILE_ACTION_RENAMED_OLD_NAME : "FILE_ACTION_RENAMED_OLD_NAME",
+ FILE_ACTION_RENAMED_NEW_NAME : "FILE_ACTION_RENAMED_NEW_NAME",
+}
+
+_action_to_inotify_mask = {
+ FILE_ACTION_ADDED : IN_CREATE,
+ FILE_ACTION_REMOVED : IN_DELETE,
+ FILE_ACTION_MODIFIED : IN_CHANGED,
+ FILE_ACTION_RENAMED_OLD_NAME : IN_MOVED_FROM,
+ FILE_ACTION_RENAMED_NEW_NAME : IN_MOVED_TO,
+}
+
+INVALID_HANDLE_VALUE = 0xFFFFFFFF
+
+
+class Event(object):
+ """
+ * action: a FILE_ACTION_* constant (not a bit mask)
+ * filename: a Unicode string, giving the name relative to the watched directory
+ """
+ def __init__(self, action, filename):
+ self.action = action
+ self.filename = filename
+
+ def __repr__(self):
+ return "Event(%r, %r)" % (_action_to_string.get(self.action, self.action), self.filename)
+
+
+class FileNotifyInformation(object):
+ """
+ I represent a buffer containing FILE_NOTIFY_INFORMATION structures, and can
+ iterate over those structures, decoding them into Event objects.
+ """
+
+ def __init__(self, size=1024):
+ self.size = size
+ self.buffer = create_string_buffer(size)
+ address = addressof(self.buffer)
+ assert address & 3 == 0, "address 0x%X returned by create_string_buffer is not DWORD-aligned" % (address,)
+ self.data = None
+
+ def read_changes(self, hDirectory, recursive, filter):
+ bytes_returned = DWORD(0)
+ r = ReadDirectoryChangesW(hDirectory,
+ self.buffer,
+ self.size,
+ recursive,
+ filter,
+ byref(bytes_returned),
+ None, # NULL -> no overlapped I/O
+ None # NULL -> no completion routine
+ )
+ if r == 0:
+ raise WinError()
+ self.data = self.buffer.raw[:bytes_returned.value]
+
+ def __iter__(self):
+ # Iterator implemented as generator:
+ pos = 0
+ while True:
+ bytes = self._read_dword(pos+8)
+ s = Event(self._read_dword(pos+4),
+ self.data[pos+12 : pos+12+bytes].decode('utf-16-le'))
+
+ next_entry_offset = self._read_dword(pos)
+ yield s
+ if next_entry_offset == 0:
+ break
+ pos = pos + next_entry_offset
+
+ def _read_dword(self, i):
+ # little-endian
+ return ( ord(self.data[i]) |
+ (ord(self.data[i+1]) << 8) |
+ (ord(self.data[i+2]) << 16) |
+ (ord(self.data[i+3]) << 24))
+
+
+def _open_directory(path_u):
+ hDirectory = CreateFileW(path_u,
+ FILE_LIST_DIRECTORY, # access rights
+ FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
+ # don't prevent other processes from accessing
+ None, # no security descriptor
+ OPEN_EXISTING, # directory must already exist
+ FILE_FLAG_BACKUP_SEMANTICS, # necessary to open a directory
+ None # no template file
+ )
+ if hDirectory == INVALID_HANDLE_VALUE:
+ e = WinError()
+ raise OSError("Opening directory %s gave Windows error %r: %s" % (quote_output(path_u), e.args[0], e.args[1]))
+ return hDirectory
+
+
+def simple_test():
+ path_u = u"test"
+ filter = FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE
+ recursive = False
+
+ hDirectory = _open_directory(path_u)
+ fni = FileNotifyInformation()
+ print "Waiting..."
+ while True:
+ fni.read_changes(hDirectory, recursive, filter)
+ print repr(fni.data)
+ for info in fni:
+ print info
+
+
+class INotify(PollMixin):
+ def __init__(self):
+ self._stop = None
+ self._filter = None
+ self._callbacks = None
+ self._hDirectory = None
+ self._path = None
+ self._pending = set()
+ self._pending_delay = 1.0
+
+ def set_pending_delay(self, delay):
+ self._pending_delay = delay
+
+ def startReading(self):
+ deferToThread(self._thread)
+ return self.poll(lambda: self._stop == False)
+
+ def stopReading(self):
+ if self._stop is not None:
+ self._stop = True
+
+ def wait_until_stopped(self):
+ fileutil.write(os.path.join(self._path.path, u".ignore-me"), "")
+ return self.poll(lambda: self._stop is None)
+
+ def watch(self, path, mask=IN_WATCH_MASK, autoAdd=False, callbacks=None, recursive=False):
+ assert self._stop is None, "watch() can only be called before startReading()"
+ assert self._filter is None, "only one watch is supported"
+ assert isinstance(autoAdd, bool), autoAdd
+ assert isinstance(recursive, bool), recursive
+ assert autoAdd == recursive, ("autoAdd = %r, recursive = %r, but we need them to be the same"
+ % (autoAdd, recursive))
+
+ self._path = path
+ path_u = path.path
+ if not isinstance(path_u, unicode):
+ path_u = path_u.decode(sys.getfilesystemencoding())
+ assert isinstance(path_u, unicode), path_u
+
+ self._filter = FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE
+
+ if mask & (IN_ACCESS | IN_CLOSE_NOWRITE | IN_OPEN):
+ self._filter = self._filter | FILE_NOTIFY_CHANGE_LAST_ACCESS
+ if mask & IN_ATTRIB:
+ self._filter = self._filter | FILE_NOTIFY_CHANGE_ATTRIBUTES | FILE_NOTIFY_CHANGE_SECURITY
+
+ self._recursive = recursive
+ self._callbacks = callbacks or []
+ self._hDirectory = _open_directory(path_u)
+
+ def _thread(self):
+ try:
+ assert self._filter is not None, "no watch set"
+
+ # To call Twisted or Tahoe APIs from this thread, use reactor.callFromThread
+ # as described in the Twisted threading documentation.
+
+ fni = FileNotifyInformation()
+
+ while True:
+ self._stop = False
+ fni.read_changes(self._hDirectory, self._recursive, self._filter)
+ for info in fni:
+ if self._stop:
+ hDirectory = self._hDirectory
+ self._callbacks = None
+ self._hDirectory = None
+ CloseHandle(hDirectory)
+ self._stop = None
+ return
+
+ path = self._path.preauthChild(info.filename) # FilePath with Unicode path
+ mask = _action_to_inotify_mask.get(info.action, IN_CHANGED)
+
+ # Pass path and mask explicitly so that each queued call sees the values
+ # from its own event, not whatever the loop variables hold when it runs.
+ def _maybe_notify(path, mask):
+ event = (path, mask)
+ if event not in self._pending:
+ self._pending.add(event)
+ def _do_callbacks():
+ self._pending.remove(event)
+ for cb in self._callbacks:
+ try:
+ cb(None, path, mask)
+ except Exception, e:
+ log.msg(e)
+ reactor.callLater(self._pending_delay, _do_callbacks)
+ reactor.callFromThread(_maybe_notify, path, mask)
+ except Exception, e:
+ log.msg(e)
+ self._stop = False # pretend we started, so startReading's poll can complete
+ raise
}
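The Windows prototype above parses FILE_NOTIFY_INFORMATION records by hand: the little-endian DWORD at offset 0 holds the offset of the next record (0 for the last one), the DWORD at offset 4 holds the action code, the DWORD at offset 8 holds the length of the name in bytes, and the name itself starts at offset 12 as UTF-16-LE. A small self-contained check of that layout, decoding a hand-built record with the same offsets FileNotifyInformation uses (the record contents are made-up test data, not output of ReadDirectoryChangesW):

    # Hand-built FILE_NOTIFY_INFORMATION record (illustrative test data), decoded
    # with the same offsets FileNotifyInformation uses above.
    import struct

    name = u"example.txt".encode('utf-16-le')
    # DWORDs: next_entry_offset = 0 (last record), action = 3 (FILE_ACTION_MODIFIED),
    # then the length of the name in bytes, then the UTF-16-LE name at offset 12.
    record = struct.pack("<LLL", 0, 3, len(name)) + name

    next_offset, action, name_bytes = struct.unpack_from("<LLL", record, 0)
    decoded_name = record[12:12+name_bytes].decode('utf-16-le')

    assert (next_offset, action, decoded_name) == (0, 3, u"example.txt")

A real buffer returned by ReadDirectoryChangesW may contain several such records chained by next_entry_offset, which is what FileNotifyInformation.__iter__ walks.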
Context:
[node.py: ensure that client and introducer nodes record their port number and use that port on the next restart, fixing a regression caused by #1385. fixes #1469.
david-sarah@jacaranda.org**20110806221934
Ignore-this: 1aa9d340b6570320ab2f9edc89c9e0a8
]
[test_runner.py: fix a race condition in the test when NODE_URL_FILE is written before PORTNUM_FILE. refs #1469
david-sarah@jacaranda.org**20110806231842
Ignore-this: ab01ae7cec3a073e29eec473e64052a0
]
[test_runner.py: cleanups of HOTLINE_FILE writing and removal.
david-sarah@jacaranda.org**20110806231652
Ignore-this: 25f5c5d6f5d8faebb26a4ce80110a335
]
[test_runner.py: remove an unused constant.
david-sarah@jacaranda.org**20110806221416
Ignore-this: eade2695cbabbea9cafeaa8debe410bb
]
[node.py: fix the error path for a missing config option so that it works for a Unicode base directory.
david-sarah@jacaranda.org**20110806221007
Ignore-this: 4eb9cc04b2ce05182a274a0d69dafaf3
]
[test_runner.py: test that client and introducer nodes record their port number and use that port on the next restart. This tests for a regression caused by ref #1385.
david-sarah@jacaranda.org**20110806220635
Ignore-this: 40a0c040b142dbddd47e69b3c3712f5
]
[test_runner.py: fix a bug in CreateNode.do_create introduced in changeset [5114] when the tahoe.cfg file has been written with CRLF line endings. refs #1385
david-sarah@jacaranda.org**20110804003032
Ignore-this: 7b7afdcf99da6671afac2d42828883eb
]
[test_client.py: repair Basic.test_error_on_old_config_files. refs #1385
david-sarah@jacaranda.org**20110803235036
Ignore-this: 31e2a9c3febe55948de7e144353663e
]
[test_checker.py: increase timeout for TooParallel.test_immutable again. The ARM buildslave took 38 seconds, so 40 seconds is too close to the edge; make it 80.
david-sarah@jacaranda.org**20110803214042
Ignore-this: 2d8026a6b25534e01738f78d6c7495cb
]
[test_runner.py: fix RunNode.test_introducer to not rely on the mtime of introducer.furl to detect when the node has restarted. Instead we detect when node.url has been written. refs #1385
david-sarah@jacaranda.org**20110803180917
Ignore-this: 11ddc43b107beca42cb78af88c5c394c
]
[Further improve error message about old config files. refs #1385
david-sarah@jacaranda.org**20110803174546
Ignore-this: 9d6cc3c288d9863dce58faafb3855917
]
[Slightly improve error message about old config files (avoid unnecessary Unicode escaping). refs #1385
david-sarah@jacaranda.org**20110803163848
Ignore-this: a3e3930fba7ccf90b8db3d2ed5829df4
]
[test_checker.py: increase timeout for TooParallel.test_immutable (was consistently failing on ARM buildslave).
david-sarah@jacaranda.org**20110803163213
Ignore-this: d0efceaf12628e8791862b80c85b5d56
]
[Fix the bug that prevents an introducer from starting when introducer.furl already exists. Also remove some dead code that used to read old config files, and rename 'warn_about_old_config_files' to reflect that it's not a warning. refs #1385
david-sarah@jacaranda.org**20110803013212
Ignore-this: 2d6cd14bd06a7493b26f2027aff78f4d
]
[test_runner.py: modify RunNode.test_introducer to test that starting an introducer works when the introducer.furl file already exists. refs #1385
david-sarah@jacaranda.org**20110803012704
Ignore-this: 8cf7f27ac4bfbb5ad8ca4a974106d437
]
[verifier: correct a bug introduced in changeset [5106] that caused us to only verify the first block of a file. refs #1395
david-sarah@jacaranda.org**20110802172437
Ignore-this: 87fb77854a839ff217dce73544775b11
]
[test_repairer: add a deterministic test of share data corruption that always flips the bits of the last byte of the share data. refs #1395
david-sarah@jacaranda.org**20110802175841
Ignore-this: 72f54603785007e88220c8d979e08be7
]
[verifier: serialize the fetching of blocks within a share so that we don't use too much RAM
zooko@zooko.com**20110802063703
Ignore-this: debd9bac07dcbb6803f835a9e2eabaa1
Shares are still verified in parallel, but within a share, don't request a
block until the previous block has been verified and the memory we used to hold
it has been freed up.
Patch originally due to Brian. This version has a mockery-patchery-style test
which is "low tech" (it implements the patching inline in the test code instead
of using an extension of the mock.patch() function from the mock library) and
which unpatches in case of exception.
fixes #1395
]
[add docs about timing-channel attacks
Brian Warner **20110802044541
Ignore-this: 73114d5f5ed9ce252597b707dba3a194
]
['test-coverage' now needs PYTHONPATH=. to find TOP/twisted/plugins/
Brian Warner **20110802041952
Ignore-this: d40f1f4cb426ea1c362fc961baedde2
]
[remove nodeid from WriteBucketProxy classes and customers
warner@lothar.com**20110801224317
Ignore-this: e55334bb0095de11711eeb3af827e8e8
refs #1363
]
[remove get_serverid() from ReadBucketProxy and customers, including Checker
warner@lothar.com**20110801224307
Ignore-this: 837aba457bc853e4fd413ab1a94519cb
and debug.py dump-share commands
refs #1363
]
[reject old-style (pre-Tahoe-LAFS-v1.3) configuration files
zooko@zooko.com**20110801232423
Ignore-this: b58218fcc064cc75ad8f05ed0c38902b
Check for the existence of any of them and if any are found raise exception which will abort the startup of the node.
This is a backwards-incompatible change for anyone who is still using old-style configuration files.
fixes #1385
]
[whitespace-cleanup
zooko@zooko.com**20110725015546
Ignore-this: 442970d0545183b97adc7bd66657876c
]
[tests: use fileutil.write() instead of open() to ensure timely close even without CPython-style reference counting
zooko@zooko.com**20110331145427
Ignore-this: 75aae4ab8e5fa0ad698f998aaa1888ce
Some of these already had an explicit close() but I went ahead and replaced them with fileutil.write() as well for the sake of uniformity.
]
[Address Kevan's comment in #776 about Options classes missed when adding 'self.command_name'. refs #776, #1359
david-sarah@jacaranda.org**20110801221317
Ignore-this: 8881d42cf7e6a1d15468291b0cb8fab9
]
[docs/frontends/webapi.rst: change some more instances of 'delete' or 'remove' to 'unlink', change some section titles, and use two blank lines between all sections. refs #776, #1104
david-sarah@jacaranda.org**20110801220919
Ignore-this: 572327591137bb05c24c44812d4b163f
]
[cleanup: implement rm as a synonym for unlink rather than vice-versa. refs #776
david-sarah@jacaranda.org**20110801220108
Ignore-this: 598dcbed870f4f6bb9df62de9111b343
]
[docs/webapi.rst: address Kevan's comments about use of 'delete' on ref #1104
david-sarah@jacaranda.org**20110801205356
Ignore-this: 4fbf03864934753c951ddeff64392491
]
[docs: some changes of 'delete' or 'rm' to 'unlink'. refs #1104
david-sarah@jacaranda.org**20110713002722
Ignore-this: 304d2a330d5e6e77d5f1feed7814b21c
]
[WUI: change the label of the button to unlink a file from 'del' to 'unlink'. Also change some internal names to 'unlink', and allow 't=unlink' as a synonym for 't=delete' in the web-API interface. Incidentally, improve a test to check for the rename button as well as the unlink button. fixes #1104
david-sarah@jacaranda.org**20110713001218
Ignore-this: 3eef6b3f81b94a9c0020a38eb20aa069
]
[src/allmydata/web/filenode.py: delete a stale comment that was made incorrect by changeset [3133].
david-sarah@jacaranda.org**20110801203009
Ignore-this: b3912e95a874647027efdc97822dd10e
]
[fix typo introduced during rebasing of 'remove get_serverid from
Brian Warner **20110801200341
Ignore-this: 4235b0f585c0533892193941dbbd89a8
DownloadStatus.add_dyhb_request and customers' patch, to fix test failure.
]
[remove get_serverid from DownloadStatus.add_dyhb_request and customers
zooko@zooko.com**20110801185401
Ignore-this: db188c18566d2d0ab39a80c9dc8f6be6
This patch is a rebase of a patch originally written by Brian. I didn't change any of the intent of Brian's patch, just ported it to current trunk.
refs #1363
]
[remove get_serverid from DownloadStatus.add_block_request and customers
zooko@zooko.com**20110801185344
Ignore-this: 8bfa8201d6147f69b0fbe31beea9c1e
This is a rebase of a patch Brian originally wrote. I haven't changed the intent of that patch, just ported it to trunk.
refs #1363
]
[apply zooko's advice: storage_client get_known_servers() returns a frozenset, caller sorts
warner@lothar.com**20110801174452
Ignore-this: 2aa13ea6cbed4e9084bd604bf8633692
refs #1363
]
[test_immutable.Test: rewrite to use NoNetworkGrid, now takes 2.7s not 97s
warner@lothar.com**20110801174444
Ignore-this: 54f30b5d7461d2b3514e2a0172f3a98c
remove now-unused ShareManglingMixin
refs #1363
]
[DownloadStatus.add_known_share wants to be used by Finder, web.status
warner@lothar.com**20110801174436
Ignore-this: 1433bcd73099a579abe449f697f35f9
refs #1363
]
[replace IServer.name() with get_name(), and get_longname()
warner@lothar.com**20110801174428
Ignore-this: e5a6f7f6687fd7732ddf41cfdd7c491b
This patch was originally written by Brian, but was re-recorded by Zooko to use
darcs replace instead of hunks for any file in which it would result in fewer
total hunks.
refs #1363
]
[upload.py: apply David-Sarah's advice rename (un)contacted(2) trackers to first_pass/second_pass/next_pass
zooko@zooko.com**20110801174143
Ignore-this: e36e1420bba0620a0107bd90032a5198
This patch was written by Brian but was re-recorded by Zooko (with David-Sarah looking on) to use darcs replace instead of editing to rename the three variables to their new names.
refs #1363
]
[Coalesce multiple Share.loop() calls, make downloads faster. Closes #1268.
Brian Warner **20110801151834
Ignore-this: 48530fce36c01c0ff708f61c2de7e67a
]
[src/allmydata/_auto_deps.py: 'i686' is another way of spelling x86.
david-sarah@jacaranda.org**20110801034035
Ignore-this: 6971e0621db2fba794d86395b4d51038
]
[tahoe_rm.py: better error message when there is no path. refs #1292
david-sarah@jacaranda.org**20110122064212
Ignore-this: ff3bb2c9f376250e5fd77eb009e09018
]
[test_cli.py: Test for error message when 'tahoe rm' is invoked without a path. refs #1292
david-sarah@jacaranda.org**20110104105108
Ignore-this: 29ec2f2e0251e446db96db002ad5dd7d
]
[src/allmydata/__init__.py: suppress a spurious warning from 'bin/tahoe --version[-and-path]' about twisted-web and twisted-core packages.
david-sarah@jacaranda.org**20110801005209
Ignore-this: 50e7cd53cca57b1870d9df0361c7c709
]
[test_cli.py: use to_str on fields loaded using simplejson.loads in new tests. refs #1304
david-sarah@jacaranda.org**20110730032521
Ignore-this: d1d6dfaefd1b4e733181bf127c79c00b
]
[cli: make 'tahoe cp' overwrite mutable files in-place
Kevan Carstensen **20110729202039
Ignore-this: b2ad21a19439722f05c49bfd35b01855
]
[SFTP: write an error message to standard error for unrecognized shell commands. Change the existing message for shell sessions to be written to standard error, and refactor some duplicated code. Also change the lines of the error messages to end in CRLF, and take into account Kevan's review comments. fixes #1442, #1446
david-sarah@jacaranda.org**20110729233102
Ignore-this: d2f2bb4664f25007d1602bf7333e2cdd
]
[src/allmydata/scripts/cli.py: fix pyflakes warning.
david-sarah@jacaranda.org**20110728021402
Ignore-this: 94050140ddb99865295973f49927c509
]
[Fix the help synopses of CLI commands to include [options] in the right place. fixes #1359, fixes #636
david-sarah@jacaranda.org**20110724225440
Ignore-this: 2a8e488a5f63dabfa9db9efd83768a5
]
[encodingutil: argv and output encodings are always the same on all platforms. Lose the unnecessary generality of them being different. fixes #1120
david-sarah@jacaranda.org**20110629185356
Ignore-this: 5ebacbe6903dfa83ffd3ff8436a97787
]
[docs/man/tahoe.1: add man page. fixes #1420
david-sarah@jacaranda.org**20110724171728
Ignore-this: fc7601ec7f25494288d6141d0ae0004c
]
[Update the dependency on zope.interface to fix an incompatiblity between Nevow and zope.interface 3.6.4. fixes #1435
david-sarah@jacaranda.org**20110721234941
Ignore-this: 2ff3fcfc030fca1a4d4c7f1fed0f2aa9
]
[frontends/ftpd.py: remove the check for IWriteFile.close since we're now guaranteed to be using Twisted >= 10.1 which has it.
david-sarah@jacaranda.org**20110722000320
Ignore-this: 55cd558b791526113db3f83c00ec328a
]
[Update the dependency on Twisted to >= 10.1. This allows us to simplify some documentation: it's no longer necessary to install pywin32 on Windows, or apply a patch to Twisted in order to use the FTP frontend. fixes #1274, #1438. refs #1429
david-sarah@jacaranda.org**20110721233658
Ignore-this: 81b41745477163c9b39c0b59db91cc62
]
[misc/build_helpers/run_trial.py: undo change to block pywin32 (it didn't work because run_trial.py is no longer used). refs #1334
david-sarah@jacaranda.org**20110722035402
Ignore-this: 5d03f544c4154f088e26c7107494bf39
]
[misc/build_helpers/run_trial.py: ensure that pywin32 is not on the sys.path when running the test suite. Includes some temporary debugging printouts that will be removed. refs #1334
david-sarah@jacaranda.org**20110722024907
Ignore-this: 5141a9f83a4085ed4ca21f0bbb20bb9c
]
[docs/running.rst: use 'tahoe run ~/.tahoe' instead of 'tahoe run' (the default is the current directory, unlike 'tahoe start').
david-sarah@jacaranda.org**20110718005949
Ignore-this: 81837fbce073e93d88a3e7ae3122458c
]
[docs/running.rst: say to put the introducer.furl in tahoe.cfg.
david-sarah@jacaranda.org**20110717194315
Ignore-this: 954cc4c08e413e8c62685d58ff3e11f3
]
[README.txt: say that quickstart.rst is in the docs directory.
david-sarah@jacaranda.org**20110717192400
Ignore-this: bc6d35a85c496b77dbef7570677ea42a
]
[setup: remove the dependency on foolscap's "secure_connections" extra, add a dependency on pyOpenSSL
zooko@zooko.com**20110717114226
Ignore-this: df222120d41447ce4102616921626c82
fixes #1383
]
[test_sftp.py cleanup: remove a redundant definition of failUnlessReallyEqual.
david-sarah@jacaranda.org**20110716181813
Ignore-this: 50113380b368c573f07ac6fe2eb1e97f
]
[docs: add missing link in NEWS.rst
zooko@zooko.com**20110712153307
Ignore-this: be7b7eb81c03700b739daa1027d72b35
]
[contrib: remove the contributed fuse modules and the entire contrib/ directory, which is now empty
zooko@zooko.com**20110712153229
Ignore-this: 723c4f9e2211027c79d711715d972c5
Also remove a couple of vestigial references to figleaf, which is long gone.
fixes #1409 (remove contrib/fuse)
]
[add Protovis.js-based download-status timeline visualization
Brian Warner **20110629222606
Ignore-this: 477ccef5c51b30e246f5b6e04ab4a127
provide status overlap info on the webapi t=json output, add decode/decrypt
rate tooltips, add zoomin/zoomout buttons
]
[add more download-status data, fix tests
Brian Warner **20110629222555
Ignore-this: e9e0b7e0163f1e95858aa646b9b17b8c
]
[prepare for viz: improve DownloadStatus events
Brian Warner **20110629222542
Ignore-this: 16d0bde6b734bb501aa6f1174b2b57be
consolidate IDownloadStatusHandlingConsumer stuff into DownloadNode
]
[docs: fix error in crypto specification that was noticed by Taylor R Campbell
zooko@zooko.com**20110629185711
Ignore-this: b921ed60c1c8ba3c390737fbcbe47a67
]
[setup.py: don't make bin/tahoe.pyscript executable. fixes #1347
david-sarah@jacaranda.org**20110130235809
Ignore-this: 3454c8b5d9c2c77ace03de3ef2d9398a
]
[Makefile: remove targets relating to 'setup.py check_auto_deps' which no longer exists. fixes #1345
david-sarah@jacaranda.org**20110626054124
Ignore-this: abb864427a1b91bd10d5132b4589fd90
]
[Makefile: add 'make check' as an alias for 'make test'. Also remove an unnecessary dependency of 'test' on 'build' and 'src/allmydata/_version.py'. fixes #1344
david-sarah@jacaranda.org**20110623205528
Ignore-this: c63e23146c39195de52fb17c7c49b2da
]
[Rename test_package_initialization.py to (much shorter) test_import.py .
Brian Warner **20110611190234
Ignore-this: 3eb3dbac73600eeff5cfa6b65d65822
The former name was making my 'ls' listings hard to read, by forcing them
down to just two columns.
]
[tests: fix tests to accomodate [20110611153758-92b7f-0ba5e4726fb6318dac28fb762a6512a003f4c430]
zooko@zooko.com**20110611163741
Ignore-this: 64073a5f39e7937e8e5e1314c1a302d1
Apparently none of the two authors (stercor, terrell), three reviewers (warner, davidsarah, terrell), or one committer (me) actually ran the tests. This is presumably due to #20.
fixes #1412
]
[wui: right-align the size column in the WUI
zooko@zooko.com**20110611153758
Ignore-this: 492bdaf4373c96f59f90581c7daf7cd7
Thanks to Ted "stercor" Rolle Jr. and Terrell Russell.
fixes #1412
]
[docs: three minor fixes
zooko@zooko.com**20110610121656
Ignore-this: fec96579eb95aceb2ad5fc01a814c8a2
CREDITS for arc for stats tweak
fix link to .zip file in quickstart.rst (thanks to ChosenOne for noticing)
English usage tweak
]
[docs/running.rst: fix stray HTML (not .rst) link noticed by ChosenOne.
david-sarah@jacaranda.org**20110609223719
Ignore-this: fc50ac9c94792dcac6f1067df8ac0d4a
]
[server.py: get_latencies now reports percentiles _only_ if there are sufficient observations for the interpretation of the percentile to be unambiguous.
wilcoxjg@gmail.com**20110527120135
Ignore-this: 2e7029764bffc60e26f471d7c2b6611e
interfaces.py: modified the return type of RIStatsProvider.get_stats to allow for None as a return value
NEWS.rst, stats.py: documentation of change to get_latencies
stats.rst: now documents percentile modification in get_latencies
test_storage.py: test_latencies now expects None in output categories that contain too few samples for the associated percentile to be unambiguously reported.
fixes #1392
]
[docs: revert link in relnotes.txt from NEWS.rst to NEWS, since the former did not exist at revision 5000.
david-sarah@jacaranda.org**20110517011214
Ignore-this: 6a5be6e70241e3ec0575641f64343df7
]
[docs: convert NEWS to NEWS.rst and change all references to it.
david-sarah@jacaranda.org**20110517010255
Ignore-this: a820b93ea10577c77e9c8206dbfe770d
]
[docs: remove out-of-date docs/testgrid/introducer.furl and containing directory. fixes #1404
david-sarah@jacaranda.org**20110512140559
Ignore-this: 784548fc5367fac5450df1c46890876d
]
[scripts/common.py: don't assume that the default alias is always 'tahoe' (it is, but the API of get_alias doesn't say so). refs #1342
david-sarah@jacaranda.org**20110130164923
Ignore-this: a271e77ce81d84bb4c43645b891d92eb
]
[setup: don't catch all Exception from check_requirement(), but only PackagingError and ImportError
zooko@zooko.com**20110128142006
Ignore-this: 57d4bc9298b711e4bc9dc832c75295de
I noticed this because I had accidentally inserted a bug which caused AssertionError to be raised from check_requirement().
]
[M-x whitespace-cleanup
zooko@zooko.com**20110510193653
Ignore-this: dea02f831298c0f65ad096960e7df5c7
]
[docs: fix typo in running.rst, thanks to arch_o_median
zooko@zooko.com**20110510193633
Ignore-this: ca06de166a46abbc61140513918e79e8
]
[relnotes.txt: don't claim to work on Cygwin (which has been untested for some time). refs #1342
david-sarah@jacaranda.org**20110204204902
Ignore-this: 85ef118a48453d93fa4cddc32d65b25b
]
[relnotes.txt: forseeable -> foreseeable. refs #1342
david-sarah@jacaranda.org**20110204204116
Ignore-this: 746debc4d82f4031ebf75ab4031b3a9
]
[replace remaining .html docs with .rst docs
zooko@zooko.com**20110510191650
Ignore-this: d557d960a986d4ac8216d1677d236399
Remove install.html (long since deprecated).
Also replace some obsolete references to install.html with references to quickstart.rst.
Fix some broken internal references within docs/historical/historical_known_issues.txt.
Thanks to Ravi Pinjala and Patrick McDonald.
refs #1227
]
[docs: FTP-and-SFTP.rst: fix a minor error and update the information about which version of Twisted fixes #1297
zooko@zooko.com**20110428055232
Ignore-this: b63cfb4ebdbe32fb3b5f885255db4d39
]
[munin tahoe_files plugin: fix incorrect file count
francois@ctrlaltdel.ch**20110428055312
Ignore-this: 334ba49a0bbd93b4a7b06a25697aba34
fixes #1391
]
[corrected "k must never be smaller than N" to "k must never be greater than N"
secorp@allmydata.org**20110425010308
Ignore-this: 233129505d6c70860087f22541805eac
]
[Fix a test failure in test_package_initialization on Python 2.4.x due to exceptions being stringified differently than in later versions of Python. refs #1389
david-sarah@jacaranda.org**20110411190738
Ignore-this: 7847d26bc117c328c679f08a7baee519
]
[tests: add test for including the ImportError message and traceback entry in the summary of errors from importing dependencies. refs #1389
david-sarah@jacaranda.org**20110410155844
Ignore-this: fbecdbeb0d06a0f875fe8d4030aabafa
]
[allmydata/__init__.py: preserve the message and last traceback entry (file, line number, function, and source line) of ImportErrors in the package versions string. fixes #1389
david-sarah@jacaranda.org**20110410155705
Ignore-this: 2f87b8b327906cf8bfca9440a0904900
]
[remove unused variable detected by pyflakes
zooko@zooko.com**20110407172231
Ignore-this: 7344652d5e0720af822070d91f03daf9
]
[allmydata/__init__.py: Nicer reporting of unparseable version numbers in dependencies. fixes #1388
david-sarah@jacaranda.org**20110401202750
Ignore-this: 9c6bd599259d2405e1caadbb3e0d8c7f
]
[update FTP-and-SFTP.rst: the necessary patch is included in Twisted-10.1
Brian Warner **20110325232511
Ignore-this: d5307faa6900f143193bfbe14e0f01a
]
[control.py: remove all uses of s.get_serverid()
warner@lothar.com**20110227011203
Ignore-this: f80a787953bd7fa3d40e828bde00e855
]
[web: remove some uses of s.get_serverid(), not all
warner@lothar.com**20110227011159
Ignore-this: a9347d9cf6436537a47edc6efde9f8be
]
[immutable/downloader/fetcher.py: remove all get_serverid() calls
warner@lothar.com**20110227011156
Ignore-this: fb5ef018ade1749348b546ec24f7f09a
]
[immutable/downloader/fetcher.py: fix diversity bug in server-response handling
warner@lothar.com**20110227011153
Ignore-this: bcd62232c9159371ae8a16ff63d22c1b
When blocks terminate (either COMPLETE or CORRUPT/DEAD/BADSEGNUM), the
_shares_from_server dict was being popped incorrectly (using shnum as the
index instead of serverid). I'm still thinking through the consequences of
this bug. It was probably benign and really hard to detect. I think it would
cause us to incorrectly believe that we're pulling too many shares from a
server, and thus prefer a different server rather than asking for a second
share from the first server. The diversity code is intended to spread out the
number of shares simultaneously being requested from each server, but with
this bug, it might have been spreading out the total number of shares ever
requested, not just the number in flight at once. (Note that SegmentFetcher is scoped to a single
segment, so the effect doesn't last very long).
]
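A small self-contained sketch of the bookkeeping mistake described above, with hypothetical, simplified names rather than the actual fetcher.py code: the map of outstanding requests is keyed by server, so popping it by share number silently does nothing and the per-server load count never goes back down.

# Illustrative sketch only; names and structure are assumptions.
class DiversityTracker:
    def __init__(self):
        self._shares_from_server = {}   # serverid -> set of shnums in flight

    def request_started(self, serverid, shnum):
        self._shares_from_server.setdefault(serverid, set()).add(shnum)

    def request_finished(self, serverid, shnum):
        # correct: index by serverid; the bug was effectively indexing by
        # shnum, which never matched a key, so finished requests stayed
        # counted as outstanding and each server looked ever more loaded
        outstanding = self._shares_from_server.get(serverid)
        if outstanding is not None:
            outstanding.discard(shnum)
            if not outstanding:
                del self._shares_from_server[serverid]

    def load(self, serverid):
        # used to prefer the server with the fewest requests in flight
        return len(self._shares_from_server.get(serverid, ()))

t = DiversityTracker()
t.request_started("server-A", 0)
t.request_finished("server-A", 0)
print(t.load("server-A"))   # 0 with the fix; would stay at 1 with the bug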
[immutable/downloader/share.py: reduce get_serverid(), one left, update ext deps
warner@lothar.com**20110227011150
Ignore-this: d8d56dd8e7b280792b40105e13664554
test_download.py: create+check MyShare instances better, make sure they share
Server objects, now that finder.py cares
]
[immutable/downloader/finder.py: reduce use of get_serverid(), one left
warner@lothar.com**20110227011146
Ignore-this: 5785be173b491ae8a78faf5142892020
]
[immutable/offloaded.py: reduce use of get_serverid() a bit more
warner@lothar.com**20110227011142
Ignore-this: b48acc1b2ae1b311da7f3ba4ffba38f
]
[immutable/upload.py: reduce use of get_serverid()
warner@lothar.com**20110227011138
Ignore-this: ffdd7ff32bca890782119a6e9f1495f6
]
[immutable/checker.py: remove some uses of s.get_serverid(), not all
warner@lothar.com**20110227011134
Ignore-this: e480a37efa9e94e8016d826c492f626e
]
[add remaining get_* methods to storage_client.Server, NoNetworkServer, and
warner@lothar.com**20110227011132
Ignore-this: 6078279ddf42b179996a4b53bee8c421
MockIServer stubs
]
[upload.py: rearrange _make_trackers a bit, no behavior changes
warner@lothar.com**20110227011128
Ignore-this: 296d4819e2af452b107177aef6ebb40f
]
[happinessutil.py: finally rename merge_peers to merge_servers
warner@lothar.com**20110227011124
Ignore-this: c8cd381fea1dd888899cb71e4f86de6e
]
[test_upload.py: factor out FakeServerTracker
warner@lothar.com**20110227011120
Ignore-this: 6c182cba90e908221099472cc159325b
]
[test_upload.py: server-vs-tracker cleanup
warner@lothar.com**20110227011115
Ignore-this: 2915133be1a3ba456e8603885437e03
]
[happinessutil.py: server-vs-tracker cleanup
warner@lothar.com**20110227011111
Ignore-this: b856c84033562d7d718cae7cb01085a9
]
[upload.py: more tracker-vs-server cleanup
warner@lothar.com**20110227011107
Ignore-this: bb75ed2afef55e47c085b35def2de315
]
[upload.py: fix var names to avoid confusion between 'trackers' and 'servers'
warner@lothar.com**20110227011103
Ignore-this: 5d5e3415b7d2732d92f42413c25d205d
]
[refactor: s/peer/server/ in immutable/upload, happinessutil.py, test_upload
warner@lothar.com**20110227011100
Ignore-this: 7ea858755cbe5896ac212a925840fe68
No behavioral changes, just updating variable/method names and log messages.
The effects outside these three files should be minimal: some exception
messages changed (to say "server" instead of "peer"), and some internal class
names were changed. A few things still use "peer" to minimize external
changes, like UploadResults.timings["peer_selection"] and
happinessutil.merge_peers, which can be changed later.
]
[storage_client.py: clean up test_add_server/test_add_descriptor, remove .test_servers
warner@lothar.com**20110227011056
Ignore-this: efad933e78179d3d5fdcd6d1ef2b19cc
]
[test_client.py, upload.py: remove KiB/MiB/etc constants, and other dead code
warner@lothar.com**20110227011051
Ignore-this: dc83c5794c2afc4f81e592f689c0dc2d
]
[test: increase timeout on a network test because Francois's ARM machine hit that timeout
zooko@zooko.com**20110317165909
Ignore-this: 380c345cdcbd196268ca5b65664ac85b
I'm skeptical that the test was proceeding correctly but ran out of time; it seems more likely that it had hung. But if we raise the timeout to an even more extravagant number and the test still fails, we can be even more certain that it was never going to finish.
]
[docs/configuration.rst: add a "Frontend Configuration" section
Brian Warner **20110222014323
Ignore-this: 657018aa501fe4f0efef9851628444ca
this points to docs/frontends/*.rst, which were previously underlinked
]
[web/filenode.py: avoid calling req.finish() on closed HTTP connections. Closes #1366
"Brian Warner "**20110221061544
Ignore-this: 799d4de19933f2309b3c0c19a63bb888
]
[Add unit tests for cross_check_pkg_resources_versus_import, and a regression test for ref #1355. This requires a little refactoring to make it testable.
david-sarah@jacaranda.org**20110221015817
Ignore-this: 51d181698f8c20d3aca58b057e9c475a
]
[allmydata/__init__.py: .name was used in place of the correct .__name__ when printing an exception. Also, robustify string formatting by using %r instead of %s in some places. fixes #1355.
david-sarah@jacaranda.org**20110221020125
Ignore-this: b0744ed58f161bf188e037bad077fc48
]
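A tiny hypothetical example of the two fixes described above: exception classes expose __name__, not .name, and %r keeps unexpected values unambiguous in the report.

# Illustrative sketch only, not the code in allmydata/__init__.py.
try:
    import module_that_is_missing      # hypothetical dependency
except ImportError as e:
    # buggy: e.__class__.name raises AttributeError; __name__ is correct
    summary = "%s: %r" % (e.__class__.__name__, str(e))
    print(summary)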
[Refactor StorageFarmBroker handling of servers
Brian Warner **20110221015804
Ignore-this: 842144ed92f5717699b8f580eab32a51
Pass around IServer instance instead of (peerid, rref) tuple. Replace
"descriptor" with "server". Other replacements:
get_all_servers -> get_connected_servers/get_known_servers
get_servers_for_index -> get_servers_for_psi (now returns IServers)
This change still needs to be pushed further down: lots of code is now
getting the IServer and then distributing (peerid, rref) internally.
Instead, it ought to distribute the IServer internally and delay
extracting a serverid or rref until the last moment.
no_network.py was updated to retain parallelism.
]
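A rough sketch of the pattern change described above, with hypothetical names (the real IServer interface lives in storage_client.py): pass a server object around and extract the serverid or remote reference only at the last moment.

# Illustrative sketch only; the class and functions below are assumptions.
class FakeRemoteReference(object):
    def callRemote(self, methname, *args):
        return (methname, args)        # stand-in for a Foolscap remote call

class ServerHandle(object):
    """Stand-in for the IServer objects handed out by StorageFarmBroker."""
    def __init__(self, serverid, rref):
        self._serverid = serverid
        self._rref = rref
    def get_serverid(self):
        return self._serverid
    def get_rref(self):
        return self._rref

# before: callers received and re-distributed (peerid, rref) tuples
def old_style_query(peerid, rref, storage_index):
    return rref.callRemote("get_buckets", storage_index)

# after: callers receive the server object and defer extraction
def new_style_query(server, storage_index):
    return server.get_rref().callRemote("get_buckets", storage_index)

server = ServerHandle("v0-exampleserverid", FakeRemoteReference())
print(new_style_query(server, "exampleindex"))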
[TAG allmydata-tahoe-1.8.2
warner@lothar.com**20110131020101]
Patch bundle hash:
bf7fdfd73b42a2d3785f2f9045f89e153c9e909f