-rw-r--r--  gnu/local.mk                                              |   1
-rw-r--r--  gnu/packages/backup.scm                                   |  19
-rw-r--r--  gnu/packages/patches/borg-fix-hard-link-preloading.patch  | 157
3 files changed, 14 insertions(+), 163 deletions(-)
diff --git a/gnu/local.mk b/gnu/local.mk
index b0992547b4..9680212c06 100644
--- a/gnu/local.mk
+++ b/gnu/local.mk
@@ -690,7 +690,6 @@ dist_patch_DATA = \
   %D%/packages/patches/blender-2.79-newer-ffmpeg.patch		\
   %D%/packages/patches/blender-2.79-python-3.7-fix.patch	\
   %D%/packages/patches/boost-fix-icu-build.patch		\
-  %D%/packages/patches/borg-fix-hard-link-preloading.patch	\
   %D%/packages/patches/byobu-writable-status.patch		\
   %D%/packages/patches/calibre-no-updates-dialog.patch		\
   %D%/packages/patches/calibre-remove-test-bs4.patch		\
diff --git a/gnu/packages/backup.scm b/gnu/packages/backup.scm
index 70cc06c736..0bd894f958 100644
--- a/gnu/packages/backup.scm
+++ b/gnu/packages/backup.scm
@@ -511,15 +511,13 @@ detection, and lossless compression.")
 (define-public borg
   (package
     (name "borg")
-    (version "1.1.9")
+    (version "1.1.10")
     (source
      (origin
        (method url-fetch)
        (uri (pypi-uri "borgbackup" version))
        (sha256
-        (base32
-         "0x95nhv4h34m8cxycbwc4xdz350saaxlgh727b23bgn4ci7gh3vx"))
-       (patches (search-patches "borg-fix-hard-link-preloading.patch"))
+        (base32 "1pp70p4n5kamvcbl4d8021ggrxhyykmg9isjg4yd3wags8b19d7g"))
        (modules '((guix build utils)))
        (snippet
         '(begin
@@ -541,7 +539,12 @@ detection, and lossless compression.")
           ;; Remove bundled shared libraries.
           (with-directory-excursion "src/borg/algorithms"
             (for-each delete-file-recursively
-                      (list "blake2" "lz4" "zstd")))
+                      (list "blake2" "lz4" "msgpack" "zstd")))
+          ;; Purge some msgpack references from setup.py or the resulting
+          ;; sources will be unbuildable.
+          (substitute* "setup.py"
+            ((".*Extension\\('borg\\.algorithms\\.msgpack\\..*") "")
+            (("msgpack_packer_source, msgpack_unpacker_source") ""))
           #t))))
    (build-system python-build-system)
    (arguments
@@ -565,6 +568,12 @@ detection, and lossless compression.")
             ;; HOME=/homeless-shelter.
             (setenv "HOME" "/tmp")
             #t)))
+        (add-after 'unpack 'use-system-msgpack
+          (lambda _
+            (substitute* "src/borg/helpers.py"
+              (("prefer_system_msgpack = False")
+               "prefer_system_msgpack = True"))
+            #t))
         ;; The tests need to be run after Borg is installed.
         (delete 'check)
         (add-after 'install 'check
diff --git a/gnu/packages/patches/borg-fix-hard-link-preloading.patch b/gnu/packages/patches/borg-fix-hard-link-preloading.patch
deleted file mode 100644
index 92a4e22674..0000000000
--- a/gnu/packages/patches/borg-fix-hard-link-preloading.patch
+++ /dev/null
@@ -1,157 +0,0 @@
-Fix a bug that would cause the test suite to hang:
-
-https://github.com/borgbackup/borg/issues/4350
-
-Patch copied from upstream source repository:
-
-https://github.com/borgbackup/borg/commit/18242ab9e2f26c450b8507aa1d5eceadab8ad027
-
-From 18242ab9e2f26c450b8507aa1d5eceadab8ad027 Mon Sep 17 00:00:00 2001
-From: Thomas Waldmann <tw@waldmann-edv.de>
-Date: Thu, 2 May 2019 21:02:26 +0200
-Subject: [PATCH] preload chunks for hardlink slaves w/o preloaded master,
- fixes #4350
-
-also split the hardlink extraction test into 2 tests.
-
-(cherry picked from commit f33f318d816505161d1449a02ddfdeb97d6fe80a)
----
- src/borg/archive.py            | 42 +++++++++++++++++++++++++++++-----
- src/borg/archiver.py           |  5 ++--
- src/borg/testsuite/archiver.py | 20 +++++++++-------
- 3 files changed, 51 insertions(+), 16 deletions(-)
-
-diff --git a/src/borg/archive.py b/src/borg/archive.py
-index adc1f42c..0793672a 100644
---- a/src/borg/archive.py
-+++ b/src/borg/archive.py
-@@ -192,7 +192,7 @@ def __init__(self, repository, key):
-         self.repository = repository
-         self.key = key
-
--    def unpack_many(self, ids, filter=None, preload=False):
-+    def unpack_many(self, ids, filter=None, partial_extract=False, preload=False, hardlink_masters=None):
-         """
-         Return iterator of items.
-
-@@ -209,12 +209,40 @@ def unpack_many(self, ids, filter=None, preload=False):
-             for item in items:
-                 if 'chunks' in item:
-                     item.chunks = [ChunkListEntry(*e) for e in item.chunks]
-+
-+            def preload(chunks):
-+                self.repository.preload([c.id for c in chunks])
-+
-             if filter:
-                 items = [item for item in items if filter(item)]
-+
-             if preload:
--                for item in items:
--                    if 'chunks' in item:
--                        self.repository.preload([c.id for c in item.chunks])
-+                if filter and partial_extract:
-+                    # if we do only a partial extraction, it gets a bit
-+                    # complicated with computing the preload items: if a hardlink master item is not
-+                    # selected (== not extracted), we will still need to preload its chunks if a
-+                    # corresponding hardlink slave is selected (== is extracted).
-+                    # due to a side effect of the filter() call, we now have hardlink_masters dict populated.
-+                    masters_preloaded = set()
-+                    for item in items:
-+                        if 'chunks' in item:  # regular file, maybe a hardlink master
-+                            preload(item.chunks)
-+                            # if this is a hardlink master, remember that we already preloaded it:
-+                            if 'source' not in item and hardlinkable(item.mode) and item.get('hardlink_master', True):
-+                                masters_preloaded.add(item.path)
-+                        elif 'source' in item and hardlinkable(item.mode):  # hardlink slave
-+                            source = item.source
-+                            if source not in masters_preloaded:
-+                                # we only need to preload *once* (for the 1st selected slave)
-+                                chunks, _ = hardlink_masters[source]
-+                                preload(chunks)
-+                                masters_preloaded.add(source)
-+                else:
-+                    # easy: we do not have a filter, thus all items are selected, thus we need to preload all chunks.
-+                    for item in items:
-+                        if 'chunks' in item:
-+                            preload(item.chunks)
-+
-             for item in items:
-                 yield item
-
-@@ -433,8 +461,10 @@ def item_filter(self, item, filter=None):
-                 return False
-         return filter(item) if filter else True
-
--    def iter_items(self, filter=None, preload=False):
--        for item in self.pipeline.unpack_many(self.metadata.items, preload=preload,
-+    def iter_items(self, filter=None, partial_extract=False, preload=False, hardlink_masters=None):
-+        assert not (filter and partial_extract and preload) or hardlink_masters is not None
-+        for item in self.pipeline.unpack_many(self.metadata.items, partial_extract=partial_extract,
-+                                              preload=preload, hardlink_masters=hardlink_masters,
-                                               filter=lambda item: self.item_filter(item, filter)):
-             yield item
-
-diff --git a/src/borg/archiver.py b/src/borg/archiver.py
-index 957959d6..dcc20455 100644
---- a/src/borg/archiver.py
-+++ b/src/borg/archiver.py
-@@ -755,7 +755,8 @@ def peek_and_store_hardlink_masters(item, matched):
-         else:
-             pi = None
-
--        for item in archive.iter_items(filter, preload=True):
-+        for item in archive.iter_items(filter, partial_extract=partial_extract,
-+                                       preload=True, hardlink_masters=hardlink_masters):
-             orig_path = item.path
-             if strip_components:
-                 item.path = os.sep.join(orig_path.split(os.sep)[strip_components:])
-@@ -997,7 +998,7 @@ def item_to_tarinfo(item, original_path):
-                     return None, stream
-             return tarinfo, stream
-
--        for item in archive.iter_items(filter, preload=True):
-+        for item in archive.iter_items(filter, preload=True, hardlink_masters=hardlink_masters):
-             orig_path = item.path
-             if strip_components:
-                 item.path = os.sep.join(orig_path.split(os.sep)[strip_components:])
-diff --git a/src/borg/testsuite/archiver.py b/src/borg/testsuite/archiver.py
-index c35ad800..935b3d79 100644
---- a/src/borg/testsuite/archiver.py
-+++ b/src/borg/testsuite/archiver.py
-@@ -823,7 +823,18 @@ def test_mount_hardlinks(self):
-             assert open('input/dir1/subdir/hardlink', 'rb').read() == b'123456'
-
-     @requires_hardlinks
--    def test_extract_hardlinks(self):
-+    def test_extract_hardlinks1(self):
-+        self._extract_hardlinks_setup()
-+        with changedir('output'):
-+            self.cmd('extract', self.repository_location + '::test')
-+            assert os.stat('input/source').st_nlink == 4
-+            assert os.stat('input/abba').st_nlink == 4
-+            assert os.stat('input/dir1/hardlink').st_nlink == 4
-+            assert os.stat('input/dir1/subdir/hardlink').st_nlink == 4
-+            assert open('input/dir1/subdir/hardlink', 'rb').read() == b'123456'
-+
-+    @requires_hardlinks
-+    def test_extract_hardlinks2(self):
-         self._extract_hardlinks_setup()
-         with changedir('output'):
-             self.cmd('extract', self.repository_location + '::test', '--strip-components', '2')
-@@ -839,13 +850,6 @@ def test_extract_hardlinks(self):
-             assert open('input/dir1/subdir/hardlink', 'rb').read() == b'123456'
-             assert os.stat('input/dir1/aaaa').st_nlink == 2
-             assert os.stat('input/dir1/source2').st_nlink == 2
--        with changedir('output'):
--            self.cmd('extract', self.repository_location + '::test')
--            assert os.stat('input/source').st_nlink == 4
--            assert os.stat('input/abba').st_nlink == 4
--            assert os.stat('input/dir1/hardlink').st_nlink == 4
--            assert os.stat('input/dir1/subdir/hardlink').st_nlink == 4
--            assert open('input/dir1/subdir/hardlink', 'rb').read() == b'123456'
-
-     def test_extract_include_exclude(self):
-         self.cmd('init', '--encryption=repokey', self.repository_location)
----
-2.21.0
-
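
For reference, the hard-link preloading fix this patch carried (and that borgbackup 1.1.10 now ships upstream) boils down to: during a partial extraction, a selected hardlink slave must still trigger preloading of its master's chunks even when the master itself was filtered out, and each master should be preloaded only once. The sketch below is a standalone paraphrase of that logic, not Borg's actual code: items are plain dicts rather than borg.item.Item objects, hardlinkable() is a simplified stand-in for Borg's helper of the same name, and preload() stands for whatever fetches chunk ids ahead of time (self.repository.preload in the patched unpack_many).

# Standalone paraphrase of the preload logic from upstream commit 18242ab9;
# illustrative only.
import stat

def hardlinkable(mode):
    # Simplified stand-in: only regular files are treated as hard-linkable here.
    return stat.S_ISREG(mode)

def preload_chunks(selected_items, hardlink_masters, preload):
    """Preload chunks for every selected item; a hardlink slave also triggers
    a one-time preload of its (possibly unselected) master's chunks."""
    masters_preloaded = set()
    for item in selected_items:
        if 'chunks' in item:                        # regular file, maybe a master
            preload(item['chunks'])
            if 'source' not in item and hardlinkable(item['mode']):
                masters_preloaded.add(item['path'])
        elif 'source' in item and hardlinkable(item['mode']):  # hardlink slave
            source = item['source']
            if source not in masters_preloaded:     # preload once, for the 1st slave
                chunks, _ = hardlink_masters[source]
                preload(chunks)
                masters_preloaded.add(source)

# The master 'a' was filtered out, but its slave 'b' was selected, so the
# master's chunks are still preloaded exactly once.
mode = stat.S_IFREG | 0o644
selected = [{'path': 'b', 'source': 'a', 'mode': mode}]
masters = {'a': (['chunk-1', 'chunk-2'], None)}
preload_chunks(selected, masters, lambda chunks: print('preloading', chunks))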