Diffstat (limited to 'gnu/packages/patches')
-rw-r--r--  gnu/packages/patches/audacity-ffmpeg-fallback.patch | 23
-rw-r--r--  gnu/packages/patches/bees-beesd-honor-destdir-on-installation.patch | 40
-rw-r--r--  gnu/packages/patches/ceph-boost-compat.patch | 18
-rw-r--r--  gnu/packages/patches/ceph-rocksdb-compat.patch | 303
-rw-r--r--  gnu/packages/patches/clang-15.0-libc-search-path.patch | 95
-rw-r--r--  gnu/packages/patches/crc32c-unbundle-googletest.patch | 21
-rw-r--r--  gnu/packages/patches/diffoscope-fix-llvm-test.patch | 28
-rw-r--r--  gnu/packages/patches/ecl-16-format-directive-limit.patch | 83
-rw-r--r--  gnu/packages/patches/ecl-16-ignore-stderr-write-error.patch | 17
-rw-r--r--  gnu/packages/patches/ecl-16-libffi.patch | 16
-rw-r--r--  gnu/packages/patches/emacs-libgit-use-system-libgit2.patch | 88
-rw-r--r--  gnu/packages/patches/emacs-native-comp-driver-options.patch | 17
-rw-r--r--  gnu/packages/patches/emacs-telega-path-placeholder.patch | 41
-rw-r--r--  gnu/packages/patches/flashrom-fix-building-on-aarch64.patch | 89
-rw-r--r--  gnu/packages/patches/fp16-implicit-double.patch | 23
-rw-r--r--  gnu/packages/patches/gemmi-fix-pegtl-usage.patch | 31
-rw-r--r--  gnu/packages/patches/gemmi-fix-sajson-types.patch | 11
-rw-r--r--  gnu/packages/patches/ghc-4.patch | 708
-rw-r--r--  gnu/packages/patches/giara-fix-login.patch | 27
-rw-r--r--  gnu/packages/patches/gromacs-tinyxml2.patch | 40
-rw-r--r--  gnu/packages/patches/guile-email-fix-tests.patch | 35
-rw-r--r--  gnu/packages/patches/guile-fibers-fd-finalizer-leak.patch | 54
-rw-r--r--  gnu/packages/patches/icecat-use-older-reveal-hidden-html.patch | 70
-rw-r--r--  gnu/packages/patches/jami-disable-integration-tests.patch | 111
-rw-r--r--  gnu/packages/patches/jami-fix-crash-on-block-contact.patch | 32
-rw-r--r--  gnu/packages/patches/jami-libjami-headers-search.patch | 109
-rw-r--r--  gnu/packages/patches/julia-SOURCE_DATE_EPOCH-mtime.patch | 5
-rw-r--r--  gnu/packages/patches/julia-allow-parallel-build.patch | 32
-rw-r--r--  gnu/packages/patches/julia-tracker-16-compat.patch | 40
-rw-r--r--  gnu/packages/patches/kcontacts-incorrect-country-name.patch | 85
-rw-r--r--  gnu/packages/patches/kde-cli-tools-delay-mime-db.patch | 26
-rw-r--r--  gnu/packages/patches/kodi-increase-test-timeout.patch | 18
-rw-r--r--  gnu/packages/patches/kodi-set-libcurl-ssl-parameters.patch | 13
-rw-r--r--  gnu/packages/patches/kodi-skip-test-449.patch | 53
-rw-r--r--  gnu/packages/patches/kwayland-skip-flaky-test.patch | 13
-rw-r--r--  gnu/packages/patches/libgeotiff-fix-tests-with-proj-9.1.1.patch | 100
-rw-r--r--  gnu/packages/patches/libksysguard-qdiriterator-follow-symlinks.patch | 24
-rw-r--r--  gnu/packages/patches/libunwind-julia-fix-GCC10-fno-common.patch | 40
-rw-r--r--  gnu/packages/patches/linux-libre-infodocs-target.patch | 88
-rw-r--r--  gnu/packages/patches/lirc-reproducible-build.patch | 69
-rw-r--r--  gnu/packages/patches/luajit-no_ldconfig.patch | 31
-rw-r--r--  gnu/packages/patches/memtest86+-build-reproducibly.patch | 115
-rw-r--r--  gnu/packages/patches/mia-vtk-version.patch | 15
-rw-r--r--  gnu/packages/patches/mia-vtk92.patch | 14
-rw-r--r--  gnu/packages/patches/mrustc-riscv64-support.patch | 48
-rw-r--r--  gnu/packages/patches/nautilus-extension-search-path.patch | 75
-rw-r--r--  gnu/packages/patches/oath-toolkit-xmlsec-compat.patch | 79
-rw-r--r--  gnu/packages/patches/openbios-gcc-warnings.patch | 95
-rw-r--r--  gnu/packages/patches/petri-foo-0.1.87-fix-recent-file-not-exist.patch | 24
-rw-r--r--  gnu/packages/patches/picard-fix-id3-rename-test.patch | 11
-rw-r--r--  gnu/packages/patches/pocketfft-cpp-prefer-preprocessor-if.patch | 109
-rw-r--r--  gnu/packages/patches/protobuf-fix-build-on-32bit.patch | 139
-rw-r--r--  gnu/packages/patches/public-inbox-fix-spawn-test.patch | 43
-rw-r--r--  gnu/packages/patches/python-afdko-suppress-copyright-test.patch | 20
-rw-r--r--  gnu/packages/patches/python-apsw-3.39.2.1-test-fix.patch | 27
-rw-r--r--  gnu/packages/patches/python-flask-restful-werkzeug-compat.patch | 36
-rw-r--r--  gnu/packages/patches/python-louvain-fix-test.patch | 15
-rw-r--r--  gnu/packages/patches/python-pypdf-annotate-tests-appropriately.patch | 96
-rw-r--r--  gnu/packages/patches/python-seaborn-2690.patch | 268
-rw-r--r--  gnu/packages/patches/python-seaborn-kde-test.patch | 36
-rw-r--r--  gnu/packages/patches/python-telingo-fix-comparison.patch | 19
-rw-r--r--  gnu/packages/patches/qtwayland-cleanup-callbacks.patch | 52
-rw-r--r--  gnu/packages/patches/qtwayland-dont-recreate-callbacks.patch | 76
-rw-r--r--  gnu/packages/patches/r-mixedpower-r2power.patch | 26
-rw-r--r--  gnu/packages/patches/racket-backport-8.6-cross-install.patch | 126
-rw-r--r--  gnu/packages/patches/racket-backport-8.6-docindex-write.patch | 36
-rw-r--r--  gnu/packages/patches/racket-backport-8.6-hurd.patch | 609
-rw-r--r--  gnu/packages/patches/racket-backport-8.6-zuo.patch | 481
-rw-r--r--  gnu/packages/patches/racket-backport-8.7-pkg-strip.patch | 90
-rw-r--r--  gnu/packages/patches/rdkit-unbundle-external-dependencies.patch | 384
-rw-r--r--  gnu/packages/patches/rottlog-direntry.patch | 18
-rw-r--r--  gnu/packages/patches/ruby-sanitize-system-libxml.patch | 38
-rw-r--r--  gnu/packages/patches/rust-1.64-fix-riscv64-bootstrap.patch | 565
-rw-r--r--  gnu/packages/patches/rust-shell2batch-lint-fix.patch | 25
-rw-r--r--  gnu/packages/patches/rw-igraph-0.10.patch | 17
-rw-r--r--  gnu/packages/patches/sajson-build-with-gcc10.patch | 45
-rw-r--r--  gnu/packages/patches/sajson-for-gemmi-numbers-as-strings.patch | 195
-rw-r--r--  gnu/packages/patches/scotch-build-parallelism.patch | 39
-rw-r--r--  gnu/packages/patches/scotch-integer-declarations.patch | 37
-rw-r--r--  gnu/packages/patches/spectre-meltdown-checker-externalize-fwdb.patch | 244
-rw-r--r--  gnu/packages/patches/spectre-meltdown-checker-find-kernel.patch | 26
-rw-r--r--  gnu/packages/patches/sssd-optional-systemd.patch | 45
-rw-r--r--  gnu/packages/patches/tbb-fix-test-on-aarch64.patch | 35
-rw-r--r--  gnu/packages/patches/tbb-other-arches.patch | 8
-rw-r--r--  gnu/packages/patches/telegram-desktop-allow-disable-libtgvoip.patch | 125
-rw-r--r--  gnu/packages/patches/timescaledb-flaky-test.patch | 107
-rw-r--r--  gnu/packages/patches/timewarrior-time-sensitive-tests.patch | 163
-rw-r--r--  gnu/packages/patches/tinydir-fix-cbehave-test.patch | 16
-rw-r--r--  gnu/packages/patches/u-boot-allow-disabling-openssl.patch | 66
-rw-r--r--  gnu/packages/patches/u-boot-infodocs-target.patch | 84
-rw-r--r--  gnu/packages/patches/u-boot-patman-guix-integration.patch | 1244
-rw-r--r--  gnu/packages/patches/ultrastar-deluxe-no-freesans.patch | 31
-rw-r--r--  gnu/packages/patches/upx-CVE-2021-20285.patch | 76
-rw-r--r--  gnu/packages/patches/vtk-fix-freetypetools-build-failure.patch | 32
-rw-r--r--  gnu/packages/patches/wacomtablet-add-missing-includes.patch | 11
-rw-r--r--  gnu/packages/patches/wacomtablet-qt5.15.patch | 23
-rw-r--r--  gnu/packages/patches/wdl-link-libs-and-fix-jnetlib.patch | 53
-rw-r--r--  gnu/packages/patches/webrtc-for-telegram-desktop-fix-gcc12-cstdint.patch | 21
98 files changed, 5406 insertions, 3914 deletions
diff --git a/gnu/packages/patches/audacity-ffmpeg-fallback.patch b/gnu/packages/patches/audacity-ffmpeg-fallback.patch
index b78956070a..d18583c9bd 100644
--- a/gnu/packages/patches/audacity-ffmpeg-fallback.patch
+++ b/gnu/packages/patches/audacity-ffmpeg-fallback.patch
@@ -8,13 +8,11 @@ it. This dynamic loading mechanism fails to properly locate libraries outside
of LD_LIBRARY_PATH.
See <https://issues.guix.gnu.org/53591>.
----
- libraries/lib-ffmpeg-support/CMakeLists.txt | 8 ++++++++
- libraries/lib-ffmpeg-support/FFmpegFunctions.cpp | 12 ++++++++++++
- 2 files changed, 20 insertions(+)
+
+Bugs-added-by: Marius Bakke <marius@gnu.org>
diff --git a/libraries/lib-ffmpeg-support/CMakeLists.txt b/libraries/lib-ffmpeg-support/CMakeLists.txt
-index 8c5f06d7c..00810e4d0 100644
+index b8803a1f5..f86559cca 100644
--- a/libraries/lib-ffmpeg-support/CMakeLists.txt
+++ b/libraries/lib-ffmpeg-support/CMakeLists.txt
@@ -1,5 +1,7 @@
@@ -25,7 +23,7 @@ index 8c5f06d7c..00810e4d0 100644
set( SOURCES
FFmpegTypes.h
-@@ -100,6 +102,12 @@ if (${_OPT}use_ffmpeg)
+@@ -107,6 +109,12 @@ if (${_OPT}use_ffmpeg)
list(APPEND DEFINITIONS PRIVATE _DARWIN_C_SOURCE )
endif()
@@ -39,10 +37,10 @@ index 8c5f06d7c..00810e4d0 100644
"${DEFINITIONS}" ""
)
diff --git a/libraries/lib-ffmpeg-support/FFmpegFunctions.cpp b/libraries/lib-ffmpeg-support/FFmpegFunctions.cpp
-index 66d085a0b..4eeb4aed3 100644
+index 55d6f676b..46640ef83 100644
--- a/libraries/lib-ffmpeg-support/FFmpegFunctions.cpp
+++ b/libraries/lib-ffmpeg-support/FFmpegFunctions.cpp
-@@ -238,6 +238,18 @@ struct FFmpegFunctions::Private final
+@@ -246,6 +246,18 @@ struct FFmpegFunctions::Private final
if (library->IsLoaded())
return library;
@@ -58,9 +56,6 @@ index 66d085a0b..4eeb4aed3 100644
+ }
+#endif
+
- // Loading has failed.
- // wxLogSysError doesn't report errors correctly on *NIX
- #if defined(_WIN32)
---
-2.34.0
-
+ return {};
+ }
+ };
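The fallback added above boils down to: when loading by bare soname through the regular search path fails, retry with an absolute directory supplied at build time.  A minimal sketch of that idea in plain C, assuming a build-time FFMPEG_FALLBACK_DIR macro; the actual patch works inside Audacity's C++ loader and takes the directory from CMake:

#include <dlfcn.h>
#include <stdio.h>

#ifndef FFMPEG_FALLBACK_DIR
# define FFMPEG_FALLBACK_DIR "/usr/lib"   /* stand-in; the real build passes the store path */
#endif

static void *load_ffmpeg(const char *soname)
{
  void *handle = dlopen(soname, RTLD_NOW | RTLD_GLOBAL);
  if (handle != NULL)
    return handle;                        /* found via the regular search path */

  char path[4096];
  snprintf(path, sizeof path, "%s/%s", FFMPEG_FALLBACK_DIR, soname);
  return dlopen(path, RTLD_NOW | RTLD_GLOBAL);  /* NULL if the fallback fails too */
}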
diff --git a/gnu/packages/patches/bees-beesd-honor-destdir-on-installation.patch b/gnu/packages/patches/bees-beesd-honor-destdir-on-installation.patch
new file mode 100644
index 0000000000..93817f42cf
--- /dev/null
+++ b/gnu/packages/patches/bees-beesd-honor-destdir-on-installation.patch
@@ -0,0 +1,40 @@
+From 66b00f8a972ebb4da68f7aa0d0656f43ce2a2c3a Mon Sep 17 00:00:00 2001
+From: Hilton Chain <hako@ultrarare.space>
+Date: Fri, 23 Dec 2022 11:04:46 +0800
+Subject: [PATCH] beesd: Honor DESTDIR on installation.
+
+Co-authored-by: Adam Faiz <adam.faiz@disroot.org>
+Signed-off-by: Hilton Chain <hako@ultrarare.space>
+---
+ Defines.mk | 1 +
+ scripts/beesd.in | 2 +-
+ 2 files changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/Defines.mk b/Defines.mk
+index 9e8df40..e5394ba 100644
+--- a/Defines.mk
++++ b/Defines.mk
+@@ -2,6 +2,7 @@ MAKE += PREFIX=$(PREFIX) LIBEXEC_PREFIX=$(LIBEXEC_PREFIX) ETC_PREFIX=$(ETC_PREFI
+
+ define TEMPLATE_COMPILER =
+ sed $< >$@ \
++ -e's#@DESTDIR@#$(DESTDIR)#' \
+ -e's#@PREFIX@#$(PREFIX)#' \
+ -e's#@ETC_PREFIX@#$(ETC_PREFIX)#' \
+ -e's#@LIBEXEC_PREFIX@#$(LIBEXEC_PREFIX)#'
+diff --git a/scripts/beesd.in b/scripts/beesd.in
+index 174bb6c..35d04aa 100755
+--- a/scripts/beesd.in
++++ b/scripts/beesd.in
+@@ -15,7 +15,7 @@ readonly AL128K="$((128*1024))"
+ readonly AL16M="$((16*1024*1024))"
+ readonly CONFIG_DIR=@ETC_PREFIX@/bees/
+
+-readonly bees_bin=$(realpath @LIBEXEC_PREFIX@/bees)
++readonly bees_bin=$(realpath @DESTDIR@/@LIBEXEC_PREFIX@/bees)
+
+ command -v "$bees_bin" &> /dev/null || ERRO "Missing 'bees' agent"
+
+--
+2.38.1
+
diff --git a/gnu/packages/patches/ceph-boost-compat.patch b/gnu/packages/patches/ceph-boost-compat.patch
deleted file mode 100644
index 1aecfbbed5..0000000000
--- a/gnu/packages/patches/ceph-boost-compat.patch
+++ /dev/null
@@ -1,18 +0,0 @@
-Add extra includes required for Boost 1.75 and later.
-
-Taken from upstram:
-
- https://github.com/ceph/ceph/commit/ebf3a0398f18eab67d2ba25e6a10b41ff140f6a4
-
-diff --git a/src/rgw/rgw_string.h b/src/rgw/rgw_string.h
-index 257daa9c1fe6e..90e64f98a2587 100644
---- a/src/rgw/rgw_string.h
-+++ b/src/rgw/rgw_string.h
-@@ -8,5 +8,7 @@
- #include <stdlib.h>
- #include <limits.h>
- #include <string_view>
-+#include <string>
-+#include <stdexcept>
-
- #include <boost/container/small_vector.hpp>
diff --git a/gnu/packages/patches/ceph-rocksdb-compat.patch b/gnu/packages/patches/ceph-rocksdb-compat.patch
deleted file mode 100644
index 9fb9b0caeb..0000000000
--- a/gnu/packages/patches/ceph-rocksdb-compat.patch
+++ /dev/null
@@ -1,303 +0,0 @@
-Adjust for newer versions of RocksDB.
-
-Taken from upstream:
-
- https://github.com/ceph/ceph/pull/42815
- https://github.com/ceph/ceph/commit/ff7f192ea3cf88ca1098bcf9396ff4f8ed1e8792.diff
-
-diff --git a/src/kv/rocksdb_cache/BinnedLRUCache.cc b/src/kv/rocksdb_cache/BinnedLRUCache.cc
-index 0d657883e92de..47c56e2ddd769 100644
---- a/src/kv/rocksdb_cache/BinnedLRUCache.cc
-+++ b/src/kv/rocksdb_cache/BinnedLRUCache.cc
-@@ -151,13 +151,20 @@ void BinnedLRUCacheShard::EraseUnRefEntries() {
- }
- }
-
--void BinnedLRUCacheShard::ApplyToAllCacheEntries(void (*callback)(void*, size_t),
-- bool thread_safe) {
-+void BinnedLRUCacheShard::ApplyToAllCacheEntries(
-+ const std::function<void(const rocksdb::Slice& key,
-+ void* value,
-+ size_t charge,
-+ DeleterFn)>& callback,
-+ bool thread_safe)
-+{
- if (thread_safe) {
- mutex_.lock();
- }
- table_.ApplyToAllCacheEntries(
-- [callback](BinnedLRUHandle* h) { callback(h->value, h->charge); });
-+ [callback](BinnedLRUHandle* h) {
-+ callback(h->key(), h->value, h->charge, h->deleter);
-+ });
- if (thread_safe) {
- mutex_.unlock();
- }
-@@ -345,7 +352,7 @@ bool BinnedLRUCacheShard::Release(rocksdb::Cache::Handle* handle, bool force_era
-
- rocksdb::Status BinnedLRUCacheShard::Insert(const rocksdb::Slice& key, uint32_t hash, void* value,
- size_t charge,
-- void (*deleter)(const rocksdb::Slice& key, void* value),
-+ DeleterFn deleter,
- rocksdb::Cache::Handle** handle, rocksdb::Cache::Priority priority) {
- auto e = new BinnedLRUHandle();
- rocksdb::Status s;
-@@ -464,6 +471,12 @@ std::string BinnedLRUCacheShard::GetPrintableOptions() const {
- return std::string(buffer);
- }
-
-+DeleterFn BinnedLRUCacheShard::GetDeleter(rocksdb::Cache::Handle* h) const
-+{
-+ auto* handle = reinterpret_cast<BinnedLRUHandle*>(h);
-+ return handle->deleter;
-+}
-+
- BinnedLRUCache::BinnedLRUCache(CephContext *c,
- size_t capacity,
- int num_shard_bits,
-@@ -519,6 +532,13 @@ void BinnedLRUCache::DisownData() {
- #endif // !__SANITIZE_ADDRESS__
- }
-
-+#if (ROCKSDB_MAJOR >= 6 && ROCKSDB_MINOR >= 22)
-+DeleterFn BinnedLRUCache::GetDeleter(Handle* handle) const
-+{
-+ return reinterpret_cast<const BinnedLRUHandle*>(handle)->deleter;
-+}
-+#endif
-+
- size_t BinnedLRUCache::TEST_GetLRUSize() {
- size_t lru_size_of_all_shards = 0;
- for (int i = 0; i < num_shards_; i++) {
-diff --git a/src/kv/rocksdb_cache/BinnedLRUCache.h b/src/kv/rocksdb_cache/BinnedLRUCache.h
-index 85608be0e5734..88bf4502e8927 100644
---- a/src/kv/rocksdb_cache/BinnedLRUCache.h
-+++ b/src/kv/rocksdb_cache/BinnedLRUCache.h
-@@ -56,7 +56,7 @@ std::shared_ptr<rocksdb::Cache> NewBinnedLRUCache(
-
- struct BinnedLRUHandle {
- void* value;
-- void (*deleter)(const rocksdb::Slice&, void* value);
-+ DeleterFn deleter;
- BinnedLRUHandle* next_hash;
- BinnedLRUHandle* next;
- BinnedLRUHandle* prev;
-@@ -189,7 +189,7 @@ class alignas(CACHE_LINE_SIZE) BinnedLRUCacheShard : public CacheShard {
- // Like Cache methods, but with an extra "hash" parameter.
- virtual rocksdb::Status Insert(const rocksdb::Slice& key, uint32_t hash, void* value,
- size_t charge,
-- void (*deleter)(const rocksdb::Slice& key, void* value),
-+ DeleterFn deleter,
- rocksdb::Cache::Handle** handle,
- rocksdb::Cache::Priority priority) override;
- virtual rocksdb::Cache::Handle* Lookup(const rocksdb::Slice& key, uint32_t hash) override;
-@@ -205,13 +205,19 @@ class alignas(CACHE_LINE_SIZE) BinnedLRUCacheShard : public CacheShard {
- virtual size_t GetUsage() const override;
- virtual size_t GetPinnedUsage() const override;
-
-- virtual void ApplyToAllCacheEntries(void (*callback)(void*, size_t),
-- bool thread_safe) override;
-+ virtual void ApplyToAllCacheEntries(
-+ const std::function<void(const rocksdb::Slice& key,
-+ void* value,
-+ size_t charge,
-+ DeleterFn)>& callback,
-+ bool thread_safe) override;
-
- virtual void EraseUnRefEntries() override;
-
- virtual std::string GetPrintableOptions() const override;
-
-+ virtual DeleterFn GetDeleter(rocksdb::Cache::Handle* handle) const override;
-+
- void TEST_GetLRUList(BinnedLRUHandle** lru, BinnedLRUHandle** lru_low_pri);
-
- // Retrieves number of elements in LRU, for unit test purpose only
-@@ -304,7 +310,9 @@ class BinnedLRUCache : public ShardedCache {
- virtual size_t GetCharge(Handle* handle) const override;
- virtual uint32_t GetHash(Handle* handle) const override;
- virtual void DisownData() override;
--
-+#if (ROCKSDB_MAJOR >= 6 && ROCKSDB_MINOR >= 22)
-+ virtual DeleterFn GetDeleter(Handle* handle) const override;
-+#endif
- // Retrieves number of elements in LRU, for unit test purpose only
- size_t TEST_GetLRUSize();
- // Sets the high pri pool ratio
-diff --git a/src/kv/rocksdb_cache/ShardedCache.cc b/src/kv/rocksdb_cache/ShardedCache.cc
-index 367140a94d8be..6cbd89ad6472c 100644
---- a/src/kv/rocksdb_cache/ShardedCache.cc
-+++ b/src/kv/rocksdb_cache/ShardedCache.cc
-@@ -44,7 +44,7 @@ void ShardedCache::SetStrictCapacityLimit(bool strict_capacity_limit) {
- }
-
- rocksdb::Status ShardedCache::Insert(const rocksdb::Slice& key, void* value, size_t charge,
-- void (*deleter)(const rocksdb::Slice& key, void* value),
-+ DeleterFn deleter,
- rocksdb::Cache::Handle** handle, Priority priority) {
- uint32_t hash = HashSlice(key);
- return GetShard(Shard(hash))
-@@ -109,13 +109,36 @@ size_t ShardedCache::GetPinnedUsage() const {
- return usage;
- }
-
-+#if (ROCKSDB_MAJOR >= 6 && ROCKSDB_MINOR >= 22)
-+DeleterFn ShardedCache::GetDeleter(Handle* handle) const
-+{
-+ uint32_t hash = GetHash(handle);
-+ return GetShard(Shard(hash))->GetDeleter(handle);
-+}
-+
-+void ShardedCache::ApplyToAllEntries(
-+ const std::function<void(const rocksdb::Slice& key, void* value, size_t charge,
-+ DeleterFn deleter)>& callback,
-+ const ApplyToAllEntriesOptions& opts)
-+{
-+ int num_shards = 1 << num_shard_bits_;
-+ for (int s = 0; s < num_shards; s++) {
-+ GetShard(s)->ApplyToAllCacheEntries(callback, true /* thread_safe */);
-+ }
-+}
-+#else
- void ShardedCache::ApplyToAllCacheEntries(void (*callback)(void*, size_t),
- bool thread_safe) {
- int num_shards = 1 << num_shard_bits_;
- for (int s = 0; s < num_shards; s++) {
-- GetShard(s)->ApplyToAllCacheEntries(callback, thread_safe);
-+ GetShard(s)->ApplyToAllCacheEntries(
-+ [callback](const rocksdb::Slice&, void* value, size_t charge, DeleterFn) {
-+ callback(value, charge);
-+ },
-+ thread_safe);
- }
- }
-+#endif
-
- void ShardedCache::EraseUnRefEntries() {
- int num_shards = 1 << num_shard_bits_;
-@@ -131,7 +154,7 @@ std::string ShardedCache::GetPrintableOptions() const {
- char buffer[kBufferSize];
- {
- std::lock_guard<std::mutex> l(capacity_mutex_);
-- snprintf(buffer, kBufferSize, " capacity : %" ROCKSDB_PRIszt "\n",
-+ snprintf(buffer, kBufferSize, " capacity : %zu\n",
- capacity_);
- ret.append(buffer);
- snprintf(buffer, kBufferSize, " num_shard_bits : %d\n", num_shard_bits_);
-diff --git a/src/kv/rocksdb_cache/ShardedCache.h b/src/kv/rocksdb_cache/ShardedCache.h
-index 4d64893ab1c7b..f98421a09a33a 100644
---- a/src/kv/rocksdb_cache/ShardedCache.h
-+++ b/src/kv/rocksdb_cache/ShardedCache.h
-@@ -14,6 +14,7 @@
- #include <string>
- #include <mutex>
-
-+#include "rocksdb/version.h"
- #include "rocksdb/cache.h"
- #include "include/ceph_hash.h"
- #include "common/PriorityCache.h"
-@@ -22,10 +23,11 @@
- #ifndef CACHE_LINE_SIZE
- #define CACHE_LINE_SIZE 64 // XXX arch-specific define
- #endif
--#define ROCKSDB_PRIszt "zu"
-
- namespace rocksdb_cache {
-
-+using DeleterFn = void (*)(const rocksdb::Slice& key, void* value);
-+
- // Single cache shard interface.
- class CacheShard {
- public:
-@@ -34,7 +36,7 @@ class CacheShard {
-
- virtual rocksdb::Status Insert(const rocksdb::Slice& key, uint32_t hash, void* value,
- size_t charge,
-- void (*deleter)(const rocksdb::Slice& key, void* value),
-+ DeleterFn deleter,
- rocksdb::Cache::Handle** handle, rocksdb::Cache::Priority priority) = 0;
- virtual rocksdb::Cache::Handle* Lookup(const rocksdb::Slice& key, uint32_t hash) = 0;
- virtual bool Ref(rocksdb::Cache::Handle* handle) = 0;
-@@ -44,10 +46,15 @@ class CacheShard {
- virtual void SetStrictCapacityLimit(bool strict_capacity_limit) = 0;
- virtual size_t GetUsage() const = 0;
- virtual size_t GetPinnedUsage() const = 0;
-- virtual void ApplyToAllCacheEntries(void (*callback)(void*, size_t),
-- bool thread_safe) = 0;
-+ virtual void ApplyToAllCacheEntries(
-+ const std::function<void(const rocksdb::Slice& key,
-+ void* value,
-+ size_t charge,
-+ DeleterFn)>& callback,
-+ bool thread_safe) = 0;
- virtual void EraseUnRefEntries() = 0;
- virtual std::string GetPrintableOptions() const { return ""; }
-+ virtual DeleterFn GetDeleter(rocksdb::Cache::Handle* handle) const = 0;
- };
-
- // Generic cache interface which shards cache by hash of keys. 2^num_shard_bits
-@@ -57,34 +64,43 @@ class ShardedCache : public rocksdb::Cache, public PriorityCache::PriCache {
- public:
- ShardedCache(size_t capacity, int num_shard_bits, bool strict_capacity_limit);
- virtual ~ShardedCache() = default;
-+ // rocksdb::Cache
- virtual const char* Name() const override = 0;
-- virtual CacheShard* GetShard(int shard) = 0;
-- virtual const CacheShard* GetShard(int shard) const = 0;
-- virtual void* Value(Handle* handle) override = 0;
-- virtual size_t GetCharge(Handle* handle) const = 0;
-- virtual uint32_t GetHash(Handle* handle) const = 0;
-- virtual void DisownData() override = 0;
--
-- virtual void SetCapacity(size_t capacity) override;
-- virtual void SetStrictCapacityLimit(bool strict_capacity_limit) override;
--
- virtual rocksdb::Status Insert(const rocksdb::Slice& key, void* value, size_t charge,
-- void (*deleter)(const rocksdb::Slice& key, void* value),
-+ DeleterFn,
- rocksdb::Cache::Handle** handle, Priority priority) override;
- virtual rocksdb::Cache::Handle* Lookup(const rocksdb::Slice& key, rocksdb::Statistics* stats) override;
- virtual bool Ref(rocksdb::Cache::Handle* handle) override;
- virtual bool Release(rocksdb::Cache::Handle* handle, bool force_erase = false) override;
-+ virtual void* Value(Handle* handle) override = 0;
- virtual void Erase(const rocksdb::Slice& key) override;
- virtual uint64_t NewId() override;
-- virtual size_t GetCapacity() const override;
-+ virtual void SetCapacity(size_t capacity) override;
-+ virtual void SetStrictCapacityLimit(bool strict_capacity_limit) override;
- virtual bool HasStrictCapacityLimit() const override;
-+ virtual size_t GetCapacity() const override;
- virtual size_t GetUsage() const override;
- virtual size_t GetUsage(rocksdb::Cache::Handle* handle) const override;
- virtual size_t GetPinnedUsage() const override;
-+ virtual size_t GetCharge(Handle* handle) const = 0;
-+#if (ROCKSDB_MAJOR >= 6 && ROCKSDB_MINOR >= 22)
-+ virtual DeleterFn GetDeleter(Handle* handle) const override;
-+#endif
-+ virtual void DisownData() override = 0;
-+#if (ROCKSDB_MAJOR >= 6 && ROCKSDB_MINOR >= 22)
-+ virtual void ApplyToAllEntries(
-+ const std::function<void(const rocksdb::Slice& key, void* value, size_t charge,
-+ DeleterFn deleter)>& callback,
-+ const ApplyToAllEntriesOptions& opts) override;
-+#else
- virtual void ApplyToAllCacheEntries(void (*callback)(void*, size_t),
- bool thread_safe) override;
-+#endif
- virtual void EraseUnRefEntries() override;
- virtual std::string GetPrintableOptions() const override;
-+ virtual CacheShard* GetShard(int shard) = 0;
-+ virtual const CacheShard* GetShard(int shard) const = 0;
-+ virtual uint32_t GetHash(Handle* handle) const = 0;
-
- int GetNumShardBits() const { return num_shard_bits_; }
-
-@@ -120,7 +136,7 @@ class ShardedCache : public rocksdb::Cache, public PriorityCache::PriCache {
- // return Hash(s.data(), s.size(), 0);
- }
-
-- uint32_t Shard(uint32_t hash) {
-+ uint32_t Shard(uint32_t hash) const {
- // Note, hash >> 32 yields hash in gcc, not the zero we expect!
- return (num_shard_bits_ > 0) ? (hash >> (32 - num_shard_bits_)) : 0;
- }
diff --git a/gnu/packages/patches/clang-15.0-libc-search-path.patch b/gnu/packages/patches/clang-15.0-libc-search-path.patch
new file mode 100644
index 0000000000..20726859b2
--- /dev/null
+++ b/gnu/packages/patches/clang-15.0-libc-search-path.patch
@@ -0,0 +1,95 @@
+Clang attempts to guess file names based on the OS and distro (yes!),
+but unfortunately, that doesn't work for us.
+
+This patch makes it easy to insert libc's $libdir so that Clang passes the
+correct absolute file name of crt1.o etc. to 'ld'. It also disables all
+the distro-specific stuff and removes the hard-coded FHS directory names
+to make sure Clang also works on foreign distros.
+
+diff --git a/clang/lib/Driver/Distro.cpp b/clang/lib/Driver/Distro.cpp
+index 1898667..35de813 100644
+--- a/clang/lib/Driver/Distro.cpp
++++ b/clang/lib/Driver/Distro.cpp
+@@ -97,6 +97,10 @@ static Distro::DistroType DetectLsbRelease(llvm::vfs::FileSystem &VFS) {
+ }
+
+ static Distro::DistroType DetectDistro(llvm::vfs::FileSystem &VFS) {
++ // The compiler should always behave the same, even when used via Guix on a
++ // foreign distro.
++ return Distro::UnknownDistro;
++
+ Distro::DistroType Version = Distro::UnknownDistro;
+
+ // Newer freedesktop.org's compilant systemd-based systems
+diff --git a/clang/lib/Driver/ToolChains/Cuda.cpp b/clang/lib/Driver/ToolChains/Cuda.cpp
+index 7ad990d..e4da4d4 100644
+--- a/clang/lib/Driver/ToolChains/Cuda.cpp
++++ b/clang/lib/Driver/ToolChains/Cuda.cpp
+@@ -117,6 +117,9 @@ CudaInstallationDetector::CudaInstallationDetector(
+ const Driver &D, const llvm::Triple &HostTriple,
+ const llvm::opt::ArgList &Args)
+ : D(D) {
++ // Don't look for CUDA in /usr.
++ return;
++
+ struct Candidate {
+ std::string Path;
+ bool StrictChecking;
+diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
+index ceb1a98..9d7a14a 100644
+--- a/clang/lib/Driver/ToolChains/Linux.cpp
++++ b/clang/lib/Driver/ToolChains/Linux.cpp
+@@ -188,6 +188,10 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
+
+ Generic_GCC::PushPPaths(PPaths);
+
++ // Comment out the distro-specific tweaks so that they don't bite when
++ // using Guix on a foreign distro.
++#if 0
++
+ Distro Distro(D.getVFS(), Triple);
+
+ if (Distro.IsAlpineLinux() || Triple.isAndroid()) {
+@@ -256,6 +260,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
+
+ if (IsAndroid || Distro.IsOpenSUSE())
+ ExtraOpts.push_back("--enable-new-dtags");
++#endif
+
+ // The selection of paths to try here is designed to match the patterns which
+ // the GCC driver itself uses, as this is part of the GCC-compatible driver.
+@@ -276,6 +281,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
+ }
+ Generic_GCC::AddMultilibPaths(D, SysRoot, OSLibDir, MultiarchTriple, Paths);
+
++#if 0
+ addPathIfExists(D, concat(SysRoot, "/lib", MultiarchTriple), Paths);
+ addPathIfExists(D, concat(SysRoot, "/lib/..", OSLibDir), Paths);
+
+@@ -304,9 +310,11 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
+ addPathIfExists(D, concat(SysRoot, "/", OSLibDir, ABIName), Paths);
+ addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir, ABIName), Paths);
+ }
++#endif
+
+ Generic_GCC::AddMultiarchPaths(D, SysRoot, OSLibDir, Paths);
+
++#if 0
+ // The deprecated -DLLVM_ENABLE_PROJECTS=libcxx configuration installs
+ // libc++.so in D.Dir+"/../lib/". Detect this path.
+ // TODO Remove once LLVM_ENABLE_PROJECTS=libcxx is unsupported.
+@@ -316,6 +324,14 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
+
+ addPathIfExists(D, concat(SysRoot, "/lib"), Paths);
+ addPathIfExists(D, concat(SysRoot, "/usr/lib"), Paths);
++#endif
++
++ // Add libc's lib/ directory to the search path, so that crt1.o, crti.o,
++ // and friends can be found.
++ addPathIfExists(D, "@GLIBC_LIBDIR@", Paths);
++
++ // Add GCC's lib/ directory so libstdc++.so can be found.
++ addPathIfExists(D, GCCInstallation.getParentLibPath(), Paths);
+ }
+
+ ToolChain::RuntimeLibType Linux::GetDefaultRuntimeLibType() const {
diff --git a/gnu/packages/patches/crc32c-unbundle-googletest.patch b/gnu/packages/patches/crc32c-unbundle-googletest.patch
new file mode 100644
index 0000000000..da513c5d4e
--- /dev/null
+++ b/gnu/packages/patches/crc32c-unbundle-googletest.patch
@@ -0,0 +1,21 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 8490728..c7f0952 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -315,15 +315,7 @@ if(CRC32C_BUILD_TESTS)
+ set(install_gmock OFF)
+
+ # This project is tested using GoogleTest.
+- add_subdirectory("third_party/googletest")
+-
+- # GoogleTest triggers a missing field initializers warning.
+- if(CRC32C_HAVE_NO_MISSING_FIELD_INITIALIZERS)
+- set_property(TARGET gtest
+- APPEND PROPERTY COMPILE_OPTIONS -Wno-missing-field-initializers)
+- set_property(TARGET gmock
+- APPEND PROPERTY COMPILE_OPTIONS -Wno-missing-field-initializers)
+- endif(CRC32C_HAVE_NO_MISSING_FIELD_INITIALIZERS)
++ find_package(GTest REQUIRED)
+
+ add_executable(crc32c_tests "")
+ target_sources(crc32c_tests
diff --git a/gnu/packages/patches/diffoscope-fix-llvm-test.patch b/gnu/packages/patches/diffoscope-fix-llvm-test.patch
deleted file mode 100644
index 45e496a128..0000000000
--- a/gnu/packages/patches/diffoscope-fix-llvm-test.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From b7eeac09eb068083bdee1a3aa062d1e52a2fa61a Mon Sep 17 00:00:00 2001
-From: Tobias Geerinckx-Rice <me@tobias.gr>
-Date: Mon, 4 Oct 2021 21:03:43 +0200
-Subject: [PATCH] gnu: diffoscope: Fix test_item3_deflate_llvm_bitcode.
-
-Taken verbatim from Nixpkgs[0].
-
-[0]: https://github.com/NixOS/nixpkgs/blob/589e03f109092a3ba97781fd0533110bf78a3f97/pkgs/tools/misc/diffoscope/fix-tests.patch
----
- tests/comparators/test_rlib.py | 3 ---
- 1 file changed, 3 deletions(-)
-
-diff --git a/tests/comparators/test_rlib.py b/tests/comparators/test_rlib.py
-index 8d201ab..05960aa 100644
---- a/tests/comparators/test_rlib.py
-+++ b/tests/comparators/test_rlib.py
-@@ -81,9 +81,6 @@ def rlib_dis_expected_diff():
- if actual_ver >= "7.0":
- diff_file = "rlib_llvm_dis_expected_diff_7"
-
-- if actual_ver >= "10.0":
-- diff_file = "rlib_llvm_dis_expected_diff_10"
--
- return get_data(diff_file)
-
-
---
-2.33.0
diff --git a/gnu/packages/patches/ecl-16-format-directive-limit.patch b/gnu/packages/patches/ecl-16-format-directive-limit.patch
deleted file mode 100644
index 237db92722..0000000000
--- a/gnu/packages/patches/ecl-16-format-directive-limit.patch
+++ /dev/null
@@ -1,83 +0,0 @@
-Patch backported by Sage.
-
-Fix from upstream that happens to work around
-https://trac.sagemath.org/ticket/23011
-diff --git a/src/lsp/format.lsp b/src/lsp/format.lsp
-index 77ca799..53b887c 100644
---- a/src/lsp/format.lsp
-+++ b/src/lsp/format.lsp
-@@ -307,11 +307,13 @@
- :start (format-directive-start struct)
- :end (format-directive-end struct))))
-
-+(defconstant +format-directive-limit+ (1+ (char-code #\~)))
-+
- #+formatter
- (defparameter *format-directive-expanders*
-- (make-array char-code-limit :initial-element nil))
-+ (make-array +format-directive-limit+ :initial-element nil))
- (defparameter *format-directive-interpreters*
-- (make-array char-code-limit :initial-element nil))
-+ (make-array +format-directive-limit+ :initial-element nil))
-
- (defparameter *default-format-error-control-string* nil)
- (defparameter *default-format-error-offset* nil)
-@@ -550,24 +552,24 @@
- (write-string directive stream)
- (interpret-directive-list stream (cdr directives) orig-args args))
- (#-ecl format-directive #+ecl vector
-+ (multiple-value-bind
-+ (new-directives new-args)
-+ (let* ((code (char-code (format-directive-character directive)))
-+ (function
-+ (and (< code +format-directive-limit+)
-+ (svref *format-directive-interpreters* code)))
-+ (*default-format-error-offset*
-+ (1- (format-directive-end directive))))
-+ (unless function
-+ (error 'format-error
-+ :complaint "Unknown format directive."))
- (multiple-value-bind
- (new-directives new-args)
-- (let ((function
-- (svref *format-directive-interpreters*
-- (char-code (format-directive-character
-- directive))))
-- (*default-format-error-offset*
-- (1- (format-directive-end directive))))
-- (unless function
-- (error 'format-error
-- :complaint "Unknown format directive."))
-- (multiple-value-bind
-- (new-directives new-args)
-- (funcall function stream directive
-- (cdr directives) orig-args args)
-- (values new-directives new-args)))
-- (interpret-directive-list stream new-directives
-- orig-args new-args)))))
-+ (funcall function stream directive
-+ (cdr directives) orig-args args)
-+ (values new-directives new-args)))
-+ (interpret-directive-list stream new-directives
-+ orig-args new-args)))))
- args))
-
-
-@@ -639,11 +641,12 @@
- (values `(write-string ,directive stream)
- more-directives))
- (format-directive
-- (let ((expander
-- (aref *format-directive-expanders*
-- (char-code (format-directive-character directive))))
-- (*default-format-error-offset*
-- (1- (format-directive-end directive))))
-+ (let* ((code (char-code (format-directive-character directive)))
-+ (expander
-+ (and (< code +format-directive-limit+)
-+ (svref *format-directive-expanders* code)))
-+ (*default-format-error-offset*
-+ (1- (format-directive-end directive))))
- (if expander
- (funcall expander directive more-directives)
- (error 'format-error
diff --git a/gnu/packages/patches/ecl-16-ignore-stderr-write-error.patch b/gnu/packages/patches/ecl-16-ignore-stderr-write-error.patch
deleted file mode 100644
index 42d213c0e9..0000000000
--- a/gnu/packages/patches/ecl-16-ignore-stderr-write-error.patch
+++ /dev/null
@@ -1,17 +0,0 @@
-Patch adapted from Sage.
-diff -Naur ecl-16.1.2.orig/src/c/file.d ecl-16.1.2/src/c/file.d
---- ecl-16.1.2.orig/src/c/file.d 2016-05-11 13:10:51.867673867 +1200
-+++ ecl-16.1.2/src/c/file.d 2016-05-11 14:44:48.121907307 +1200
-@@ -3354,8 +3354,10 @@
- ecl_disable_interrupts();
- do {
- out = fwrite(c, sizeof(char), n, IO_STREAM_FILE(strm));
-- } while (out < n && restartable_io_error(strm, "fwrite"));
-- ecl_enable_interrupts();
-+ /* Ignore write errors to stderr to avoid an infinite loop */
-+ } while (out < n && (IO_STREAM_FILE(strm) != stderr) && restartable_io_error(strm, "fwrite"));
-+
-+ ecl_enable_interrupts();
- return out;
- }
-
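The stderr special case in the patch above exists because a short or failed write is normally retried, but when the failing stream is stderr the error report itself writes to stderr, so the retry loop can never terminate.  A rough standalone sketch of that pattern in C, an assumption-laden illustration rather than ECL's actual code:

#include <errno.h>
#include <stdio.h>

static size_t write_all(FILE *stream, const char *buf, size_t n)
{
  size_t out = 0;
  for (;;) {
    out += fwrite(buf + out, 1, n - out, stream);
    if (out >= n)
      return out;
    /* Retry only writes interrupted by a signal, and never for stderr:
       reporting the failure would itself write to stderr and loop forever.  */
    if (stream == stderr || errno != EINTR)
      return out;
    clearerr(stream);
  }
}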
diff --git a/gnu/packages/patches/ecl-16-libffi.patch b/gnu/packages/patches/ecl-16-libffi.patch
deleted file mode 100644
index fc06a07606..0000000000
--- a/gnu/packages/patches/ecl-16-libffi.patch
+++ /dev/null
@@ -1,16 +0,0 @@
-Patch adapted from Sage. Allows building ECL on libffi 3.3.
-diff --git a/src/c/ffi.d b/src/c/ffi.d
-index 8861303e..8a959c23 100644
---- a/src/c/ffi.d
-+++ b/src/c/ffi.d
-@@ -133,8 +133,8 @@ static struct {
- #elif defined(X86_WIN64)
- {@':win64', FFI_WIN64},
- #elif defined(X86_ANY) || defined(X86) || defined(X86_64)
-- {@':cdecl', FFI_SYSV},
-- {@':sysv', FFI_SYSV},
-+ {@':cdecl', FFI_UNIX64},
-+ {@':sysv', FFI_UNIX64},
- {@':unix64', FFI_UNIX64},
- #endif
- };
diff --git a/gnu/packages/patches/emacs-libgit-use-system-libgit2.patch b/gnu/packages/patches/emacs-libgit-use-system-libgit2.patch
deleted file mode 100644
index 4a5546b06b..0000000000
--- a/gnu/packages/patches/emacs-libgit-use-system-libgit2.patch
+++ /dev/null
@@ -1,88 +0,0 @@
-From de3c48d72ec7064e7f0522877fe759c729df0c50 Mon Sep 17 00:00:00 2001
-From: Maxim Cournoyer <maxim.cournoyer@gmail.com>
-Date: Wed, 25 Mar 2020 11:32:18 -0400
-Subject: [PATCH] Allow using a system provided libgit2 library
-
-Setting the USE_SYSTEM_LIBGIT2 Make or CMake variable (through the
-BUILD_OPTIONS variable) to any value enables using the system library.
-The default behavior of using a bundled copy of libgit2 is unchanged.
----
- CMakeLists.txt | 9 +++++++--
- Makefile | 11 +++++++++++
- src/CMakeLists.txt | 9 +++++++--
- 3 files changed, 25 insertions(+), 4 deletions(-)
-
-diff --git a/CMakeLists.txt b/CMakeLists.txt
-index a393d7c..75d6ca6 100644
---- a/CMakeLists.txt
-+++ b/CMakeLists.txt
-@@ -7,9 +7,14 @@ set(BUILD_SHARED_LIBS OFF CACHE BOOL "shared" FORCE)
- set(BUILD_CLAR OFF CACHE BOOL "clar" FORCE)
- set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -DEGIT_DEBUG")
-
--add_subdirectory(libgit2)
-+if(USE_SYSTEM_LIBGIT2)
-+ find_package(PkgConfig REQUIRED)
-+ pkg_check_modules(git2 REQUIRED IMPORTED_TARGET libgit2)
-+else()
-+ add_subdirectory(libgit2)
-+ find_library(git2 libgit2.a)
-+endif()
-
--find_library(git2 libgit2.a)
- add_subdirectory(src)
-
- enable_testing()
-diff --git a/Makefile b/Makefile
-index 8199532..6a6a4e1 100644
---- a/Makefile
-+++ b/Makefile
-@@ -13,6 +13,13 @@ ifeq ($(UNAME),MSYS)
- BUILD_OPTIONS+= -G "MSYS Makefiles"
- endif
-
-+# If the variable USE_SYSTEM_LIBGIT2 is set to *any* value, use the
-+# system provided libgit2 library.
-+USE_SYSTEM_LIBGIT2? := \
-+ $(if $(or $(USE_SYSTEM_LIBGIT2),\
-+ $(findstring USE_SYSTEM_LIBGIT2,$(BUILD_OPTIONS))),\
-+ true)
-+
- ifeq "$(TRAVIS)" "true"
- ## Makefile for Travis ###################################################
- #
-@@ -87,7 +94,11 @@ submodule-update:
- @git submodule update
-
- libgit2:
-+ifeq ($(USE_SYSTEM_LIBGIT2?),)
- @git submodule update --init
-+else
-+ @echo "Using the system provided libgit2 library"
-+endif
-
- CLEAN = $(ELCS) $(PKG)-autoloads.el build
-
-diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
-index cfb5777..0dbad8a 100644
---- a/src/CMakeLists.txt
-+++ b/src/CMakeLists.txt
-@@ -13,8 +13,13 @@ if(WIN32)
- set_target_properties(egit2 PROPERTIES PREFIX lib)
- endif(WIN32)
-
--target_link_libraries(egit2 git2)
--target_include_directories(egit2 SYSTEM PRIVATE "${libgit2_SOURCE_DIR}/include")
-+if(USE_SYSTEM_LIBGIT2)
-+ target_link_libraries(egit2 PRIVATE PkgConfig::git2)
-+else()
-+ target_link_libraries(egit2 git2)
-+ target_include_directories(
-+ egit2 SYSTEM PRIVATE "${libgit2_SOURCE_DIR}/include")
-+endif()
-
- if(CMAKE_COMPILER_IS_GNUCC)
- target_compile_options(egit2 PRIVATE -Wall -Wextra)
---
-2.26.2
-
diff --git a/gnu/packages/patches/emacs-native-comp-driver-options.patch b/gnu/packages/patches/emacs-native-comp-driver-options.patch
new file mode 100644
index 0000000000..308c4f1212
--- /dev/null
+++ b/gnu/packages/patches/emacs-native-comp-driver-options.patch
@@ -0,0 +1,17 @@
+We substitute this anyway, so let's make it easier to substitute.
+
+--- a/lisp/emacs-lisp/comp.el
++++ b/lisp/emacs-lisp/comp.el
+@@ -178,8 +178,7 @@ and above."
+ :type '(repeat string)
+ :version "28.1")
+
+-(defcustom native-comp-driver-options (when (eq system-type 'darwin)
+- '("-Wl,-w"))
++(defcustom native-comp-driver-options nil
+ "Options passed verbatim to the native compiler's back-end driver.
+ Note that not all options are meaningful; typically only the options
+ affecting the assembler and linker are likely to be useful.
+--
+2.38.0
+
diff --git a/gnu/packages/patches/emacs-telega-path-placeholder.patch b/gnu/packages/patches/emacs-telega-path-placeholder.patch
index 5829edd22a..07ab8c1e66 100644
--- a/gnu/packages/patches/emacs-telega-path-placeholder.patch
+++ b/gnu/packages/patches/emacs-telega-path-placeholder.patch
@@ -1,18 +1,31 @@
-From bf95de21faa623e48bca00d6a2c9b33ab2c5d812 Mon Sep 17 00:00:00 2001
+From bfcd616f2870c8c3ffc9a526fcd574eb5e726a96 Mon Sep 17 00:00:00 2001
From: Andrew Tropin <andrew@trop.in>
-Date: Wed, 8 Dec 2021 11:01:31 +0300
-Subject: [PATCH] Use absolute path for telega-server-command.
+Date: Sat, 14 Jan 2023 09:33:34 +0400
+Subject: [PATCH] Add path placeholder for telega-server-command and etc-file.
---
+ telega-core.el | 2 +-
telega-customize.el | 2 +-
- telega-util.el | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
+diff --git a/telega-core.el b/telega-core.el
+index 36f121e..f9f7976 100644
+--- a/telega-core.el
++++ b/telega-core.el
+@@ -41,7 +41,7 @@
+
+ (defun telega-etc-file (filename)
+ "Return absolute path to FILENAME from etc/ directory in telega."
+- (expand-file-name (concat "etc/" filename) telega--lib-directory))
++ (concat "@TELEGA_SHARE@" "/" filename))
+
+ (defconst telega-spoiler-translation-table
+ (let ((table (make-char-table 'translation-table)))
diff --git a/telega-customize.el b/telega-customize.el
-index 0af343f..cc2938c 100644
+index 0efb001..77cec5f 100644
--- a/telega-customize.el
+++ b/telega-customize.el
-@@ -591,7 +591,7 @@ In range [1..3]. Use 1."
+@@ -633,7 +633,7 @@ In range [1..3]. Use 1."
:prefix "telega-server-"
:group 'telega)
@@ -21,19 +34,5 @@ index 0af343f..cc2938c 100644
"Command to run as telega server.
It should be absolute path or binary file searchable in `exec-path'."
:type 'string
-diff --git a/telega-util.el b/telega-util.el
-index 6340c27..01e3cb7 100644
---- a/telega-util.el
-+++ b/telega-util.el
-@@ -587,7 +587,7 @@ N can't be 0."
-
- (defun telega-etc-file (filename)
- "Return absolute path to FILENAME from etc/ directory in telega."
-- (expand-file-name (concat "etc/" filename) telega--lib-directory))
-+ (concat "@TELEGA_SHARE@" "/" filename))
-
- (defun telega-link-props (link-type link-to &optional face)
- "Generate props for link button openable with `telega-link--button-action'."
--
-2.34.0
-
+2.38.1
diff --git a/gnu/packages/patches/flashrom-fix-building-on-aarch64.patch b/gnu/packages/patches/flashrom-fix-building-on-aarch64.patch
new file mode 100644
index 0000000000..9f54305b47
--- /dev/null
+++ b/gnu/packages/patches/flashrom-fix-building-on-aarch64.patch
@@ -0,0 +1,89 @@
+commit da6b3b70cb852dd8e9f9e21aef95fa83e7f7ab0d
+Author: Pyry Kontio <pyry.kontio@drasa.eu>
+Date: Mon Jul 6 12:57:35 2020 +0900
+
+ Makefile: Fix building on AArch64 NixOS
+
+ The parsing of the output of archtest.c produced an unexpected
+ value on AArch64 NixOS. For example, the make variable ARCH was set to:
+
+ ```
+ bit outside of fd_set selected
+ arm
+ ```
+
+ This made the arch and OS checks fail.
+
+ This commit simplifies the parsing, making it more robust.
+
+ The C files archtest.c, endiantest.c and os.h used to set the
+ TARGET_OS, ARCH and ENDIAN variables, respectively, output
+ the result of the test as the final line, so just extracting
+ the final line and removing double quoting is enough.
+
+ This commit also fixes a bug with debug_shell lacking escaping
+ single quotes, which prevented using the single quote in the
+ debug_shell calls. It used to work by accident before this fix;
+ the line in the call happened to contain a balanced pair of double
+ quotes and lacked other characters that needed escaping, which
+ didn't break the debug_shell, but this was accidental and very
+ brittle.
+
+ Signed-off-by: Pyry Kontio <pyry.kontio@drasa.eu>
+ Change-Id: Iaa4477a71e758cf9ecad2c22f3b77bc6508a3510
+ Reviewed-on: https://review.coreboot.org/c/flashrom/+/43140
+ Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
+ Reviewed-by: Angel Pons <th3fanbus@gmail.com>
+
+diff --git a/Makefile b/Makefile
+index f3f7717e..e475cbdb 100644
+--- a/Makefile
++++ b/Makefile
+@@ -83,7 +83,8 @@ dummy_for_make_3_80:=$(shell printf "Build started on %s\n\n" "$$(date)" >$(BUIL
+
+ # Provide an easy way to execute a command, print its output to stdout and capture any error message on stderr
+ # in the build details file together with the original stdout output.
+-debug_shell = $(shell export LC_ALL=C ; { echo 'exec: export LC_ALL=C ; { $(1) ; }' >&2; { $(1) ; } | tee -a $(BUILD_DETAILS_FILE) ; echo >&2 ; } 2>>$(BUILD_DETAILS_FILE))
++debug_shell = $(shell export LC_ALL=C ; { echo 'exec: export LC_ALL=C ; { $(subst ','\'',$(1)) ; }' >&2; \
++ { $(1) ; } | tee -a $(BUILD_DETAILS_FILE) ; echo >&2 ; } 2>>$(BUILD_DETAILS_FILE))
+
+ ###############################################################################
+ # General OS-specific settings.
+@@ -106,7 +107,8 @@ endif
+ # IMPORTANT: The following line must be placed before TARGET_OS is ever used
+ # (of course), but should come after any lines setting CC because the line
+ # below uses CC itself.
+-override TARGET_OS := $(strip $(call debug_shell,$(CC) $(CPPFLAGS) -E os.h 2>/dev/null | grep -v '^\#' | grep '"' | cut -f 2 -d'"'))
++override TARGET_OS := $(strip $(call debug_shell,$(CC) $(CPPFLAGS) -E os.h 2>/dev/null \
++ | tail -1 | cut -f 2 -d'"'))
+
+ ifeq ($(TARGET_OS), Darwin)
+ override CPPFLAGS += -I/opt/local/include -I/usr/local/include
+@@ -490,8 +492,10 @@ endif
+ # IMPORTANT: The following line must be placed before ARCH is ever used
+ # (of course), but should come after any lines setting CC because the line
+ # below uses CC itself.
+-override ARCH := $(strip $(call debug_shell,$(CC) $(CPPFLAGS) -E archtest.c 2>/dev/null | grep -v '^\#' | grep '"' | cut -f 2 -d'"'))
+-override ENDIAN := $(strip $(call debug_shell,$(CC) $(CPPFLAGS) -E endiantest.c 2>/dev/null | grep -v '^\#'))
++override ARCH := $(strip $(call debug_shell,$(CC) $(CPPFLAGS) -E archtest.c 2>/dev/null \
++ | tail -1 | cut -f 2 -d'"'))
++override ENDIAN := $(strip $(call debug_shell,$(CC) $(CPPFLAGS) -E endiantest.c 2>/dev/null \
++ | tail -1))
+
+ # Disable the internal programmer on unsupported architectures (everything but x86 and mipsel)
+ ifneq ($(ARCH)-little, $(filter $(ARCH),x86 mips)-$(ENDIAN))
+@@ -1299,12 +1303,12 @@ compiler: featuresavailable
+ @printf "Target arch is "
+ @# FreeBSD wc will output extraneous whitespace.
+ @echo $(ARCH)|wc -w|grep -q '^[[:blank:]]*1[[:blank:]]*$$' || \
+- ( echo "unknown. Aborting."; exit 1)
++ ( echo "unknown (\"$(ARCH)\"). Aborting."; exit 1)
+ @printf "%s\n" '$(ARCH)'
+ @printf "Target OS is "
+ @# FreeBSD wc will output extraneous whitespace.
+ @echo $(TARGET_OS)|wc -w|grep -q '^[[:blank:]]*1[[:blank:]]*$$' || \
+- ( echo "unknown. Aborting."; exit 1)
++ ( echo "unknown (\"$(TARGET_OS)\"). Aborting."; exit 1)
+ @printf "%s\n" '$(TARGET_OS)'
+ ifeq ($(TARGET_OS), libpayload)
+ @$(CC) --version 2>&1 | grep -q coreboot || \
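The probe files named in the commit message above (archtest.c, endiantest.c, os.h) work by placing their answer, as a quoted string, on the very last line of the preprocessed output, which is why piping through tail -1 and cut -f 2 -d'"' is sufficient.  A hypothetical probe in that spirit; the real archtest.c covers more architectures and uses its own macro name:

/* Preprocess-only probe: run through "$(CC) $(CPPFLAGS) -E", the last line
   expands to the selected quoted architecture name.  */
#if defined(__i386__) || defined(__x86_64__)
#define PROBE_ARCH "x86"
#elif defined(__arm__) || defined(__aarch64__)
#define PROBE_ARCH "arm"
#else
#define PROBE_ARCH "unknown"
#endif
PROBE_ARCH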
diff --git a/gnu/packages/patches/fp16-implicit-double.patch b/gnu/packages/patches/fp16-implicit-double.patch
new file mode 100644
index 0000000000..87ed9a6a53
--- /dev/null
+++ b/gnu/packages/patches/fp16-implicit-double.patch
@@ -0,0 +1,23 @@
+Prevent implicit conversion of float to double to avoid precision
+error on i686.
+
+ https://github.com/Maratyszcza/FP16/issues/20
+
+Taken from Debian:
+
+ https://salsa.debian.org/deeplearning-team/fp16/-/blob/master/debian/patches/ftbfs-i386.patch
+
+Index: fp16/include/fp16/fp16.h
+===================================================================
+--- fp16.orig/include/fp16/fp16.h
++++ fp16/include/fp16/fp16.h
+@@ -228,7 +228,8 @@ static inline uint16_t fp16_ieee_from_fp
+ const float scale_to_inf = fp32_from_bits(UINT32_C(0x77800000));
+ const float scale_to_zero = fp32_from_bits(UINT32_C(0x08800000));
+ #endif
+- float base = (fabsf(f) * scale_to_inf) * scale_to_zero;
++ const volatile float base_inf = fabsf(f) * scale_to_inf;
++ float base = base_inf * scale_to_zero;
+
+ const uint32_t w = fp32_to_bits(f);
+ const uint32_t shl1_w = w + w;
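The volatile intermediate above matters because on i686 the x87 FPU may evaluate float expressions in 80-bit extended precision; storing the intermediate in a volatile float forces a rounding step to binary32 before the second multiplication.  A self-contained sketch of the idiom, borrowing the variable names from the patch but otherwise independent of fp16.h:

#include <math.h>

static float scale_twice(float f, float scale_to_inf, float scale_to_zero)
{
  /* Without the volatile, (fabsf(f) * scale_to_inf) * scale_to_zero could be
     evaluated entirely in extended precision, changing the rounding.  */
  const volatile float base_inf = fabsf(f) * scale_to_inf;  /* rounded to float here */
  return base_inf * scale_to_zero;
}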
diff --git a/gnu/packages/patches/gemmi-fix-pegtl-usage.patch b/gnu/packages/patches/gemmi-fix-pegtl-usage.patch
new file mode 100644
index 0000000000..3667474847
--- /dev/null
+++ b/gnu/packages/patches/gemmi-fix-pegtl-usage.patch
@@ -0,0 +1,31 @@
+Use the definitions from (newer) upstream PEGTL.
+
+diff --git a/include/gemmi/cif.hpp b/include/gemmi/cif.hpp
+index c7ffdb44..35d24210 100644
+--- a/include/gemmi/cif.hpp
++++ b/include/gemmi/cif.hpp
+@@ -37,7 +37,6 @@ namespace pegtl = tao::pegtl;
+ namespace rules {
+
+ template<int TableVal> struct lookup_char {
+- using analyze_t = pegtl::analysis::generic<pegtl::analysis::rule_type::ANY>;
+ template<typename Input> static bool match(Input& in) {
+ if (!in.empty() && cif::char_table(in.peek_char()) == TableVal) {
+ if (TableVal == 2) // this set includes new-line
+@@ -71,11 +70,11 @@ namespace rules {
+ struct ws_or_eof : pegtl::sor<whitespace, pegtl::eof> {};
+
+ // (b) Reserved words.
+- struct str_data : TAOCPP_PEGTL_ISTRING("data_") {};
+- struct str_loop : TAOCPP_PEGTL_ISTRING("loop_") {};
+- struct str_global : TAOCPP_PEGTL_ISTRING("global_") {};
+- struct str_save : TAOCPP_PEGTL_ISTRING("save_") {};
+- struct str_stop : TAOCPP_PEGTL_ISTRING("stop_") {};
++ struct str_data : TAO_PEGTL_ISTRING("data_") {};
++ struct str_loop : TAO_PEGTL_ISTRING("loop_") {};
++ struct str_global : TAO_PEGTL_ISTRING("global_") {};
++ struct str_save : TAO_PEGTL_ISTRING("save_") {};
++ struct str_stop : TAO_PEGTL_ISTRING("stop_") {};
+ struct keyword : pegtl::sor<str_data, str_loop, str_global,
+ str_save, str_stop> {};
+
diff --git a/gnu/packages/patches/gemmi-fix-sajson-types.patch b/gnu/packages/patches/gemmi-fix-sajson-types.patch
new file mode 100644
index 0000000000..9633ddac8b
--- /dev/null
+++ b/gnu/packages/patches/gemmi-fix-sajson-types.patch
@@ -0,0 +1,11 @@
+diff -ur a/include/gemmi/json.hpp b/include/gemmi/json.hpp
+--- a/include/gemmi/json.hpp
++++ b/include/gemmi/json.hpp
+@@ -38,6 +38,7 @@
+
+ inline std::string as_cif_value(const sajson::value& val) {
+ switch (val.get_type()) {
++ case sajson::TYPE_INTEGER:
+ case sajson::TYPE_DOUBLE:
+ return val.as_string();
+ case sajson::TYPE_NULL:
diff --git a/gnu/packages/patches/ghc-4.patch b/gnu/packages/patches/ghc-4.patch
deleted file mode 100644
index 87484f575d..0000000000
--- a/gnu/packages/patches/ghc-4.patch
+++ /dev/null
@@ -1,708 +0,0 @@
-The GHC 4 runtime system was written before GCC 3.5 deprecated lvalue casts.
-The runtime system's sources are littered with these casts, so early versions
-of this patch were dedicated to rewriting those statements to a standards
-compliant form. Unfortunately, this led to subtle breakage, so instead we
-build with GCC 2.95.
-
-Problematic for newer versions of GCC is also the assembly in the bundled
-sources of GMP 2.0.2, which spans multiple lines without escaping line breaks.
-
-TODO: We aren't yet using anything under ghc/compiler, so the patches there
-aren't needed at this time. The intent was to ensure that the compiler
-sources can be used even when they are interpreted by Hugs.
-
-TODO: There are some more problems with the Haskell sources. Some files have
-too many commas (both at the end of the line and at the beginning of the next
-line). Others use a trailing hash, which Hugs doesn't understand.
-
-TODO: Hugs doesn't understand "unsafe" in hslib/lang/Storable.lhs
-
-diff --git a/ghc/compiler/main/CmdLineOpts.lhs b/ghc/compiler/main/CmdLineOpts.lhs
-index ca1b58d..074fcaf 100644
---- a/ghc/compiler/main/CmdLineOpts.lhs
-+++ b/ghc/compiler/main/CmdLineOpts.lhs
-@@ -163,9 +163,9 @@ import Constants -- Default values for some flags
-
- import FastString ( headFS )
- import Maybes ( assocMaybe, firstJust, maybeToBool )
--import Panic ( panic, panic# )
-+import Panic ( panic, panic' )
-
--#if __GLASGOW_HASKELL__ < 301
-+#if __GLASGOW_HASKELL__ && __GLASGOW_HASKELL__ < 301
- import ArrBase ( Array(..) )
- #else
- import PrelArr ( Array(..) )
-diff --git a/ghc/compiler/prelude/PrimOp.lhs b/ghc/compiler/prelude/PrimOp.lhs
-index 7a0627d..59802c4 100644
---- a/ghc/compiler/prelude/PrimOp.lhs
-+++ b/ghc/compiler/prelude/PrimOp.lhs
-@@ -502,7 +502,7 @@ tagOf_PrimOp UnblockAsyncExceptionsOp = ILIT(260)
- tagOf_PrimOp DataToTagOp = ILIT(261)
- tagOf_PrimOp TagToEnumOp = ILIT(262)
-
--tagOf_PrimOp op = pprPanic# "tagOf_PrimOp: pattern-match" (ppr op)
-+tagOf_PrimOp op = pprPanic' "tagOf_PrimOp: pattern-match" (ppr op)
-
- instance Eq PrimOp where
- op1 == op2 = tagOf_PrimOp op1 _EQ_ tagOf_PrimOp op2
-diff --git a/ghc/compiler/utils/Outputable.lhs b/ghc/compiler/utils/Outputable.lhs
-index 19ad666..89d07cb 100644
---- a/ghc/compiler/utils/Outputable.lhs
-+++ b/ghc/compiler/utils/Outputable.lhs
-@@ -42,8 +42,8 @@ module Outputable (
-
-
- -- error handling
-- pprPanic, pprPanic#, pprError, pprTrace, assertPprPanic, warnPprTrace,
-- trace, panic, panic#, assertPanic
-+ pprPanic, pprPanic', pprError, pprTrace, assertPprPanic, warnPprTrace,
-+ trace, panic, panic', assertPanic
- ) where
-
- #include "HsVersions.h"
-@@ -420,7 +420,7 @@ pprPanic = pprAndThen panic
- pprError = pprAndThen error
- pprTrace = pprAndThen trace
-
--pprPanic# heading pretty_msg = panic# (show (doc PprDebug))
-+pprPanic' heading pretty_msg = panic' (show (doc PprDebug))
- where
- doc = text heading <+> pretty_msg
-
-diff --git a/ghc/compiler/utils/Panic.lhs b/ghc/compiler/utils/Panic.lhs
-index 907d8aa..37a2d87 100644
---- a/ghc/compiler/utils/Panic.lhs
-+++ b/ghc/compiler/utils/Panic.lhs
-@@ -9,7 +9,7 @@ It's hard to put these functions anywhere else without causing
- some unnecessary loops in the module dependency graph.
-
- \begin{code}
--module Panic ( panic, panic#, assertPanic, trace ) where
-+module Panic ( panic, panic', assertPanic, trace ) where
-
- import IOExts ( trace )
-
-@@ -27,8 +27,8 @@ panic x = error ("panic! (the `impossible' happened):\n\t"
- -- what TAG_ is with GHC at the moment. Ugh. (Simon)
- -- No, man -- Too Beautiful! (Will)
-
--panic# :: String -> FAST_INT
--panic# s = case (panic s) of () -> ILIT(0)
-+panic' :: String -> FAST_INT
-+panic' s = case (panic s) of () -> ILIT(0)
-
- assertPanic :: String -> Int -> a
- assertPanic file line = panic ("ASSERT failed! file " ++ file ++ ", line " ++ show line)
-diff --git a/ghc/includes/PrimOps.h b/ghc/includes/PrimOps.h
-index 8b8c2f9..7f43ab0 100644
---- a/ghc/includes/PrimOps.h
-+++ b/ghc/includes/PrimOps.h
-@@ -893,6 +893,7 @@ EXTFUN_RTS(mkForeignObjzh_fast);
- #define STG_SIG_ERR (-3)
- #define STG_SIG_HAN (-4)
-
-+#include <signal.h>
- extern StgInt sig_install (StgInt, StgInt, StgStablePtr, sigset_t *);
- #define stg_sig_default(sig,mask) sig_install(sig,STG_SIG_DFL,0,(sigset_t *)mask)
- #define stg_sig_ignore(sig,mask) sig_install(sig,STG_SIG_IGN,0,(sigset_t *)mask)
-diff --git a/ghc/rts/RtsFlags.c b/ghc/rts/RtsFlags.c
-index a05036f..9cd6c83 100644
---- a/ghc/rts/RtsFlags.c
-+++ b/ghc/rts/RtsFlags.c
-@@ -1132,8 +1132,7 @@ process_gran_option(int arg, int *rts_argc, char *rts_argv[], rtsBool *error)
- } else if (RtsFlags.GranFlags.proc > MAX_PROC ||
- RtsFlags.GranFlags.proc < 1)
- {
-- fprintf(stderr,"setupRtsFlags: no more than %u processors
--allowed\n",
-+ fprintf(stderr,"setupRtsFlags: no more than %u processors allowed\n",
- MAX_PROC);
- *error = rtsTrue;
- }
-diff --git a/ghc/rts/gmp/longlong.h b/ghc/rts/gmp/longlong.h
-index 382fcc0..0cf79fa 100644
---- a/ghc/rts/gmp/longlong.h
-+++ b/ghc/rts/gmp/longlong.h
-@@ -106,7 +106,7 @@ MA 02111-1307, USA. */
-
- #if (defined (__a29k__) || defined (_AM29K)) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("add %1,%4,%5
-+ __asm__ ("add %1,%4,%5\n\
- addc %0,%2,%3" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -115,7 +115,7 @@ MA 02111-1307, USA. */
- "%r" ((USItype)(al)), \
- "rI" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("sub %1,%4,%5
-+ __asm__ ("sub %1,%4,%5\n\
- subc %0,%2,%3" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -173,7 +173,7 @@ extern UDItype __udiv_qrnnd ();
-
- #if defined (__arm__) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("adds %1, %4, %5
-+ __asm__ ("adds %1, %4, %5\n\
- adc %0, %2, %3" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -182,7 +182,7 @@ extern UDItype __udiv_qrnnd ();
- "%r" ((USItype)(al)), \
- "rI" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("subs %1, %4, %5
-+ __asm__ ("subs %1, %4, %5\n\
- sbc %0, %2, %3" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -191,18 +191,18 @@ extern UDItype __udiv_qrnnd ();
- "r" ((USItype)(al)), \
- "rI" ((USItype)(bl)))
- #define umul_ppmm(xh, xl, a, b) \
-- __asm__ ("%@ Inlined umul_ppmm
-- mov %|r0, %2, lsr #16
-- mov %|r2, %3, lsr #16
-- bic %|r1, %2, %|r0, lsl #16
-- bic %|r2, %3, %|r2, lsl #16
-- mul %1, %|r1, %|r2
-- mul %|r2, %|r0, %|r2
-- mul %|r1, %0, %|r1
-- mul %0, %|r0, %0
-- adds %|r1, %|r2, %|r1
-- addcs %0, %0, #65536
-- adds %1, %1, %|r1, lsl #16
-+ __asm__ ("%@ Inlined umul_ppmm\n\
-+ mov %|r0, %2, lsr #16\n\
-+ mov %|r2, %3, lsr #16\n\
-+ bic %|r1, %2, %|r0, lsl #16\n\
-+ bic %|r2, %3, %|r2, lsl #16\n\
-+ mul %1, %|r1, %|r2\n\
-+ mul %|r2, %|r0, %|r2\n\
-+ mul %|r1, %0, %|r1\n\
-+ mul %0, %|r0, %0\n\
-+ adds %|r1, %|r2, %|r1\n\
-+ addcs %0, %0, #65536\n\
-+ adds %1, %1, %|r1, lsl #16\n\
- adc %0, %0, %|r1, lsr #16" \
- : "=&r" ((USItype)(xh)), \
- "=r" ((USItype)(xl)) \
-@@ -243,7 +243,7 @@ extern UDItype __udiv_qrnnd ();
-
- #if defined (__gmicro__) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("add.w %5,%1
-+ __asm__ ("add.w %5,%1\n\
- addx %3,%0" \
- : "=g" ((USItype)(sh)), \
- "=&g" ((USItype)(sl)) \
-@@ -252,7 +252,7 @@ extern UDItype __udiv_qrnnd ();
- "%1" ((USItype)(al)), \
- "g" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("sub.w %5,%1
-+ __asm__ ("sub.w %5,%1\n\
- subx %3,%0" \
- : "=g" ((USItype)(sh)), \
- "=&g" ((USItype)(sl)) \
-@@ -282,7 +282,7 @@ extern UDItype __udiv_qrnnd ();
-
- #if defined (__hppa) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("add %4,%5,%1
-+ __asm__ ("add %4,%5,%1\n\
- addc %2,%3,%0" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -291,7 +291,7 @@ extern UDItype __udiv_qrnnd ();
- "%rM" ((USItype)(al)), \
- "rM" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("sub %4,%5,%1
-+ __asm__ ("sub %4,%5,%1\n\
- subb %2,%3,%0" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -330,21 +330,21 @@ extern USItype __udiv_qrnnd ();
- do { \
- USItype __tmp; \
- __asm__ ( \
-- "ldi 1,%0
-- extru,= %1,15,16,%%r0 ; Bits 31..16 zero?
-- extru,tr %1,15,16,%1 ; No. Shift down, skip add.
-- ldo 16(%0),%0 ; Yes. Perform add.
-- extru,= %1,23,8,%%r0 ; Bits 15..8 zero?
-- extru,tr %1,23,8,%1 ; No. Shift down, skip add.
-- ldo 8(%0),%0 ; Yes. Perform add.
-- extru,= %1,27,4,%%r0 ; Bits 7..4 zero?
-- extru,tr %1,27,4,%1 ; No. Shift down, skip add.
-- ldo 4(%0),%0 ; Yes. Perform add.
-- extru,= %1,29,2,%%r0 ; Bits 3..2 zero?
-- extru,tr %1,29,2,%1 ; No. Shift down, skip add.
-- ldo 2(%0),%0 ; Yes. Perform add.
-- extru %1,30,1,%1 ; Extract bit 1.
-- sub %0,%1,%0 ; Subtract it.
-+ "ldi 1,%0\n\
-+ extru,= %1,15,16,%%r0 ; Bits 31..16 zero?\n\
-+ extru,tr %1,15,16,%1 ; No. Shift down, skip add.\n\
-+ ldo 16(%0),%0 ; Yes. Perform add.\n\
-+ extru,= %1,23,8,%%r0 ; Bits 15..8 zero?\n\
-+ extru,tr %1,23,8,%1 ; No. Shift down, skip add.\n\
-+ ldo 8(%0),%0 ; Yes. Perform add.\n\
-+ extru,= %1,27,4,%%r0 ; Bits 7..4 zero?\n\
-+ extru,tr %1,27,4,%1 ; No. Shift down, skip add.\n\
-+ ldo 4(%0),%0 ; Yes. Perform add.\n\
-+ extru,= %1,29,2,%%r0 ; Bits 3..2 zero?\n\
-+ extru,tr %1,29,2,%1 ; No. Shift down, skip add.\n\
-+ ldo 2(%0),%0 ; Yes. Perform add.\n\
-+ extru %1,30,1,%1 ; Extract bit 1.\n\
-+ sub %0,%1,%0 ; Subtract it.\n\
- " : "=r" (count), "=r" (__tmp) : "1" (x)); \
- } while (0)
- #endif /* hppa */
-@@ -392,7 +392,7 @@ extern USItype __udiv_qrnnd ();
-
- #if (defined (__i386__) || defined (__i486__)) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("addl %5,%1
-+ __asm__ ("addl %5,%1\n\
- adcl %3,%0" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -401,7 +401,7 @@ extern USItype __udiv_qrnnd ();
- "%1" ((USItype)(al)), \
- "g" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("subl %5,%1
-+ __asm__ ("subl %5,%1\n\
- sbbl %3,%0" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -514,7 +514,7 @@ extern USItype __udiv_qrnnd ();
-
- #if (defined (__mc68000__) || defined (__mc68020__) || defined (__NeXT__) || defined(mc68020)) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("add%.l %5,%1
-+ __asm__ ("add%.l %5,%1\n\
- addx%.l %3,%0" \
- : "=d" ((USItype)(sh)), \
- "=&d" ((USItype)(sl)) \
-@@ -523,7 +523,7 @@ extern USItype __udiv_qrnnd ();
- "%1" ((USItype)(al)), \
- "g" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("sub%.l %5,%1
-+ __asm__ ("sub%.l %5,%1\n\
- subx%.l %3,%0" \
- : "=d" ((USItype)(sh)), \
- "=&d" ((USItype)(sl)) \
-@@ -562,27 +562,27 @@ extern USItype __udiv_qrnnd ();
- #else /* not mc68020 */
- #define umul_ppmm(xh, xl, a, b) \
- do { USItype __umul_tmp1, __umul_tmp2; \
-- __asm__ ("| Inlined umul_ppmm
-- move%.l %5,%3
-- move%.l %2,%0
-- move%.w %3,%1
-- swap %3
-- swap %0
-- mulu %2,%1
-- mulu %3,%0
-- mulu %2,%3
-- swap %2
-- mulu %5,%2
-- add%.l %3,%2
-- jcc 1f
-- add%.l %#0x10000,%0
--1: move%.l %2,%3
-- clr%.w %2
-- swap %2
-- swap %3
-- clr%.w %3
-- add%.l %3,%1
-- addx%.l %2,%0
-+ __asm__ ("| Inlined umul_ppmm\n\
-+ move%.l %5,%3\n\
-+ move%.l %2,%0\n\
-+ move%.w %3,%1\n\
-+ swap %3\n\
-+ swap %0\n\
-+ mulu %2,%1\n\
-+ mulu %3,%0\n\
-+ mulu %2,%3\n\
-+ swap %2\n\
-+ mulu %5,%2\n\
-+ add%.l %3,%2\n\
-+ jcc 1f\n\
-+ add%.l %#0x10000,%0\n\
-+1: move%.l %2,%3\n\
-+ clr%.w %2\n\
-+ swap %2\n\
-+ swap %3\n\
-+ clr%.w %3\n\
-+ add%.l %3,%1\n\
-+ addx%.l %2,%0\n\
- | End inlined umul_ppmm" \
- : "=&d" ((USItype)(xh)), "=&d" ((USItype)(xl)), \
- "=d" (__umul_tmp1), "=&d" (__umul_tmp2) \
-@@ -595,7 +595,7 @@ extern USItype __udiv_qrnnd ();
-
- #if defined (__m88000__) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("addu.co %1,%r4,%r5
-+ __asm__ ("addu.co %1,%r4,%r5\n\
- addu.ci %0,%r2,%r3" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -604,7 +604,7 @@ extern USItype __udiv_qrnnd ();
- "%rJ" ((USItype)(al)), \
- "rJ" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("subu.co %1,%r4,%r5
-+ __asm__ ("subu.co %1,%r4,%r5\n\
- subu.ci %0,%r2,%r3" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -663,8 +663,8 @@ extern USItype __udiv_qrnnd ();
- "d" ((USItype)(v)))
- #else
- #define umul_ppmm(w1, w0, u, v) \
-- __asm__ ("multu %2,%3
-- mflo %0
-+ __asm__ ("multu %2,%3\n\
-+ mflo %0\n\
- mfhi %1" \
- : "=d" ((USItype)(w0)), \
- "=d" ((USItype)(w1)) \
-@@ -685,8 +685,8 @@ extern USItype __udiv_qrnnd ();
- "d" ((UDItype)(v)))
- #else
- #define umul_ppmm(w1, w0, u, v) \
-- __asm__ ("dmultu %2,%3
-- mflo %0
-+ __asm__ ("dmultu %2,%3\n\
-+ mflo %0\n\
- mfhi %1" \
- : "=d" ((UDItype)(w0)), \
- "=d" ((UDItype)(w1)) \
-@@ -855,7 +855,7 @@ extern USItype __udiv_qrnnd ();
-
- #if defined (__pyr__) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("addw %5,%1
-+ __asm__ ("addw %5,%1\n\
- addwc %3,%0" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -864,7 +864,7 @@ extern USItype __udiv_qrnnd ();
- "%1" ((USItype)(al)), \
- "g" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("subw %5,%1
-+ __asm__ ("subw %5,%1\n\
- subwb %3,%0" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -877,7 +877,7 @@ extern USItype __udiv_qrnnd ();
- ({union {UDItype __ll; \
- struct {USItype __h, __l;} __i; \
- } __xx; \
-- __asm__ ("movw %1,%R0
-+ __asm__ ("movw %1,%R0\n\
- uemul %2,%0" \
- : "=&r" (__xx.__ll) \
- : "g" ((USItype) (u)), \
-@@ -887,7 +887,7 @@ extern USItype __udiv_qrnnd ();
-
- #if defined (__ibm032__) /* RT/ROMP */ && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("a %1,%5
-+ __asm__ ("a %1,%5\n\
- ae %0,%3" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -896,7 +896,7 @@ extern USItype __udiv_qrnnd ();
- "%1" ((USItype)(al)), \
- "r" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("s %1,%5
-+ __asm__ ("s %1,%5\n\
- se %0,%3" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -908,25 +908,25 @@ extern USItype __udiv_qrnnd ();
- do { \
- USItype __m0 = (m0), __m1 = (m1); \
- __asm__ ( \
-- "s r2,r2
-- mts r10,%2
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- m r2,%3
-- cas %0,r2,r0
-+ "s r2,r2\n\
-+ mts r10,%2\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ m r2,%3\n\
-+ cas %0,r2,r0\n\
- mfs r10,%1" \
- : "=r" ((USItype)(ph)), \
- "=r" ((USItype)(pl)) \
-@@ -957,8 +957,8 @@ extern USItype __udiv_qrnnd ();
- #if defined (__sh2__) && W_TYPE_SIZE == 32
- #define umul_ppmm(w1, w0, u, v) \
- __asm__ ( \
-- "dmulu.l %2,%3
-- sts macl,%1
-+ "dmulu.l %2,%3\n\
-+ sts macl,%1\n\
- sts mach,%0" \
- : "=r" ((USItype)(w1)), \
- "=r" ((USItype)(w0)) \
-@@ -970,7 +970,7 @@ extern USItype __udiv_qrnnd ();
-
- #if defined (__sparc__) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("addcc %r4,%5,%1
-+ __asm__ ("addcc %r4,%5,%1\n\
- addx %r2,%3,%0" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -980,7 +980,7 @@ extern USItype __udiv_qrnnd ();
- "rI" ((USItype)(bl)) \
- __CLOBBER_CC)
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("subcc %r4,%5,%1
-+ __asm__ ("subcc %r4,%5,%1\n\
- subx %r2,%3,%0" \
- : "=r" ((USItype)(sh)), \
- "=&r" ((USItype)(sl)) \
-@@ -1027,44 +1027,44 @@ extern USItype __udiv_qrnnd ();
- "r" ((USItype)(v)))
- #define UMUL_TIME 5
- #define udiv_qrnnd(q, r, n1, n0, d) \
-- __asm__ ("! Inlined udiv_qrnnd
-- wr %%g0,%2,%%y ! Not a delayed write for sparclite
-- tst %%g0
-- divscc %3,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%%g1
-- divscc %%g1,%4,%0
-- rd %%y,%1
-- bl,a 1f
-- add %1,%4,%1
-+ __asm__ ("! Inlined udiv_qrnnd\n\
-+ wr %%g0,%2,%%y ! Not a delayed write for sparclite\n\
-+ tst %%g0\n\
-+ divscc %3,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%%g1\n\
-+ divscc %%g1,%4,%0\n\
-+ rd %%y,%1\n\
-+ bl,a 1f\n\
-+ add %1,%4,%1\n\
- 1: ! End of inline udiv_qrnnd" \
- : "=r" ((USItype)(q)), \
- "=r" ((USItype)(r)) \
-@@ -1085,45 +1085,45 @@ extern USItype __udiv_qrnnd ();
- /* Default to sparc v7 versions of umul_ppmm and udiv_qrnnd. */
- #ifndef umul_ppmm
- #define umul_ppmm(w1, w0, u, v) \
-- __asm__ ("! Inlined umul_ppmm
-- wr %%g0,%2,%%y ! SPARC has 0-3 delay insn after a wr
-- sra %3,31,%%g2 ! Don't move this insn
-- and %2,%%g2,%%g2 ! Don't move this insn
-- andcc %%g0,0,%%g1 ! Don't move this insn
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,%3,%%g1
-- mulscc %%g1,0,%%g1
-- add %%g1,%%g2,%0
-+ __asm__ ("! Inlined umul_ppmm\n\
-+ wr %%g0,%2,%%y ! SPARC has 0-3 delay insn after a wr\n\
-+ sra %3,31,%%g2 ! Don't move this insn\n\
-+ and %2,%%g2,%%g2 ! Don't move this insn\n\
-+ andcc %%g0,0,%%g1 ! Don't move this insn\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,%3,%%g1\n\
-+ mulscc %%g1,0,%%g1\n\
-+ add %%g1,%%g2,%0\n\
- rd %%y,%1" \
- : "=r" ((USItype)(w1)), \
- "=r" ((USItype)(w0)) \
-@@ -1147,7 +1147,7 @@ extern USItype __udiv_qrnnd ();
-
- #if defined (__vax__) && W_TYPE_SIZE == 32
- #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-- __asm__ ("addl2 %5,%1
-+ __asm__ ("addl2 %5,%1\n\
- adwc %3,%0" \
- : "=g" ((USItype)(sh)), \
- "=&g" ((USItype)(sl)) \
-@@ -1156,7 +1156,7 @@ extern USItype __udiv_qrnnd ();
- "%1" ((USItype)(al)), \
- "g" ((USItype)(bl)))
- #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-- __asm__ ("subl2 %5,%1
-+ __asm__ ("subl2 %5,%1\n\
- sbwc %3,%0" \
- : "=g" ((USItype)(sh)), \
- "=&g" ((USItype)(sl)) \
-diff --git a/ghc/lib/std/CPUTime.lhs b/ghc/lib/std/CPUTime.lhs
---- a/ghc/lib/std/CPUTime.lhs
-+++ b/ghc/lib/std/CPUTime.lhs
-@@ -9,6 +9,6 @@
- module CPUTime
- (
- getCPUTime, -- :: IO Integer
-- cpuTimePrecision -- :: Integer
-+ cpuTimePrecision -- :: Integer
- ) where
- \end{code}
- \ No newline at end of file
diff --git a/gnu/packages/patches/giara-fix-login.patch b/gnu/packages/patches/giara-fix-login.patch
deleted file mode 100644
index f15caaf559..0000000000
--- a/gnu/packages/patches/giara-fix-login.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From f4adc1f52d777cea3331b4acae0c3bdec4d0eb70 Mon Sep 17 00:00:00 2001
-From: Paper <paper@tilde.institute>
-Date: Fri, 12 Mar 2021 21:10:34 +0000
-Subject: [PATCH] fix login by removing everything after # from URL
-
----
- giara/__main__.py | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/giara/__main__.py b/giara/__main__.py
-index 10b9ae2..d235332 100644
---- a/giara/__main__.py
-+++ b/giara/__main__.py
-@@ -53,8 +53,9 @@ class GApplication(Gtk.Application):
- def open(self, app, files, *args):
- target = files[0].get_uri()
- print(target)
-+ code = target.split('=')[-1].split('#')[0]
- get_authorized_client(
-- reddit=self._unauth_reddit, code=target.split('=')[-1]
-+ reddit=self._unauth_reddit, code=code
- )
- self.continue_activate(self._unauth_reddit)
-
---
-GitLab
-
diff --git a/gnu/packages/patches/gromacs-tinyxml2.patch b/gnu/packages/patches/gromacs-tinyxml2.patch
index cc7d7459a8..6f11e174df 100644
--- a/gnu/packages/patches/gromacs-tinyxml2.patch
+++ b/gnu/packages/patches/gromacs-tinyxml2.patch
@@ -1,10 +1,7 @@
Unbundling tinyxml2 from gromacs and using our own, which is newer, broke gromacs
build.
-This patch fixes three issues:
-
-- cmake now errors out if using multiple target_link_libraries with mixed styles
- of signatures.
+This patch fixes this issue:
- Error handling API changed, fix the testutils/refdata_xml.cpp code by using the
new API: document.ErrorStr() & tinyxml2::XML_SUCCESS.
@@ -15,22 +12,25 @@ there as long as they still keep the old version bundled.
First hunk has already been requested for merging. Third is in discussion. Second
will only be sent if third is OK'ed.
-diff -ruN gromacs-2020.2/src/testutils/CMakeLists.txt gromacs-2020.2-fixed/src/testutils/CMakeLists.txt
---- gromacs-2020.2/src/testutils/CMakeLists.txt 2020-04-30 18:33:44.000000000 +0200
-+++ gromacs-2020.2-fixed/src/testutils/CMakeLists.txt 2020-05-01 22:52:16.356000000 +0200
-@@ -73,7 +73,7 @@
-
- if(HAVE_TINYXML2)
- include_directories(SYSTEM ${TinyXML2_INCLUDE_DIR})
-- target_link_libraries(testutils ${TinyXML2_LIBRARIES})
-+ target_link_libraries(testutils PRIVATE ${TinyXML2_LIBRARIES})
- else()
- include_directories(BEFORE SYSTEM "../external/tinyxml2")
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index cd748c9..1e90c95 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -539,9 +539,6 @@ if(GMX_EXTERNAL_TINYXML2)
+ if(NOT HAVE_TINYXML2)
+ message(FATAL_ERROR "External TinyXML-2 could not be found, please adjust your search paths")
+ endif()
+- if (TinyXML2_FOUND AND TinyXML2_VERSION VERSION_GREATER "6")
+- message(FATAL_ERROR "External TinyXML-2 is later than the highest supported version 6. Please adjust your search paths to include a supported version")
+- endif()
endif()
-diff -ruN gromacs-2020.2/src/testutils/refdata_xml.cpp gromacs-2020.2-fixed/src/testutils/refdata_xml.cpp
---- gromacs-2020.2/src/testutils/refdata_xml.cpp 2020-04-30 18:33:44.000000000 +0200
-+++ gromacs-2020.2-fixed/src/testutils/refdata_xml.cpp 2020-05-01 23:17:09.556000000 +0200
-@@ -206,21 +206,12 @@
+
+ option(GMX_EXTRAE "Add support for tracing using EXTRAE" OFF)
+diff --git a/src/testutils/refdata_xml.cpp b/src/testutils/refdata_xml.cpp
+index 0eb2209..57cebff 100644
+--- a/src/testutils/refdata_xml.cpp
++++ b/src/testutils/refdata_xml.cpp
+@@ -206,21 +206,12 @@ ReferenceDataEntry::EntryPointer readReferenceDataFile(const std::string& path)
document.LoadFile(path.c_str());
if (document.Error())
{
@@ -56,7 +56,7 @@ diff -ruN gromacs-2020.2/src/testutils/refdata_xml.cpp gromacs-2020.2-fixed/src/
GMX_THROW(TestException("Reference data not parsed successfully: " + path + "\n."
+ errorString + "\n"));
}
-@@ -371,7 +362,7 @@
+@@ -371,7 +362,7 @@ void writeReferenceDataFile(const std::string& path, const ReferenceDataEntry& r
XMLElementPtr rootElement = createRootElement(&document);
createChildElements(rootElement, rootEntry);
diff --git a/gnu/packages/patches/guile-email-fix-tests.patch b/gnu/packages/patches/guile-email-fix-tests.patch
deleted file mode 100644
index 8621aaea02..0000000000
--- a/gnu/packages/patches/guile-email-fix-tests.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-This patch has been proposed upstream:
-https://lists.systemreboot.net/guile-email/87mtnv1r2p.fsf@gnu.org/T/#u.
-
-From c21fe0f0e28b80b606973d3e372e2bc8528c9766 Mon Sep 17 00:00:00 2001
-From: Mathieu Othacehe <othacehe@gnu.org>
-Date: Wed, 29 Sep 2021 12:47:35 +0000
-Subject: [PATCH 1/1] email: Do not use an empty bytevector to test the
- charset.
-
-Using an empty bytevector no longer throws an exception since this Guile
-commit: 5ea8c69e9153a970952bf6f0b32c4fad6a28e839.
-
-* email/email.scm (post-process-content-transfer-encoding): Use a bytevector
-containg the 'e' character to test the charset validity.
----
- email/email.scm | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/email/email.scm b/email/email.scm
-index 3f4e194..ac70463 100644
---- a/email/email.scm
-+++ b/email/email.scm
-@@ -832,7 +832,8 @@ values. The returned headers is a string and body is a bytevector."
- (define (valid-charset? charset)
- (catch #t
- (lambda ()
-- (bytevector->string (make-bytevector 0 0) charset)
-+ ;; Try to convert a bytevector containg the 'e' character.
-+ (bytevector->string (make-bytevector 1 48) charset)
- #t)
- (const #f)))
-
---
-2.33.0
-
diff --git a/gnu/packages/patches/guile-fibers-fd-finalizer-leak.patch b/gnu/packages/patches/guile-fibers-fd-finalizer-leak.patch
new file mode 100644
index 0000000000..2d8f9c2cf5
--- /dev/null
+++ b/gnu/packages/patches/guile-fibers-fd-finalizer-leak.patch
@@ -0,0 +1,54 @@
+Upstream fix for a memory leak introduced in Fibers 1.1.0 that would manifest
+in shepherd:
+
+ https://github.com/wingo/fibers/issues/65
+ https://issues.guix.gnu.org/58631
+
+diff --git a/fibers/scheduler.scm b/fibers/scheduler.scm
+index 2b03941..760b037 100644
+--- a/fibers/scheduler.scm
++++ b/fibers/scheduler.scm
+@@ -182,8 +182,10 @@ remote kernel thread."
+ (#f (warn "scheduler for unknown fd" fd))
+ ((and events+waiters (active-events . waiters))
+ ;; First, clear the active status, as the EPOLLONESHOT has
+- ;; deactivated our entry in the epoll set.
+- (set-car! events+waiters #f)
++ ;; deactivated our entry in the epoll set. Set the car to 0, not #f, so
++ ;; that 'schedule-tasks-for-active-fd' doesn't end up re-adding a
++ ;; finalizer on FD.
++ (set-car! events+waiters 0)
+ (set-cdr! events+waiters '())
+ (unless (zero? (logand revents (logior EPOLLHUP EPOLLERR)))
+ (hashv-remove! (scheduler-fd-waiters sched) fd))
+@@ -336,21 +338,19 @@ expressed as an epoll bitfield."
+
+ (let ((fd-waiters (hashv-ref (scheduler-fd-waiters sched) fd)))
+ (match fd-waiters
+- ((active-events . waiters)
+- (set-cdr! fd-waiters (acons events task waiters))
+- (unless (and active-events
+- (= (logand events active-events) events))
+- (let ((active-events (logior events (or active-events 0))))
+- (set-car! fd-waiters active-events)
+- (add-fdes-finalizer! fd (fd-finalizer fd-waiters))
+- (epoll-add*! (scheduler-epfd sched) fd
+- (logior active-events EPOLLONESHOT)))))
+- (#f
++ ((or #f (#f)) ;FD is new or was finalized
+ (let ((fd-waiters (list events (cons events task))))
+ (hashv-set! (scheduler-fd-waiters sched) fd fd-waiters)
+ (add-fdes-finalizer! fd (fd-finalizer fd-waiters))
+ (epoll-add*! (scheduler-epfd sched) fd
+- (logior events EPOLLONESHOT)))))))
++ (logior events EPOLLONESHOT))))
++ ((active-events . waiters)
++ (set-cdr! fd-waiters (acons events task waiters))
++ (unless (= (logand events active-events) events)
++ (let ((active-events (logior events active-events)))
++ (set-car! fd-waiters active-events)
++ (epoll-add*! (scheduler-epfd sched) fd
++ (logior active-events EPOLLONESHOT))))))))
+
+ (define (schedule-task-when-fd-readable sched fd task)
+ "Arrange to schedule @var{task} on @var{sched} when the file
diff --git a/gnu/packages/patches/icecat-use-older-reveal-hidden-html.patch b/gnu/packages/patches/icecat-use-older-reveal-hidden-html.patch
deleted file mode 100644
index 228adc23f1..0000000000
--- a/gnu/packages/patches/icecat-use-older-reveal-hidden-html.patch
+++ /dev/null
@@ -1,70 +0,0 @@
-From 2e8618d22568b6e00892a17303d437dd700eca98 Mon Sep 17 00:00:00 2001
-From: Mark H Weaver <mhw@netris.org>
-Date: Tue, 5 May 2020 21:27:41 -0400
-Subject: [PATCH] Revert "Update Reveal hidden HTML."
-
-I prefer the user interface of the old version.
-
-This reverts commit f6e3adb6b2344ee2c7bb453a305fd2d6fb4c194c.
----
- .../passive_improve_css.js | 23 ++++++++++++++-----
- 1 file changed, 17 insertions(+), 6 deletions(-)
-
-diff --git a/data/extensions/SubmitMe@0xbeef.coffee/passive_improve_css.js b/data/extensions/SubmitMe@0xbeef.coffee/passive_improve_css.js
-index 7692990..ca57982 100644
---- a/data/extensions/SubmitMe@0xbeef.coffee/passive_improve_css.js
-+++ b/data/extensions/SubmitMe@0xbeef.coffee/passive_improve_css.js
-@@ -10,6 +10,8 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
- */
-
-
-+var bad = [];
-+
- function escapeHTML (unsafe_str) {
- return unsafe_str
- .replace(/&/g, '&amp;')
-@@ -20,9 +22,14 @@ function escapeHTML (unsafe_str) {
- .replace(/\//g, '&#x2F;')
- }
-
-+function dounhide(){
-+ for(var i = 0; i < bad.length; i++){
-+ bad[i].remove();
-+ }
-+}
-+
- console.log("passive_improve_css.js");
- function reveal_css(){
-- var bad = [];
-
- var elements = document.getElementsByTagName("style");
- for(var i = 0; i < elements.length; i++){
-@@ -41,16 +48,20 @@ function reveal_css(){
-
- }
- }
-- if(bad.length > 0 && window.confirm("Hidden HTML detected, would you like to reveal it?")){
-- for(var i = 0; i < bad.length; i++){
-- bad[i].remove();
-- }
-+ if(bad.length > 0){
-+ const insertedDiv = document.createElement('div');
-+ insertedDiv.innerHTML= '<p id="unhide" class="button white" style="text-decoration:none!important; color:#000!important; font-size:1em !important; font-family:\'sans-serif\'!important; font-weight:normal !important; background-color:transparent!important; margin:0!important; padding:0!important; font-size:10px!important; line-height:1!important"' +
-+ 'alt="Click to reveal hidden elements in this page">' +
-+ '<span>Reveal hidden elements</span>' +
-+ '</a>';
-+ insertedDiv.style="position:fixed; bottom:1em; right:1em; opacity:0.8; z-index: 2147483647 !important; border-radius: 3px !important; background-color: #fff !important; padding: 0.5em !important; box-shadow: 0 0 3px grey !important; font-color:#bbb!important; cursor: pointer!important;";
-+ insertedDiv.addEventListener("click", dounhide, false);
-+ document.body.insertBefore(insertedDiv, document.body.firstChild);
- }
- }
-
- reveal_css();
-
--
- /*
- var a = document.getElementsByTagName("style")[2];
- var btn = document.createElement("style"); // Create a <button> element
---
-2.26.2
-
diff --git a/gnu/packages/patches/jami-disable-integration-tests.patch b/gnu/packages/patches/jami-disable-integration-tests.patch
new file mode 100644
index 0000000000..1efb7d552e
--- /dev/null
+++ b/gnu/packages/patches/jami-disable-integration-tests.patch
@@ -0,0 +1,111 @@
+From 3fd7acc6e5a8c316c366827956d7c06affdc9685 Mon Sep 17 00:00:00 2001
+From: Olivier Dion <olivier.dion@savoirfairelinux.com>
+Date: Fri, 7 Oct 2022 15:20:37 -0400
+Subject: [PATCH] test/unitTest: Seperate pure tests from integration tests
+
+So we can now do `make check` in a reproducible way.
+
+To enable integration tests `./configure --enable-integration-tests`.
+
+Change-Id: Iaa68c77abade9fe78bae881abc25e034bcb6a067
+---
+With file names adjusted for the source tarball.
+
+ configure.ac | 5 ++++
+ test/unitTest/Makefile.am | 52 +++++++++++++++++++++------------------
+ 2 files changed, 33 insertions(+), 24 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index 27215d044..aaeef1c1b 100644
+--- a/daemon/configure.ac
++++ b/daemon/configure.ac
+@@ -40,6 +40,11 @@ AS_IF([test "x$enable_debug" = "xyes"],
+ [DAEMONCFLAGS+=" -DNDEBUG=1 -O3"
+ DAEMONCXXFLAGS+=" -DNDEBUG=1 -O3 -Wno-deprecated"])
+
++AC_ARG_ENABLE([enable_integration_tests],
++ AS_HELP_STRING([--enable-integration-tests],
++ [Also run integration tests with check target]))
++AM_CONDITIONAL([ENABLE_INTEGRATION_TESTS], [test "x$enable_integration_tests" = "xyes"])
++
+ AC_ARG_ENABLE([fuzzing],
+ AS_HELP_STRING([--enable-fuzzing],
+ [Build fuzzing tools]))
+diff --git a/test/unitTest/Makefile.am b/test/unitTest/Makefile.am
+index f2b0fd994..ab232ddb7 100644
+--- a/daemon/test/unitTest/Makefile.am
++++ b/daemon/test/unitTest/Makefile.am
+@@ -13,30 +13,6 @@ check_PROGRAMS =
+
+ ####### Unit Test ########
+
+-#
+-# account_factory
+-#
+-check_PROGRAMS += ut_account_factory
+-ut_account_factory_SOURCES = account_factory/testAccount_factory.cpp common.cpp
+-
+-#
+-# account_archive
+-#
+-check_PROGRAMS += ut_account_archive
+-ut_account_archive_SOURCES = account_archive/account_archive.cpp common.cpp
+-
+-#
+-# migration
+-#
+-check_PROGRAMS += ut_migration
+-ut_migration_SOURCES = account_archive/migration.cpp common.cpp
+-
+-#
+-# certstore
+-#
+-check_PROGRAMS += ut_certstore
+-ut_certstore_SOURCES = certstore.cpp common.cpp
+-
+ #
+ # scheduler
+ #
+@@ -127,6 +103,32 @@ ut_video_scaler_SOURCES = media/video/test_video_scaler.cpp common.cpp
+ check_PROGRAMS += ut_audio_frame_resizer
+ ut_audio_frame_resizer_SOURCES = media/audio/test_audio_frame_resizer.cpp common.cpp
+
++if ENABLE_INTEGRATION_TESTS
++
++#
++# account_factory
++#
++check_PROGRAMS += ut_account_factory
++ut_account_factory_SOURCES = account_factory/testAccount_factory.cpp common.cpp
++
++#
++# account_archive
++#
++check_PROGRAMS += ut_account_archive
++ut_account_archive_SOURCES = account_archive/account_archive.cpp common.cpp
++
++#
++# migration
++#
++check_PROGRAMS += ut_migration
++ut_migration_SOURCES = account_archive/migration.cpp common.cpp
++
++#
++# certstore
++#
++check_PROGRAMS += ut_certstore
++ut_certstore_SOURCES = certstore.cpp common.cpp
++
+ #
+ # call
+ #
+@@ -237,4 +239,6 @@ ut_sip_srtp_SOURCES = sip_account/sip_srtp.cpp
+ check_PROGRAMS += ut_plugins
+ ut_plugins_SOURCES = plugins/plugins.cpp common.cpp
+
++endif # ENABLE_INTEGRATION_TESTS
++
+ TESTS = $(check_PROGRAMS)
+--
+2.37.3
+
diff --git a/gnu/packages/patches/jami-fix-crash-on-block-contact.patch b/gnu/packages/patches/jami-fix-crash-on-block-contact.patch
deleted file mode 100644
index 60dab79296..0000000000
--- a/gnu/packages/patches/jami-fix-crash-on-block-contact.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From 673dc5f525c9d478fc22f8ea0a50d9849a81f6c8 Mon Sep 17 00:00:00 2001
-From: Kateryna Kostiuk <kateryna.kostiuk@savoirfairelinux.com>
-Date: Fri, 2 Sep 2022 13:32:10 -0400
-Subject: [PATCH] conversation: fix crash when block contact
-
-This patch fixes crash that happened when block contact
-for not active account.
-
-GitLab: #758
-Change-Id: I5347394a67cdffe0d95c9ee03aedf9d2618cec55
----
- src/jamidht/jamiaccount.cpp | 4 +++-
- 1 file changed, 3 insertions(+), 1 deletion(-)
-
-diff --git a/daemon/src/jamidht/jamiaccount.cpp b/daemon/src/jamidht/jamiaccount.cpp
-index fe15eedb2..9d0a5ee68 100644
---- a/daemon/src/jamidht/jamiaccount.cpp
-+++ b/daemon/src/jamidht/jamiaccount.cpp
-@@ -1129,7 +1129,9 @@ JamiAccount::loadAccount(const std::string& archive_password,
- if (auto convModule = shared->convModule())
- convModule->removeContact(uri, banned);
- // Remove current connections with contact
-- shared->connectionManager_->closeConnectionsWith(uri);
-+ if (shared->connectionManager_) {
-+ shared->connectionManager_->closeConnectionsWith(uri);
-+ }
- // Update client.
- emitSignal<DRing::ConfigurationSignal::ContactRemoved>(shared->getAccountID(),
- uri,
---
-2.37.3
-
diff --git a/gnu/packages/patches/jami-libjami-headers-search.patch b/gnu/packages/patches/jami-libjami-headers-search.patch
new file mode 100644
index 0000000000..44e099a610
--- /dev/null
+++ b/gnu/packages/patches/jami-libjami-headers-search.patch
@@ -0,0 +1,109 @@
+From 1168aba6181e30ee4c322e5f437bde22bece5698 Mon Sep 17 00:00:00 2001
+From: Maxim Cournoyer <maxim.cournoyer@savoirfairelinux.com>
+Date: Sat, 5 Nov 2022 23:18:13 -0400
+Subject: [PATCH 1/3] cmake: Simplify lookup logic for libjami headers.
+
+* extras/build/cmake/modules/FindLibJami.cmake: Rename
+LIBJAMI_INCLUDE_DIRS to LIBJAMI_INCLUDE_DIR. Use find_path to search
+for jami.h directory.
+* CMakeLists.txt: Adjust accordingly.
+* src/libclient/CMakeLists.txt: Likewise.
+* src/libclient/qtwrapper/CMakeLists.txt: Likewise.
+
+Change-Id: I494358f9bfafb41f000daeec4196747b2c184401
+---
+Upstream status: https://review.jami.net/c/jami-client-qt/+/22973
+
+ CMakeLists.txt | 2 +-
+ extras/build/cmake/modules/FindLibJami.cmake | 21 ++++++--------------
+ src/libclient/CMakeLists.txt | 2 +-
+ src/libclient/qtwrapper/CMakeLists.txt | 2 +-
+ 4 files changed, 9 insertions(+), 18 deletions(-)
+
+diff --git a/client-qt/CMakeLists.txt b/CMakeLists.txt
+index d0a8fd70..94ac6074 100644
+--- a/client-qt/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -118,7 +118,7 @@ set(CMAKE_MODULE_PATH
+ ${CMAKE_MODULE_PATH} "${EXTRAS_DIR}/build/cmake/modules")
+ find_package(LibJami REQUIRED)
+ if(LIBJAMI_FOUND)
+- include_directories(${LIBJAMI_INCLUDE_DIRS})
++ include_directories(${LIBJAMI_INCLUDE_DIR})
+ endif()
+
+ include(FindPython3)
+diff --git a/client-qt/extras/build/cmake/modules/FindLibJami.cmake b/extras/build/cmake/modules/FindLibJami.cmake
+index ddb05319..9ad20d2b 100644
+--- a/client-qt/extras/build/cmake/modules/FindLibJami.cmake
++++ b/extras/build/cmake/modules/FindLibJami.cmake
+@@ -20,28 +20,19 @@
+
+ # Once done, this find module will set:
+ #
+-# LIBJAMI_INCLUDE_DIRS - libjami include directories
++# LIBJAMI_INCLUDE_DIR - libjami include directories
+ # LIBJAMI_FOUND - whether it was able to find the include directories
+ # LIBJAMI_LIB - path to libjami or libring library
+
+ set(LIBJAMI_FOUND true)
+
+ if(WITH_DAEMON_SUBMODULE)
+- set(LIBJAMI_INCLUDE_DIRS ${DAEMON_DIR}/src/jami)
++ set(LIBJAMI_INCLUDE_DIR ${DAEMON_DIR}/src/jami)
+ else()
+- if(EXISTS ${LIBJAMI_INCLUDE_DIR}/jami.h)
+- set(LIBJAMI_INCLUDE_DIRS ${LIBJAMI_INCLUDE_DIR})
+- elseif(EXISTS ${LIBJAMI_BUILD_DIR}/jami/jami.h)
+- set(LIBJAMI_INCLUDE_DIRS ${LIBJAMI_BUILD_DIR}/jami)
+- elseif(EXISTS ${RING_INCLUDE_DIR}/jami.h)
+- set(LIBJAMI_INCLUDE_DIRS ${RING_INCLUDE_DIR})
+- elseif(EXISTS ${RING_BUILD_DIR}/jami/jami.h)
+- set(LIBJAMI_INCLUDE_DIRS ${RING_BUILD_DIR}/jami)
+- elseif(EXISTS ${CMAKE_INSTALL_PREFIX}/include/jami/jami.h)
+- set(LIBJAMI_INCLUDE_DIRS ${CMAKE_INSTALL_PREFIX}/include/jami)
+- else()
++ find_path(LIBJAMI_INCLUDE_DIR jami.h PATH_SUFFIXES jami)
++ if(NOT LIBJAMI_INCLUDE_DIR)
+ message(STATUS "Jami daemon headers not found!
+-Set -DLIBJAMI_BUILD_DIR or -DCMAKE_INSTALL_PREFIX")
++Set -DCMAKE_INSTALL_PREFIX or use -DWITH_DAEMON_SUBMODULE")
+ set(LIBJAMI_FOUND false)
+ endif()
+ endif()
+@@ -102,5 +93,5 @@ endif()
+ # Restore the original value of CMAKE_FIND_LIBRARY_SUFFIXES.
+ set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_orig})
+
+-message(STATUS "Jami daemon headers are in " ${LIBJAMI_INCLUDE_DIRS})
++message(STATUS "Jami daemon headers are in " ${LIBJAMI_INCLUDE_DIR})
+ message(STATUS "Jami daemon library is at " ${LIBJAMI_LIB})
+diff --git a/client-qt/src/libclient/CMakeLists.txt b/src/libclient/CMakeLists.txt
+index 2676c9c4..ac58ea2b 100644
+--- a/client-qt/src/libclient/CMakeLists.txt
++++ b/src/libclient/CMakeLists.txt
+@@ -62,7 +62,7 @@ set(CMAKE_MODULE_PATH
+ ${CMAKE_MODULE_PATH} "${EXTRAS_DIR}/build/cmake/modules")
+ find_package(LibJami REQUIRED)
+ if(LIBJAMI_FOUND)
+- include_directories(${LIBJAMI_INCLUDE_DIRS})
++ include_directories(${LIBJAMI_INCLUDE_DIR})
+ endif()
+
+ string(SUBSTRING ${CMAKE_GENERATOR} 0 14 CMAKE_GENERATOR_SHORT)
+diff --git a/client-qt/src/libclient/qtwrapper/CMakeLists.txt b/src/libclient/qtwrapper/CMakeLists.txt
+index acee0d0c..ba68aac4 100644
+--- a/client-qt/src/libclient/qtwrapper/CMakeLists.txt
++++ b/client-qt/src/libclient/qtwrapper/CMakeLists.txt
+@@ -46,7 +46,7 @@ else()
+ endif()
+
+ include_directories(${CMAKE_CURRENT_SOURCE_DIR})
+-include_directories(${LIBJAMI_INCLUDE_DIRS})
++include_directories(${LIBJAMI_INCLUDE_DIR})
+ include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../)
+ include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../dbus)
+
+--
+2.37.3
+
diff --git a/gnu/packages/patches/julia-SOURCE_DATE_EPOCH-mtime.patch b/gnu/packages/patches/julia-SOURCE_DATE_EPOCH-mtime.patch
index b60f284923..c6ca48fff0 100644
--- a/gnu/packages/patches/julia-SOURCE_DATE_EPOCH-mtime.patch
+++ b/gnu/packages/patches/julia-SOURCE_DATE_EPOCH-mtime.patch
@@ -8,15 +8,16 @@ Patch by Nicoló Balzarotti <nicolo@nixo.xyz>.
--- a/base/loading.jl
+++ b/base/loading.jl
-@@ -807,7 +807,10 @@
- path = normpath(joinpath(dirname(prev), _path))
+@@ -1131,7 +1131,10 @@ function _include_dependency(mod::Module, _path::AbstractString)
end
if _track_dependencies[]
+ @lock require_lock begin
- push!(_require_dependencies, (mod, path, mtime(path)))
+ push!(_require_dependencies,
+ (mod, path,
+ haskey(ENV, "SOURCE_DATE_EPOCH") ?
+ parse(Float64, ENV["SOURCE_DATE_EPOCH"]) : mtime(path)))
+ end
end
return path, prev
end
diff --git a/gnu/packages/patches/julia-allow-parallel-build.patch b/gnu/packages/patches/julia-allow-parallel-build.patch
deleted file mode 100644
index cc1d42fee4..0000000000
--- a/gnu/packages/patches/julia-allow-parallel-build.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-Allow parallel tests with isolated environment.
-
-See https://github.com/JuliaLang/julia/issues/43205 and
-https://github.com/JuliaLang/julia/pull/43211.
-
-diff --git a/test/runtests.jl b/test/runtests.jl
-index 2f9cd058bb..150395e78c 100644
---- a/test/runtests.jl
-+++ b/test/runtests.jl
-@@ -4,7 +4,7 @@ using Test
- using Distributed
- using Dates
- import REPL
--using Printf: @sprintf
-+using Printf: @sprintf, @printf
- using Base: Experimental
-
- include("choosetests.jl")
-@@ -83,11 +83,12 @@ prepend!(tests, linalg_tests)
- import LinearAlgebra
- cd(@__DIR__) do
- n = 1
-- if net_on
-+ if net_on || haskey(ENV, "JULIA_CPU_THREADS")
- n = min(Sys.CPU_THREADS, length(tests))
- n > 1 && addprocs_with_testenv(n)
- LinearAlgebra.BLAS.set_num_threads(1)
- end
-+ @printf("Number of threads: %i\n", n)
- skipped = 0
-
- @everywhere include("testdefs.jl")
diff --git a/gnu/packages/patches/julia-tracker-16-compat.patch b/gnu/packages/patches/julia-tracker-16-compat.patch
deleted file mode 100644
index 4fff423e44..0000000000
--- a/gnu/packages/patches/julia-tracker-16-compat.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-https://github.com/FluxML/Tracker.jl/commit/f6550ba38a9ea5802e2de4fa9c939929ba711f0d.patch
-from an upstream pull request
-https://github.com/FluxML/Tracker.jl/pull/94
-
-
-From f6550ba38a9ea5802e2de4fa9c939929ba711f0d Mon Sep 17 00:00:00 2001
-From: Michael Abbott <me@escbook>
-Date: Wed, 3 Feb 2021 22:58:33 +0100
-Subject: [PATCH] two fixes for 1.6
-
----
- src/lib/array.jl | 2 +-
- src/lib/real.jl | 1 +
- 2 files changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/src/lib/array.jl b/src/lib/array.jl
-index 92f2b39..f8cbbac 100644
---- a/src/lib/array.jl
-+++ b/src/lib/array.jl
-@@ -298,7 +298,7 @@ Base.reverse(xs::TrackedArray; dims) = track(reverse, xs, dims = dims)
- @grad reverse(xs; dims) = reverse(data(xs), dims = dims), Δ -> (reverse(Δ, dims = dims), nothing)
- Base.reverse(xs::TrackedVector) = track(reverse, xs)
- @grad reverse(xs::TrackedVector) = reverse(data(xs)), Δ -> (reverse(Δ),)
--Base.reverse(xs::TrackedVector, start, stop) = track(reverse, xs, start, stop)
-+Base.reverse(xs::TrackedVector, start::Integer, stop::Integer) = track(reverse, xs, start, stop)
- @grad reverse(xs, start, stop) = reverse(data(xs), start, stop), Δ -> (reverse(Δ, start, stop), nothing, nothing)
-
- function _kron(mat1::AbstractMatrix,mat2::AbstractMatrix)
-diff --git a/src/lib/real.jl b/src/lib/real.jl
-index 737afd8..e1975ac 100644
---- a/src/lib/real.jl
-+++ b/src/lib/real.jl
-@@ -55,6 +55,7 @@ for f in :[isinf, isnan, isfinite].args
- end
-
- Printf.fix_dec(x::TrackedReal, n::Int, a...) = Printf.fix_dec(data(x), n, a...)
-+Printf.tofloat(x::TrackedReal) = Printf.tofloat(data(x))
-
- Base.float(x::TrackedReal) = x
-
diff --git a/gnu/packages/patches/kcontacts-incorrect-country-name.patch b/gnu/packages/patches/kcontacts-incorrect-country-name.patch
new file mode 100644
index 0000000000..7e15f9f5eb
--- /dev/null
+++ b/gnu/packages/patches/kcontacts-incorrect-country-name.patch
@@ -0,0 +1,85 @@
+From 5fc2ce8b9f34ea3218a030aeede01b70bedb2546 Mon Sep 17 00:00:00 2001
+From: Petr Hodina <phodina@protonmail.com>
+Date: Wed, 28 Sep 2022 20:58:43 +0200
+Subject: [PATCH] autotests: Fix incorrect name of country in comparison.
+
+
+diff --git a/autotests/addresstest.cpp b/autotests/addresstest.cpp
+index fc6a9bc7..01e474b6 100644
+--- a/autotests/addresstest.cpp
++++ b/autotests/addresstest.cpp
+@@ -157,7 +157,7 @@ void AddressTest::formatTest()
+
+ const QString result(
+ QStringLiteral("Jim Knopf\nLummerlandstr. 1\n"
+- "12345 Lummerstadt\n\nGERMANIA"));
++ "12345 Lummerstadt\n\nGERMANY"));
+
+ QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal, QStringLiteral("Jim Knopf")), result);
+ #if KCONTACTS_BUILD_DEPRECATED_SINCE(5, 92)
+@@ -175,7 +175,7 @@ void AddressTest::formatTest()
+
+ const QString result(
+ QStringLiteral("Huck Finn\n457 Foobar Ave\nNERVOUSBREAKTOWN,"
+- " DC 1A2B3C\n\nSTATI UNITI"));
++ " DC 1A2B3C\n\nUNITED STATES"));
+ QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal, QStringLiteral("Huck Finn")), result);
+ }
+
+@@ -188,7 +188,7 @@ void AddressTest::formatTest()
+
+ const QString result(
+ QStringLiteral("Jim Knopf\nLummerlandstr. 1\n"
+- "12345 Lummerstadt\n\nGERMANIA"));
++ "12345 Lummerstadt\n\nGERMANY"));
+
+ QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal, QStringLiteral("Jim Knopf")), result);
+ }
+@@ -214,7 +214,7 @@ void AddressTest::formatTest()
+ address.setCountry(QStringLiteral("Schweiz"));
+
+ // we want the Italian variant of the Swiss format for it_CH
+- const QString result(QStringLiteral("Dr. Konqui\nCasella postale 5678\nHaus Randa\n1234 Randa\n\nSVIZZERA"));
++ const QString result(QStringLiteral("Dr. Konqui\nCasella postale 5678\nHaus Randa\n1234 Randa\n\nSWITZERLAND"));
+
+ QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal, QStringLiteral("Dr. Konqui")), result);
+ }
+@@ -228,14 +228,14 @@ void AddressTest::formatTest()
+ address.setCountry(QStringLiteral("CH"));
+
+ // we want the Italian variant of the Swiss format for it_CH
+- const QString result(QStringLiteral("Dr. Konqui\nCasella postale 5678\nHaus Randa\n1234 Randa\n\nSVIZZERA"));
++ const QString result(QStringLiteral("Dr. Konqui\nCasella postale 5678\nHaus Randa\n1234 Randa\n\nSWITZERLAND"));
+ QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal, QStringLiteral("Dr. Konqui")), result);
+ }
+
+ {
+ KContacts::Address address;
+ address.setCountry(QStringLiteral("CH"));
+- QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal, QString()), QLatin1String("SVIZZERA"));
++ QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal, QString()), QLatin1String("SWITZERLAND"));
+ }
+
+ {
+@@ -257,14 +257,14 @@ void AddressTest::formatTest()
+ address.setLocality(QStringLiteral("Minato-ku"));
+ address.setPostalCode(QStringLiteral("106-0047"));
+ address.setStreet(QStringLiteral("4-6-28 Minami-Azabu"));
+- auto result = QString::fromUtf8("4-6-28 Minami-Azabu, Minato-ku\nTOKYO 106-0047\n\nGIAPPONE");
++ auto result = QString::fromUtf8("4-6-28 Minami-Azabu, Minato-ku\nTOKYO 106-0047\n\nJAPAN");
+ QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal), result);
+
+ address.setRegion(QStringLiteral("東京"));
+ address.setLocality(QStringLiteral("都港区"));
+ address.setPostalCode(QStringLiteral("106-0047"));
+ address.setStreet(QStringLiteral("南麻布 4-6-28"));
+- result = QString::fromUtf8("〒106-0047\n東京都港区南麻布 4-6-28\n\nGIAPPONE");
++ result = QString::fromUtf8("〒106-0047\n東京都港区南麻布 4-6-28\n\nJAPAN");
+ QCOMPARE(address.formatted(KContacts::AddressFormatStyle::Postal), result);
+ }
+ }
+
+base-commit: 1d757eba019718cab5d3b33a231d19daf31eb8ba
+--
+2.37.2
+
diff --git a/gnu/packages/patches/kde-cli-tools-delay-mime-db.patch b/gnu/packages/patches/kde-cli-tools-delay-mime-db.patch
new file mode 100644
index 0000000000..e29b3b192d
--- /dev/null
+++ b/gnu/packages/patches/kde-cli-tools-delay-mime-db.patch
@@ -0,0 +1,26 @@
+From f8cfb96a1540fc3256af95adf8003b75c305183c Mon Sep 17 00:00:00 2001
+From: Petr Hodina <phodina@protonmail.com>
+Date: Fri, 30 Sep 2022 21:55:55 +0200
+Subject: [PATCH] Add delay to update the mime db in testCreateMimeType test
+ function.
+
+
+diff --git a/keditfiletype/tests/filetypestest.cpp b/keditfiletype/tests/filetypestest.cpp
+index e58e6f2..a41d4ad 100644
+--- a/keditfiletype/tests/filetypestest.cpp
++++ b/keditfiletype/tests/filetypestest.cpp
+@@ -315,6 +315,9 @@ private Q_SLOTS:
+ QVERIFY(data.isDirty());
+ QVERIFY(data.sync());
+ MimeTypeWriter::runUpdateMimeDatabase();
++ // QMimeDatabase doesn't even try to update the cache if less than
++ // 5000 ms have passed (can't use qmime_secondsBetweenChecks)
++ QTest::qSleep(5000);
+ QMimeType mime = db.mimeTypeForName(mimeTypeName);
+ QVERIFY(mime.isValid());
+ QCOMPARE(mime.comment(), fakeComment);
+
+base-commit: b5911cda17521156b22429436e19b508aa442a57
+--
+2.37.2
+
diff --git a/gnu/packages/patches/kodi-increase-test-timeout.patch b/gnu/packages/patches/kodi-increase-test-timeout.patch
deleted file mode 100644
index 8fb149ff9d..0000000000
--- a/gnu/packages/patches/kodi-increase-test-timeout.patch
+++ /dev/null
@@ -1,18 +0,0 @@
-Increase thread timeout to reduce flakiness.
-
-Taken from upstream:
-https://github.com/xbmc/xbmc/commit/574b0182d8b641fd24029f372ebdcccc897123e2
-
-diff --git a/xbmc/threads/test/TestEvent.cpp b/xbmc/threads/test/TestEvent.cpp
-index 42fb8c2fc609..40e644c0ed3c 100644
---- a/xbmc/threads/test/TestEvent.cpp
-+++ b/xbmc/threads/test/TestEvent.cpp
-@@ -484,7 +484,7 @@ TEST(TestEvent, GroupTimedWait)
- EXPECT_TRUE(w3.result == NULL);
-
- // this should end given the wait is for only 50 millis
-- EXPECT_TRUE(waitThread3.timed_join(MILLIS(100)));
-+ EXPECT_TRUE(waitThread3.timed_join(MILLIS(200)));
-
- EXPECT_TRUE(!w3.waiting);
- EXPECT_TRUE(w3.result == NULL);
diff --git a/gnu/packages/patches/kodi-set-libcurl-ssl-parameters.patch b/gnu/packages/patches/kodi-set-libcurl-ssl-parameters.patch
index 2f60737e30..99d8a45de6 100644
--- a/gnu/packages/patches/kodi-set-libcurl-ssl-parameters.patch
+++ b/gnu/packages/patches/kodi-set-libcurl-ssl-parameters.patch
@@ -3,15 +3,16 @@ connections work we can set them based on SSL_CERT_DIR and SSL_CERT_FILE.
--- a/xbmc/filesystem/CurlFile.cpp
+++ b/xbmc/filesystem/CurlFile.cpp
-@@ -626,5 +626,9 @@
+@@ -626,8 +626,12 @@
if (!m_cipherlist.empty())
g_curlInterface.easy_setopt(h, CURLOPT_SSL_CIPHER_LIST, m_cipherlist.c_str());
-
+
+ // Load certificate data from environment paths
+ g_curlInterface.easy_setopt(m_state->m_easyHandle, CURLOPT_CAPATH, getenv("SSL_CERT_DIR"));
+ g_curlInterface.easy_setopt(m_state->m_easyHandle, CURLOPT_CAINFO, getenv("SSL_CERT_FILE"));
+
- // enable HTTP2 support. default: CURL_HTTP_VERSION_1_1. Curl >= 7.62.0 defaults to CURL_HTTP_VERSION_2TLS
- g_curlInterface.easy_setopt(h, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2_0);
--
- }
+ if (CServiceBroker::GetSettingsComponent()->GetAdvancedSettings()->m_curlDisableHTTP2)
+ g_curlInterface.easy_setopt(h, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
+ else
+ // enable HTTP2 support. default: CURL_HTTP_VERSION_1_1. Curl >= 7.62.0 defaults to CURL_HTTP_VERSION_2TLS
+ g_curlInterface.easy_setopt(h, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS);
diff --git a/gnu/packages/patches/kodi-skip-test-449.patch b/gnu/packages/patches/kodi-skip-test-449.patch
deleted file mode 100644
index a418239a7c..0000000000
--- a/gnu/packages/patches/kodi-skip-test-449.patch
+++ /dev/null
@@ -1,53 +0,0 @@
-This test fails regularly between 18.0rc3 and 18.0rc5.2
-
-449/520 Test #449: TestWebServer.CanHeadFile................................................***Failed 0.90 sec
-Note: Google Test filter = TestWebServer.CanHeadFile
-[==========] Running 1 test from 1 test case.
-[----------] Global test environment set-up.
-[----------] 1 test from TestWebServer
-[ RUN ] TestWebServer.CanHeadFile
-/tmp/guix-build-kodi-18.0rc5.2.drv-0/kodi-18.0rc5.2-checkout/xbmc/network/test/TestWebServer.cpp:156: Failure
- Expected: "4"
-To be equal to: httpHeader.GetValue("Content-Length").c_str()
- Which is: "0"
-[ FAILED ] TestWebServer.CanHeadFile (6 ms)
-[----------] 1 test from TestWebServer (6 ms total)
-
-[----------] Global test environment tear-down
-[==========] 1 test from 1 test case ran. (635 ms total)
-[ PASSED ] 0 tests.
-[ FAILED ] 1 test, listed below:
-[ FAILED ] TestWebServer.CanHeadFile
-
----
- xbmc/network/test/TestWebServer.cpp | 14 +++++++-------
- 1 file changed, 7 insertions(+), 7 deletions(-)
-
-diff --git a/xbmc/network/test/TestWebServer.cpp b/xbmc/network/test/TestWebServer.cpp
-index a87d9f4..b2240f4 100644
---- a/xbmc/network/test/TestWebServer.cpp
-+++ b/xbmc/network/test/TestWebServer.cpp
-@@ -520,13 +520,13 @@ TEST_F(TestWebServer, CanNotHeadNonExistingFile)
- ASSERT_FALSE(curl.Exists(CURL(GetUrlOfTestFile("file_does_not_exist"))));
- }
-
--TEST_F(TestWebServer, CanHeadFile)
--{
-- CCurlFile curl;
-- ASSERT_TRUE(curl.Exists(CURL(GetUrlOfTestFile(TEST_FILES_HTML))));
--
-- CheckHtmlTestFileResponse(curl);
--}
-+//TEST_F(TestWebServer, CanHeadFile)
-+//{
-+// CCurlFile curl;
-+// ASSERT_TRUE(curl.Exists(CURL(GetUrlOfTestFile(TEST_FILES_HTML))));
-+//
-+// CheckHtmlTestFileResponse(curl);
-+//}
-
- TEST_F(TestWebServer, CanNotGetNonExistingFile)
- {
---
-2.20.1
-
diff --git a/gnu/packages/patches/kwayland-skip-flaky-test.patch b/gnu/packages/patches/kwayland-skip-flaky-test.patch
new file mode 100644
index 0000000000..592ab3cc77
--- /dev/null
+++ b/gnu/packages/patches/kwayland-skip-flaky-test.patch
@@ -0,0 +1,13 @@
+This test fails in the Guix CI for unknown and difficult to debug reasons.
+
+diff --git a/autotests/client/test_plasma_window_model.cpp b/autotests/client/test_plasma_window_model.cpp
+--- a/autotests/client/test_plasma_window_model.cpp
++++ b/autotests/client/test_plasma_window_model.cpp
+@@ -545,6 +545,7 @@ void PlasmaWindowModelTest::testPid()
+
+ void PlasmaWindowModelTest::testVirtualDesktops()
+ {
++ QSKIP("This test fails on the Guix CI (FIXME)");
+ auto model = m_pw->createWindowModel();
+ QVERIFY(model);
+ QSignalSpy rowInsertedSpy(model, &PlasmaWindowModel::rowsInserted);
diff --git a/gnu/packages/patches/libgeotiff-fix-tests-with-proj-9.1.1.patch b/gnu/packages/patches/libgeotiff-fix-tests-with-proj-9.1.1.patch
new file mode 100644
index 0000000000..b4bf292c1e
--- /dev/null
+++ b/gnu/packages/patches/libgeotiff-fix-tests-with-proj-9.1.1.patch
@@ -0,0 +1,100 @@
+From: Even Rouault <even.rouault@spatialys.com>
+Date: Sat, 26 Nov 2022 13:23:12 +0100
+Subject: Fix test failures with PROJ 9.1.1
+
+https://github.com/OSGeo/libgeotiff/pull/82
+---
+ bin/listgeo.c | 7 ++++++-
+ test/testlistgeo | 4 ++--
+ test/testlistgeo_out.dist | 14 --------------
+ 3 files changed, 8 insertions(+), 17 deletions(-)
+
+diff --git a/bin/listgeo.c b/bin/listgeo.c
+index 06c45f70..acad54c6 100644
+--- a/bin/listgeo.c
++++ b/bin/listgeo.c
+@@ -29,6 +29,7 @@ void Usage()
+ " -tfw: Generate a .tfw (ESRI TIFF World) file for the target file.\n"
+ " -proj4: Report PROJ.4 equivalent projection definition.\n"
+ " -no_norm: Don't report 'normalized' parameter values.\n"
++ " -no_corners: Don't report corner coordinates.\n"
+ " filename: Name of the GeoTIFF file to report on.\n" );
+
+ exit( 1 );
+@@ -42,6 +43,7 @@ int main(int argc, char *argv[])
+ int i, norm_print_flag = 1, proj4_print_flag = 0;
+ int tfw_flag = 0, inv_flag = 0, dec_flag = 0;
+ int st_test_flag = 0;
++ int corners = 1;
+
+ /*
+ * Handle command line options.
+@@ -50,6 +52,8 @@ int main(int argc, char *argv[])
+ {
+ if( strcmp(argv[i],"-no_norm") == 0 )
+ norm_print_flag = 0;
++ else if( strcmp(argv[i],"-no_corners") == 0 )
++ corners = 0;
+ else if( strcmp(argv[i],"-tfw") == 0 )
+ tfw_flag = 1;
+ else if( strcmp(argv[i],"-proj4") == 0 )
+@@ -130,7 +134,8 @@ int main(int argc, char *argv[])
+
+ TIFFGetField( tif, TIFFTAG_IMAGEWIDTH, &xsize );
+ TIFFGetField( tif, TIFFTAG_IMAGELENGTH, &ysize );
+- GTIFPrintCorners( gtif, &defn, stdout, xsize, ysize, inv_flag, dec_flag );
++ if( corners )
++ GTIFPrintCorners( gtif, &defn, stdout, xsize, ysize, inv_flag, dec_flag );
+ }
+
+ }
+diff --git a/test/testlistgeo b/test/testlistgeo
+index 596301b4..9a41e74f 100755
+--- a/test/testlistgeo
++++ b/test/testlistgeo
+@@ -59,11 +59,11 @@ $EXE ${DATA_DIR}/ProjLinearUnitsGeoKey_9036.tif >>${OUT}
+ echo "" >>${OUT}
+
+ echo "Testing listgeo ProjectedCSTypeGeoKey_28191_cassini_soldner.tif" >> ${OUT}
+-$EXE ${DATA_DIR}/ProjectedCSTypeGeoKey_28191_cassini_soldner.tif >>${OUT}
++$EXE -no_corners ${DATA_DIR}/ProjectedCSTypeGeoKey_28191_cassini_soldner.tif >>${OUT}
+ echo "" >>${OUT}
+
+ echo "Testing listgeo cassini_soldner.tif" >> ${OUT}
+-$EXE ${DATA_DIR}/cassini_soldner.tif >>${OUT}
++$EXE -no_corners ${DATA_DIR}/cassini_soldner.tif >>${OUT}
+ echo "" >>${OUT}
+
+ echo "Testing listgeo ProjectedCSTypeGeoKey_27200_new_zealand_mapping_grid.tif" >> ${OUT}
+diff --git a/test/testlistgeo_out.dist b/test/testlistgeo_out.dist
+index 742f0fce..20221cec 100644
+--- a/test/testlistgeo_out.dist
++++ b/test/testlistgeo_out.dist
+@@ -299,13 +299,6 @@ Ellipsoid: 7010/Clarke 1880 (Benoit) (6378300.79,6356566.43)
+ Prime Meridian: 8901/Greenwich (0.000000/ 0d 0' 0.00"E)
+ Projection Linear Units: 9001/metre (1.000000m)
+
+-Corner Coordinates:
+-Upper Left ( 440720.000, 3751320.000) ( 40d47'28.08"E, 64d13'29.57"N)
+-Lower Left ( 440720.000, 3751260.000) ( 40d47'27.69"E, 64d13'27.64"N)
+-Upper Right ( 440780.000, 3751320.000) ( 40d47'32.51"E, 64d13'29.40"N)
+-Lower Right ( 440780.000, 3751260.000) ( 40d47'32.12"E, 64d13'27.47"N)
+-Center ( 440750.000, 3751290.000) ( 40d47'30.10"E, 64d13'28.52"N)
+-
+ Testing listgeo cassini_soldner.tif
+ Geotiff_Information:
+ Version: 1
+@@ -348,13 +341,6 @@ Projection Method: CT_CassiniSoldner
+ ProjFalseNorthingGeoKey: 126867.909000 m
+ Projection Linear Units: 9001/metre (1.000000m)
+
+-Corner Coordinates:
+-Upper Left ( 440720.000, 3751320.000) ( 40d47'28.08"E, 64d13'29.57"N)
+-Lower Left ( 440720.000, 3751260.000) ( 40d47'27.69"E, 64d13'27.64"N)
+-Upper Right ( 440780.000, 3751320.000) ( 40d47'32.51"E, 64d13'29.40"N)
+-Lower Right ( 440780.000, 3751260.000) ( 40d47'32.12"E, 64d13'27.47"N)
+-Center ( 440750.000, 3751290.000) ( 40d47'30.10"E, 64d13'28.52"N)
+-
+ Testing listgeo ProjectedCSTypeGeoKey_27200_new_zealand_mapping_grid.tif
+ Geotiff_Information:
+ Version: 1
diff --git a/gnu/packages/patches/libksysguard-qdiriterator-follow-symlinks.patch b/gnu/packages/patches/libksysguard-qdiriterator-follow-symlinks.patch
new file mode 100644
index 0000000000..ec4a34037d
--- /dev/null
+++ b/gnu/packages/patches/libksysguard-qdiriterator-follow-symlinks.patch
@@ -0,0 +1,24 @@
+From 46164a50de4102d02ae9d1d480acdd4b12303db8 Mon Sep 17 00:00:00 2001
+From: Thomas Tuegel <ttuegel@gmail.com>
+Date: Wed, 14 Oct 2015 07:07:22 -0500
+Subject: [PATCH] qdiriterator follow symlinks
+
+---
+ processui/scripting.cpp | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/processui/scripting.cpp b/processui/scripting.cpp
+index efed8ff..841761a 100644
+--- a/processui/scripting.cpp
++++ b/processui/scripting.cpp
+@@ -293,7 +293,7 @@ void Scripting::loadContextMenu()
+ const QStringList dirs =
+ QStandardPaths::locateAll(QStandardPaths::GenericDataLocation, QStringLiteral("ksysguard/scripts/"), QStandardPaths::LocateDirectory);
+ for (const QString &dir : dirs) {
+- QDirIterator it(dir, QStringList() << QStringLiteral("*.desktop"), QDir::NoFilter, QDirIterator::Subdirectories);
++ QDirIterator it(dir, QStringList() << QStringLiteral("*.desktop"), QDir::NoFilter, QDirIterator::Subdirectories | QDirIterator::FollowSymlinks);
+ while (it.hasNext()) {
+ scripts.append(it.next());
+ }
+--
+2.5.2
diff --git a/gnu/packages/patches/libunwind-julia-fix-GCC10-fno-common.patch b/gnu/packages/patches/libunwind-julia-fix-GCC10-fno-common.patch
deleted file mode 100644
index 8ef4b111e4..0000000000
--- a/gnu/packages/patches/libunwind-julia-fix-GCC10-fno-common.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-Fix compilation with -fno-common.
-
-Borrowed from upstream 29e17d8d2ccbca07c423e3089a6d5ae8a1c9cb6e.
-Author: Yichao Yu <yyc1992@gmail.com>
-AuthorDate: Tue Mar 31 00:43:32 2020 -0400
-Commit: Dave Watson <dade.watson@gmail.com>
-CommitDate: Tue Mar 31 08:06:29 2020 -0700
-
-diff --git a/src/x86/Ginit.c b/src/x86/Ginit.c
-index f6b8dc2..9550efa 100644
---- a/src/x86/Ginit.c
-+++ b/src/x86/Ginit.c
-@@ -54,13 +54,6 @@ tdep_uc_addr (ucontext_t *uc, int reg)
-
- # endif /* UNW_LOCAL_ONLY */
-
--HIDDEN unw_dyn_info_list_t _U_dyn_info_list;
--
--/* XXX fix me: there is currently no way to locate the dyn-info list
-- by a remote unwinder. On ia64, this is done via a special
-- unwind-table entry. Perhaps something similar can be done with
-- DWARF2 unwind info. */
--
- static void
- put_unwind_info (unw_addr_space_t as, unw_proc_info_t *proc_info, void *arg)
- {
-@@ -71,7 +64,12 @@ static int
- get_dyn_info_list_addr (unw_addr_space_t as, unw_word_t *dyn_info_list_addr,
- void *arg)
- {
-- *dyn_info_list_addr = (unw_word_t) &_U_dyn_info_list;
-+#ifndef UNW_LOCAL_ONLY
-+# pragma weak _U_dyn_info_list_addr
-+ if (!_U_dyn_info_list_addr)
-+ return -UNW_ENOINFO;
-+#endif
-+ *dyn_info_list_addr = _U_dyn_info_list_addr ();
- return 0;
- }
-
diff --git a/gnu/packages/patches/linux-libre-infodocs-target.patch b/gnu/packages/patches/linux-libre-infodocs-target.patch
new file mode 100644
index 0000000000..e9e6495c26
--- /dev/null
+++ b/gnu/packages/patches/linux-libre-infodocs-target.patch
@@ -0,0 +1,88 @@
+Upstream status: https://marc.info/?l=linux-doc&m=166861669723994&w=2
+
+From 7edb5b278ee383ff9bed525329b2cdbe22317bf2 Mon Sep 17 00:00:00 2001
+From: Maxim Cournoyer <maxim.cournoyer@gmail.com>
+Date: Mon, 14 Nov 2022 22:51:11 -0500
+Subject: [PATCH] doc: add texinfodocs and infodocs targets
+
+Sphinx supports generating Texinfo sources and Info documentation,
+which can be navigated easily and is convenient to search (via the
+indexed nodes or anchors, for example).
+
+This change also causes the html output to appear under its own output
+sub-directory, which makes it easier to install, since it's clean from
+.doctrees or other output formats.
+
+Signed-off-by: Maxim Cournoyer <maxim.cournoyer@gmail.com>
+---
+ Documentation/Makefile | 13 ++++++++++++-
+ Documentation/userspace-api/media/Makefile | 3 ++-
+ Makefile | 2 +-
+ 3 files changed, 15 insertions(+), 3 deletions(-)
+
+diff --git a/Documentation/Makefile b/Documentation/Makefile
+index 64d44c1ecad3..bd8dac560633 100644
+--- a/Documentation/Makefile
++++ b/Documentation/Makefile
+@@ -93,7 +93,16 @@ quiet_cmd_sphinx = SPHINX $@ --> file://$(abspath $(BUILDDIR)/$3/$4)
+
+ htmldocs:
+ @$(srctree)/scripts/sphinx-pre-install --version-check
+- @+$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,html,$(var),,$(var)))
++ @+$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,html,$(var),html,$(var)))
++
++texinfodocs:
++ @$(srctree)/scripts/sphinx-pre-install --version-check
++ @+$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,texinfo,$(var),texinfo,$(var)))
++
++# Note: the 'info' Make target is generated by sphinx itself when
++# running the texinfodocs target define above.
++infodocs: texinfodocs
++ $(MAKE) -C $(BUILDDIR)/texinfo info
+
+ linkcheckdocs:
+ @$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,linkcheck,$(var),,$(var)))
+@@ -143,6 +152,8 @@ cleandocs:
+ dochelp:
+ @echo ' Linux kernel internal documentation in different formats from ReST:'
+ @echo ' htmldocs - HTML'
++ @echo ' texinfodocs - Texinfo'
++ @echo ' infodocs - Info'
+ @echo ' latexdocs - LaTeX'
+ @echo ' pdfdocs - PDF'
+ @echo ' epubdocs - EPUB'
+diff --git a/Documentation/userspace-api/media/Makefile b/Documentation/userspace-api/media/Makefile
+index 00922aa7efde..3d8aaf5c253b 100644
+--- a/Documentation/userspace-api/media/Makefile
++++ b/Documentation/userspace-api/media/Makefile
+@@ -47,10 +47,11 @@ $(BUILDDIR)/lirc.h.rst: ${UAPI}/lirc.h ${PARSER} $(SRC_DIR)/lirc.h.rst.exception
+
+ # Media build rules
+
+-.PHONY: all html epub xml latex
++.PHONY: all html texinfo epub xml latex
+
+ all: $(IMGDOT) $(BUILDDIR) ${TARGETS}
+ html: all
++texinfo: all
+ epub: all
+ xml: all
+ latex: $(IMGPDF) all
+diff --git a/Makefile b/Makefile
+index 58cd4f5e1c3a..b3266c408b6c 100644
+--- a/Makefile
++++ b/Makefile
+@@ -1785,7 +1785,7 @@ $(help-board-dirs): help-%:
+ # Documentation targets
+ # ---------------------------------------------------------------------------
+ DOC_TARGETS := xmldocs latexdocs pdfdocs htmldocs epubdocs cleandocs \
+- linkcheckdocs dochelp refcheckdocs
++ linkcheckdocs dochelp refcheckdocs texinfodocs infodocs
+ PHONY += $(DOC_TARGETS)
+ $(DOC_TARGETS):
+ $(Q)$(MAKE) $(build)=Documentation $@
+
+base-commit: 81e7cfa3a9eb4ba6993a9c71772fdab21bc5d870
+--
+2.38.1
+
diff --git a/gnu/packages/patches/lirc-reproducible-build.patch b/gnu/packages/patches/lirc-reproducible-build.patch
index 20f9344715..75aa480941 100644
--- a/gnu/packages/patches/lirc-reproducible-build.patch
+++ b/gnu/packages/patches/lirc-reproducible-build.patch
@@ -5,11 +5,22 @@ https://sourceforge.net/p/lirc/git/merge-requests/33/
https://sourceforge.net/p/lirc/git/merge-requests/34/
https://sourceforge.net/p/lirc/git/merge-requests/36/
-Index: lirc-0.10.1/tools/lirc-lsplugins.cpp
-===================================================================
---- lirc-0.10.1.orig/tools/lirc-lsplugins.cpp
-+++ lirc-0.10.1/tools/lirc-lsplugins.cpp
-@@ -415,10 +415,9 @@ static void print_header(void)
+diff -Naur lirc-0.10.2a/python-pkg/lirc/database.py lirc-0.10.2/python-pkg/lirc/database.py
+--- lirc-0.10.2a/python-pkg/lirc/database.py 1970-01-01 01:00:01.000000000 +0100
++++ lirc-0.10.2/python-pkg/lirc/database.py 2023-01-20 14:23:29.414088668 +0100
+@@ -160,7 +160,7 @@
+ d['device_hint'] = hint
+
+ configs = {}
+- for path in glob.glob(configdir + '/*.conf'):
++ for path in sorted(glob.glob(configdir + '/*.conf')):
+ with open(path) as f:
+ cf = yaml.load(f.read(), Loader = Loader)
+ configs[cf['config']['id']] = cf['config']
+diff -Naur lirc-0.10.2a/tools/lirc-lsplugins.cpp lirc-0.10.2/tools/lirc-lsplugins.cpp
+--- lirc-0.10.2a/tools/lirc-lsplugins.cpp 1970-01-01 01:00:01.000000000 +0100
++++ lirc-0.10.2/tools/lirc-lsplugins.cpp 2023-01-20 14:24:42.719085612 +0100
+@@ -413,10 +413,9 @@
static void print_yaml_header(void)
{
static const char* const YAML_HEADER =
@@ -22,51 +33,3 @@ Index: lirc-0.10.1/tools/lirc-lsplugins.cpp
printf("\ndrivers:\n");
}
-Index: lirc-0.10.1/python-pkg/lirc/database.py
-===================================================================
---- lirc-0.10.1.orig/python-pkg/lirc/database.py
-+++ lirc-0.10.1/python-pkg/lirc/database.py
-@@ -156,7 +156,7 @@ class Database(object):
- d['device_hint'] = hint
-
- configs = {}
-- for path in glob.glob(configdir + '/*.conf'):
-+ for path in sorted(glob.glob(configdir + '/*.conf')):
- with open(path) as f:
- cf = yaml.load(f.read())
- configs[cf['config']['id']] = cf['config']
-Index: lirc-0.10.1/tools/irdb-get
-===================================================================
---- lirc-0.10.1.orig/tools/irdb-get
-+++ lirc-0.10.1/tools/irdb-get
-@@ -9,7 +9,6 @@ import fnmatch
- import os
- import os.path
- import sys
--import time
- import urllib.error # pylint: disable=no-name-in-module,F0401,E0611
- import urllib.request # pylint: disable=no-name-in-module,F0401,E0611
-
-@@ -193,7 +192,7 @@ def do_yaml_config():
- lircmd_by_driver[driver].append("%s/%s" % (tokens[0], tokens[2]))
-
- print("#")
-- print("# Created by 'irdb-get yaml-config' at " + time.ctime())
-+ print("# Created by 'irdb-get yaml-config'")
- print("#")
- print("\nlircd_by_driver:")
- print_yaml_dict(lircd_by_driver)
-Index: lirc-0.10.1/tools/lirc-make-devinput
-===================================================================
---- lirc-0.10.1.orig/tools/lirc-make-devinput
-+++ lirc-0.10.1/tools/lirc-make-devinput
-@@ -61,8 +61,7 @@ if test -n "$lirc_map"; then
- fi
-
-
--echo "# Generated by $(basename $0) on $(uname -r)"
--echo "# Date: $(date)"
-+echo "# Generated by $(basename $0)"
- cat <<EOF
-
- begin remote
diff --git a/gnu/packages/patches/luajit-no_ldconfig.patch b/gnu/packages/patches/luajit-no_ldconfig.patch
deleted file mode 100644
index 8000e8e4e5..0000000000
--- a/gnu/packages/patches/luajit-no_ldconfig.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-From 629200f48e18dc1a3a5229739748bad0e2a6a0a2 Mon Sep 17 00:00:00 2001
-From: =?utf8?q?Tom=C3=A1=C5=A1=20=C4=8Cech?= <sleep_walker@suse.cz>
-Date: Wed, 4 Feb 2015 11:37:16 +0100
-Subject: [PATCH] Do not silently and prematurely end install when ldconfig is
- missing
-
----
- Makefile | 7 ++++---
- 1 file changed, 4 insertions(+), 3 deletions(-)
-
-diff --git a/Makefile b/Makefile
-index 343ecb5..4667146 100644
---- a/Makefile
-+++ b/Makefile
-@@ -112,9 +112,10 @@ install: $(INSTALL_DEP)
- $(RM) $(INSTALL_TSYM) $(INSTALL_DYN) $(INSTALL_SHORT1) $(INSTALL_SHORT2)
- cd src && test -f $(FILE_SO) && \
- $(INSTALL_X) $(FILE_SO) $(INSTALL_DYN) && \
-- $(LDCONFIG) $(INSTALL_LIB) && \
-- $(SYMLINK) $(INSTALL_SONAME) $(INSTALL_SHORT1) && \
-- $(SYMLINK) $(INSTALL_SONAME) $(INSTALL_SHORT2) || :
-+ ( $(LDCONFIG) $(INSTALL_LIB) ; \
-+ $(SYMLINK) $(INSTALL_SONAME) $(INSTALL_SHORT1) && \
-+ $(SYMLINK) $(INSTALL_SONAME) $(INSTALL_SHORT2) || : \
-+ )
- cd etc && $(INSTALL_F) $(FILE_MAN) $(INSTALL_MAN)
- cd etc && $(SED_PC) $(FILE_PC) > $(FILE_PC).tmp && \
- $(INSTALL_F) $(FILE_PC).tmp $(INSTALL_PC) && \
---
-2.2.2
-
diff --git a/gnu/packages/patches/memtest86+-build-reproducibly.patch b/gnu/packages/patches/memtest86+-build-reproducibly.patch
new file mode 100644
index 0000000000..aaaad8c643
--- /dev/null
+++ b/gnu/packages/patches/memtest86+-build-reproducibly.patch
@@ -0,0 +1,115 @@
+From ec426a6387ca49376a3af6093978bfc1388528d7 Mon Sep 17 00:00:00 2001
+From: Tobias Geerinckx-Rice <me@tobias.gr>
+Date: Sun, 23 Oct 2022 00:00:00 +0200
+Subject: [PATCH] gnu: memtest86+: Build reproducibly.
+
+Based on Debian's incomplete patch:
+<https://salsa.debian.org/debian/memtest86plus/-/blob/81d81a39103b383eb269dc5094b4538615e54d39/debian/patches/reproducible-builds.patch>.
+---
+ build32/Makefile | 12 ++++++++----
+ build64/Makefile | 12 ++++++++----
+ 2 files changed, 16 insertions(+), 8 deletions(-)
+
+diff --git a/build32/Makefile b/build32/Makefile
+index 8f3d427..d4e69ff 100644
+--- a/build32/Makefile
++++ b/build32/Makefile
+@@ -69,6 +69,8 @@ OBJS = boot/startup.o boot/efisetup.o $(SYS_OBJS) $(LIB_OBJS) $(TST_OBJS) $(APP_
+
+ all: memtest.bin memtest.efi
+
++export TZ=UTC
++
+ -include boot/efisetup.d
+ -include $(subst .o,.d,$(SYS_OBJS))
+ -include $(subst .o,.d,$(LIB_OBJS))
+@@ -153,13 +155,14 @@ esp.img: memtest.efi
+ @mkdir -p iso/EFI/BOOT
+ cp memtest.efi iso/EFI/BOOT/bootia32.efi
+ @rm -f esp.img
+- /sbin/mkdosfs -n MEMTEST-ESP -F12 -C esp.img 4096
++ /sbin/mkdosfs -i 12345678 --invariant -n MEMTEST-ESP -F12 -C esp.img 4096
+ mcopy -s -i esp.img iso/EFI ::
+
+ memtest.iso: memtest.mbr floppy.img esp.img
+ @mkdir -p iso/boot
+ cp floppy.img iso/boot/floppy.img
+- xorrisofs -pad -R -J -volid MT86PLUS_32 -graft-points -hide-rr-moved --grub2-mbr memtest.mbr \
++ xorrisofs --set_all_file_dates "=$$SOURCE_DATE_EPOCH" -uid 1000 -gid 1000 \
++ -pad -R -J -volid MT86PLUS_32 -graft-points -hide-rr-moved --grub2-mbr memtest.mbr \
+ -b /boot/floppy.img --efi-boot --interval:appended_partition_2:all:: \
+ -part_like_isohybrid -iso_mbr_part_type 0x00 -append_partition 2 0xef ./esp.img \
+ -o ./memtest.iso /boot=./iso/boot /EFI=./iso/EFI
+@@ -210,7 +213,7 @@ grub-esp.img: memtest.efi grub-bootia32.efi ../grub/${GRUB_CFG}-efi.cfg
+ cp $(GRUB_FONT_DIR)/unicode.pf2 grub-iso/EFI/BOOT/grub/fonts/
+ cp $(GRUB_LIB_DIR)/i386-efi/*.mod grub-iso/EFI/BOOT/grub/i386-efi/
+ @rm -f grub-esp.img
+- /sbin/mkdosfs -n MT86P_ESP -F12 -C grub-esp.img 8192
++ /sbin/mkdosfs -i 12345678 --invariant -n MT86P_ESP -F12 -C grub-esp.img 8192
+ mcopy -s -i grub-esp.img grub-iso/EFI ::
+
+ grub-memtest.iso: memtest.bin grub-eltorito.img ../grub/${GRUB_CFG}-legacy.cfg grub-esp.img
+@@ -220,7 +223,8 @@ grub-memtest.iso: memtest.bin grub-eltorito.img ../grub/${GRUB_CFG}-legacy.cfg g
+ cp ../grub/${GRUB_CFG}-legacy.cfg grub-iso/boot/grub/grub.cfg
+ cp $(GRUB_FONT_DIR)/unicode.pf2 grub-iso/boot/grub/fonts/
+ cp $(GRUB_LIB_DIR)/i386-pc/*.mod grub-iso/boot/grub/i386-pc/
+- xorrisofs -pad -R -J -volid MT86PLUS_32 -graft-points -hide-rr-moved \
++ xorrisofs --set_all_file_dates "=$$SOURCE_DATE_EPOCH" -uid 1000 -gid 1000 \
++ -pad -R -J -volid MT86PLUS_32 -graft-points -hide-rr-moved \
+ --grub2-mbr $(GRUB_LIB_DIR)/i386-pc/boot_hybrid.img \
+ -b /boot/eltorito.img -no-emul-boot -boot-load-size 4 -boot-info-table --grub2-boot-info \
+ --efi-boot --interval:appended_partition_2:all:: \
+diff --git a/build64/Makefile b/build64/Makefile
+index 17f46a2..1cbe161 100644
+--- a/build64/Makefile
++++ b/build64/Makefile
+@@ -68,6 +68,8 @@ OBJS = boot/startup.o boot/efisetup.o $(SYS_OBJS) $(LIB_OBJS) $(TST_OBJS) $(APP_
+
+ all: memtest.bin memtest.efi
+
++export TZ=UTC
++
+ -include boot/efisetup.d
+ -include $(subst .o,.d,$(SYS_OBJS))
+ -include $(subst .o,.d,$(LIB_OBJS))
+@@ -152,13 +154,14 @@ esp.img: memtest.efi
+ @mkdir -p iso/EFI/BOOT
+ cp memtest.efi iso/EFI/BOOT/bootx64.efi
+ @rm -f esp.img
+- /sbin/mkdosfs -n MEMTEST-ESP -F12 -C esp.img 4096
++ /sbin/mkdosfs -i 12345678 --invariant -n MEMTEST-ESP -F12 -C esp.img 4096
+ mcopy -s -i esp.img iso/EFI ::
+
+ memtest.iso: memtest.mbr floppy.img esp.img
+ @mkdir -p iso/boot
+ cp floppy.img iso/boot/floppy.img
+- xorrisofs -pad -R -J -volid MT86PLUS_64 -graft-points -hide-rr-moved --grub2-mbr memtest.mbr \
++ xorrisofs --set_all_file_dates "=$$SOURCE_DATE_EPOCH" -uid 1000 -gid 1000 \
++ -pad -R -J -volid MT86PLUS_64 -graft-points -hide-rr-moved --grub2-mbr memtest.mbr \
+ -b /boot/floppy.img --efi-boot --interval:appended_partition_2:all:: \
+ -part_like_isohybrid -iso_mbr_part_type 0x00 -append_partition 2 0xef ./esp.img \
+ -o ./memtest.iso /boot=./iso/boot /EFI=./iso/EFI
+@@ -209,7 +212,7 @@ grub-esp.img: memtest.efi grub-bootx64.efi ../grub/${GRUB_CFG}-efi.cfg
+ cp $(GRUB_FONT_DIR)/unicode.pf2 grub-iso/EFI/BOOT/grub/fonts/
+ cp $(GRUB_LIB_DIR)/x86_64-efi/*.mod grub-iso/EFI/BOOT/grub/x86_64-efi/
+ @rm -f grub-esp.img
+- /sbin/mkdosfs -n MT86P_ESP -F12 -C grub-esp.img 8192
++ /sbin/mkdosfs -i 12345678 --invariant -n MT86P_ESP -F12 -C grub-esp.img 8192
+ mcopy -s -i grub-esp.img grub-iso/EFI ::
+
+ grub-memtest.iso: memtest.bin grub-eltorito.img ../grub/${GRUB_CFG}-legacy.cfg grub-esp.img
+@@ -219,7 +222,8 @@ grub-memtest.iso: memtest.bin grub-eltorito.img ../grub/${GRUB_CFG}-legacy.cfg g
+ cp ../grub/${GRUB_CFG}-legacy.cfg grub-iso/boot/grub/grub.cfg
+ cp $(GRUB_FONT_DIR)/unicode.pf2 grub-iso/boot/grub/fonts/
+ cp $(GRUB_LIB_DIR)/i386-pc/*.mod grub-iso/boot/grub/i386-pc/
+- xorrisofs -pad -R -J -volid MT86PLUS_64 -graft-points -hide-rr-moved \
++ xorrisofs --set_all_file_dates "=$$SOURCE_DATE_EPOCH" -uid 1000 -gid 1000 \
++ -pad -R -J -volid MT86PLUS_64 -graft-points -hide-rr-moved \
+ --grub2-mbr $(GRUB_LIB_DIR)/i386-pc/boot_hybrid.img \
+ -b /boot/eltorito.img -no-emul-boot -boot-load-size 4 -boot-info-table --grub2-boot-info \
+ --efi-boot --interval:appended_partition_2:all:: \
+
+base-commit: d3bc8fa7c2a2400d9c4d58cee00168d39fa9d716
+--
+2.38.0
+
diff --git a/gnu/packages/patches/mia-vtk-version.patch b/gnu/packages/patches/mia-vtk-version.patch
new file mode 100644
index 0000000000..2435079dcc
--- /dev/null
+++ b/gnu/packages/patches/mia-vtk-version.patch
@@ -0,0 +1,15 @@
+The VTK_MAJOR_VERSION macro is not available by default in newer versions of VTK.
+This is a hack to expose it without changing all imports.
+
+diff --git a/addons/vtk/CMakeLists.txt b/addons/vtk/CMakeLists.txt
+--- a/addons/vtk/CMakeLists.txt
++++ b/addons/vtk/CMakeLists.txt
+@@ -36,6 +36,8 @@ IF(WITH_VTKIO)
+ ELSE()
+ SET(SELECTED_VTK_LIBS ${VTK_MODULES_REQUESTED})
+ ENDIF()
++
++ add_compile_definitions(VTK_MAJOR_VERSION=${VTK_VERSION_MAJOR})
+
+ SET(VTK_LINK_LIBS_MESH ${SELECTED_VTK_LIBS} miamesh)
+ SET(VTK_LINK_LIBS_3D ${SELECTED_VTK_LIBS} mia3d)
diff --git a/gnu/packages/patches/mia-vtk92.patch b/gnu/packages/patches/mia-vtk92.patch
new file mode 100644
index 0000000000..afcb14e564
--- /dev/null
+++ b/gnu/packages/patches/mia-vtk92.patch
@@ -0,0 +1,14 @@
+Boolean pixel values are no longer supported in VTK 9.2.
+
+diff --git a/addons/vtk/test_vtkimage.cc b/addons/vtk/test_vtkimage.cc
+--- a/addons/vtk/test_vtkimage.cc
++++ b/addons/vtk/test_vtkimage.cc
+@@ -206,7 +206,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE( test_mhd_write_read, T, type_mhd )
+ unlink(zrawfilename.str().c_str());
+ }
+
+-#if VTK_MAJOR_VERSION >= 7
++#if VTK_MAJOR_VERSION >= 7 && VTK_MAJOR_VERSION < 9
+ BOOST_AUTO_TEST_CASE( test_simple_write_read_bool )
+ {
+ C3DBounds size(2, 3, 4);
diff --git a/gnu/packages/patches/mrustc-riscv64-support.patch b/gnu/packages/patches/mrustc-riscv64-support.patch
deleted file mode 100644
index 6312116585..0000000000
--- a/gnu/packages/patches/mrustc-riscv64-support.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-Patch sent upstream for review:
-https://github.com/thepowersgang/mrustc/pull/276
-
-diff --git a/src/trans/target.cpp b/src/trans/target.cpp
-index 420a2870..4d5eefb3 100644
---- a/src/trans/target.cpp
-+++ b/src/trans/target.cpp
-@@ -65,6 +65,13 @@ const TargetArch ARCH_POWERPC64LE = {
- { /*atomic(u8)=*/true, true, true, true, true },
- TargetArch::Alignments(2, 4, 8, 16, 4, 8, 8)
- };
-+// This is a guess
-+const TargetArch ARCH_RISCV64 = {
-+ "riscv64",
-+ 64, false,
-+ { /*atomic(u8)=*/true, true, true, true, true },
-+ TargetArch::Alignments(2, 4, 8, 16, 4, 8, 8)
-+};
- TargetSpec g_target;
-
-
-@@ -455,6 +462,13 @@ namespace
- ARCH_POWERPC64LE
- };
- }
-+ else if(target_name == "riscv64-unknown-linux-gnu")
-+ {
-+ return TargetSpec {
-+ "unix", "linux", "gnu", {CodegenMode::Gnu11, false, "riscv64-unknown-linux-gnu", BACKEND_C_OPTS_GNU},
-+ ARCH_RISCV64
-+ };
-+ }
- else if(target_name == "i586-pc-windows-gnu")
- {
- return TargetSpec {
-diff --git a/tools/common/target_detect.h b/tools/common/target_detect.h
-index a052da6b..42fea91a 100644
---- a/tools/common/target_detect.h
-+++ b/tools/common/target_detect.h
-@@ -34,6 +34,8 @@
- # define DEFAULT_TARGET_NAME "powerpc64-unknown-linux-gnu"
- # elif defined(__powerpc64__) && defined(__LITTLE_ENDIAN__)
- # define DEFAULT_TARGET_NAME "powerpc64le-unknown-linux-gnu"
-+# elif defined(__riscv) && __riscv_xlen == 64
-+# define DEFAULT_TARGET_NAME "riscv64-unknown-linux-gnu"
- # else
- # warning "Unable to detect a suitable default target (linux-gnu)"
- # endif
diff --git a/gnu/packages/patches/nautilus-extension-search-path.patch b/gnu/packages/patches/nautilus-extension-search-path.patch
new file mode 100644
index 0000000000..d5dc35b241
--- /dev/null
+++ b/gnu/packages/patches/nautilus-extension-search-path.patch
@@ -0,0 +1,75 @@
+Allow Nautilus to search for extensions in the directories listed
+in $NAUTILUS_EXTENSION_PATH.
+
+diff --git a/src/nautilus-module.c b/src/nautilus-module.c
+index bf474bd..42e2a4e 100644
+--- a/src/nautilus-module.c
++++ b/src/nautilus-module.c
+@@ -211,6 +211,10 @@ static void
+ load_module_dir (const char *dirname)
+ {
+ GDir *dir;
++ static GHashTable *loaded = NULL;
++
++ if (loaded == NULL)
++ loaded = g_hash_table_new (g_str_hash, g_str_equal);
+
+ dir = g_dir_open (dirname, 0, NULL);
+
+@@ -221,15 +225,22 @@ load_module_dir (const char *dirname)
+ while ((name = g_dir_read_name (dir)))
+ {
+ if (g_str_has_suffix (name, "." G_MODULE_SUFFIX))
+- {
+- char *filename;
+-
+- filename = g_build_filename (dirname,
+- name,
+- NULL);
+- nautilus_module_load_file (filename);
+- g_free (filename);
+- }
++ {
++            /* Make sure each module is loaded only once or this could
++ lead to a crash. Double loading can occur if DIRNAME
++ occurs more than once in $NAUTILUS_EXTENSION_PATH. */
++ if (!g_hash_table_contains (loaded, name))
++ {
++ char *filename;
++
++ filename = g_build_filename (dirname,
++ name,
++ NULL);
++ nautilus_module_load_file (filename);
++ g_hash_table_add (loaded, g_strdup (name));
++ g_free (filename);
++ }
++ }
+ }
+
+ g_dir_close (dir);
+@@ -257,10 +268,24 @@ nautilus_module_setup (void)
+
+ if (!initialized)
+ {
++ const gchar *extension_path;
+ initialized = TRUE;
+
+ load_module_dir (NAUTILUS_EXTENSIONDIR);
+
++ /* Load additional modules from the user-provided search path. */
++ extension_path = g_getenv ("NAUTILUS_EXTENSION_PATH");
++ if (extension_path)
++ {
++ char **extension_dirs, **d;
++
++ extension_dirs = g_strsplit (extension_path, ":", -1);
++ for (d = extension_dirs; d != NULL && *d != NULL; d++)
++ load_module_dir (*d);
++
++ g_strfreev (extension_dirs);
++ }
++
+ eel_debug_call_at_shutdown (free_module_objects);
+ }
+ }
diff --git a/gnu/packages/patches/oath-toolkit-xmlsec-compat.patch b/gnu/packages/patches/oath-toolkit-xmlsec-compat.patch
new file mode 100644
index 0000000000..18589366a9
--- /dev/null
+++ b/gnu/packages/patches/oath-toolkit-xmlsec-compat.patch
@@ -0,0 +1,79 @@
+Adjust tests for compatibility with newer xmlsec.
+
+Taken from upstream:
+
+ https://gitlab.com/oath-toolkit/oath-toolkit/-/commit/0ae59b9c72f69ee21044e736e292b73051df3272
+
+diff --git a/libpskc/examples/pskc-hotp-signed.xml b/libpskc/examples/pskc-hotp-signed.xml
+index 359dd4723cf2b5ae1ca53d793cc6900a15bd4969..396e0829ff245b3ad6953cc3523d77568c8a18f4 100644
+--- a/libpskc/examples/pskc-hotp-signed.xml
++++ b/libpskc/examples/pskc-hotp-signed.xml
+@@ -38,7 +38,8 @@ rIXbwqKhnBP943U4Ch31oEbZtbo+XRbiq11wv6dLNsi76TNGDqsjTKgEcSIYI6Vd
+ rMxnil6ChoIBvSSPGHhJuj1bW1EPW92JtIa6byrAj1m4RwSviQy2i65YoIdtrhRt
+ CWekj2zuL/0szv5rZMCCvxioOCA8znqELEPMfs0Aa/cACD2MZcC4gGXehNCvzYJr
+ TmB6lFpxP6f0g6eO7PVcqYN9NCwECxb5Cvx2j2uNlereY35/9oPR6YJx+V7sL+DB
+-n6F0mN8OUAFxDamepKdGRApU8uZ35624o/I4</X509Certificate>
++n6F0mN8OUAFxDamepKdGRApU8uZ35624o/I4
++</X509Certificate>
+ </X509Data>
+ </KeyInfo>
+ </Signature></KeyContainer>
+diff --git a/pskctool/tests/pskc-all-signed.xml b/pskctool/tests/pskc-all-signed.xml
+index 39cf8af5bb00e34fa5e1acb97cea59ff742758f4..5d44e72d928ccf1f46975e6ccca58297e235be95 100644
+--- a/pskctool/tests/pskc-all-signed.xml
++++ b/pskctool/tests/pskc-all-signed.xml
+@@ -38,7 +38,8 @@ rIXbwqKhnBP943U4Ch31oEbZtbo+XRbiq11wv6dLNsi76TNGDqsjTKgEcSIYI6Vd
+ rMxnil6ChoIBvSSPGHhJuj1bW1EPW92JtIa6byrAj1m4RwSviQy2i65YoIdtrhRt
+ CWekj2zuL/0szv5rZMCCvxioOCA8znqELEPMfs0Aa/cACD2MZcC4gGXehNCvzYJr
+ TmB6lFpxP6f0g6eO7PVcqYN9NCwECxb5Cvx2j2uNlereY35/9oPR6YJx+V7sL+DB
+-n6F0mN8OUAFxDamepKdGRApU8uZ35624o/I4</X509Certificate>
++n6F0mN8OUAFxDamepKdGRApU8uZ35624o/I4
++</X509Certificate>
+ </X509Data>
+ </KeyInfo>
+ </Signature></KeyContainer>
+diff --git a/pskctool/tests/tst_libexamples.sh b/pskctool/tests/tst_libexamples.sh
+index 0e8e558b4cbefbac9f2ca301fddb2ac4b8cfbdfa..5cb52ce44f40a78ab886ff12d2eac64e5b5c4c7e 100755
+--- a/pskctool/tests/tst_libexamples.sh
++++ b/pskctool/tests/tst_libexamples.sh
+@@ -1,7 +1,7 @@
+ #!/bin/sh
+
+ # tst_libexamples.sh - keep pskctool output in GTK-DOC manual up to date
+-# Copyright (C) 2012-2021 Simon Josefsson
++# Copyright (C) 2012-2022 Simon Josefsson
+
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+@@ -45,7 +45,8 @@ fi
+
+ $PSKCTOOL --sign --sign-key $srcdir/pskc-ee-key.pem \
+ --sign-crt $srcdir/pskc-ee-crt.pem \
+- $srcdir/../../libpskc/examples/pskc-hotp.xml > foo
++ $srcdir/../../libpskc/examples/pskc-hotp.xml \
++ | sed 's,4</X509Cert,4\n</X509Cert,' > foo
+ if ! diff -ur $srcdir/../../libpskc/examples/pskc-hotp-signed.xml foo; then
+ echo "FAIL: pskctool --sign output change, commit updated file."
+ exit 1
+diff --git a/pskctool/tests/tst_sign.sh b/pskctool/tests/tst_sign.sh
+index b62757336061394746d60f706369127fe1db466d..2d452d69f2e6595ff85a34540939801324dbdfea 100755
+--- a/pskctool/tests/tst_sign.sh
++++ b/pskctool/tests/tst_sign.sh
+@@ -1,7 +1,7 @@
+ #!/bin/sh
+
+ # tst_sign.sh - test that pskctool can sign and verify
+-# Copyright (C) 2012-2021 Simon Josefsson
++# Copyright (C) 2012-2022 Simon Josefsson
+
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+@@ -32,7 +32,7 @@ $PSKCTOOL --info --strict --debug $pskc_all > tmp-pre-human.txt
+ $PSKCTOOL --sign \
+ --sign-key $pskc_ee_key \
+ --sign-crt $pskc_ee_crt \
+- $pskc_all > tmp-signed.xml
++ $pskc_all | sed 's,4</X509Cert,4\n</X509Cert,' > tmp-signed.xml
+
+ diff -ur $pskc_all_signed tmp-signed.xml
+
diff --git a/gnu/packages/patches/openbios-gcc-warnings.patch b/gnu/packages/patches/openbios-gcc-warnings.patch
new file mode 100644
index 0000000000..b96cecc31e
--- /dev/null
+++ b/gnu/packages/patches/openbios-gcc-warnings.patch
@@ -0,0 +1,95 @@
+Fix warnings with recent versions of GCC.
+
+This is a combination of these commits:
+
+ https://github.com/openbios/openbios/commit/14be7d187a327a89c068c4e2551d5012a3c25703
+ https://github.com/openbios/openbios/commit/0e6b8b3cb4a25a4680f238bae76de5e370e706c8
+ https://github.com/openbios/openbios/commit/51067854a7606cceb8b1e0a3d2108da69ff46973
+
+...with minor adaptations to apply on 1.1.
+
+
+diff --git a/arch/sparc32/context.c b/arch/sparc32/context.c
+--- a/arch/sparc32/context.c
++++ b/arch/sparc32/context.c
+@@ -86,7 +86,7 @@ struct context *switch_to(struct context *ctx)
+ __context = ctx;
+ asm __volatile__ ("\n\tcall __switch_context"
+ "\n\tnop" ::: "g1", "g2", "g3", "g4", "g5", "g6", "g7",
+- "o0", "o1", "o2", "o3", "o4", "o5", "sp", "o7",
++ "o0", "o1", "o2", "o3", "o4", "o5", "o7",
+ "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
+ "i0", "i1", "i2", "i3", "i4", "i5", "i7",
+ "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9",
+diff --git a/drivers/cuda.c b/drivers/cuda.c
+--- a/drivers/cuda.c
++++ b/drivers/cuda.c
+@@ -355,7 +355,7 @@ static void
+ rtc_init(char *path)
+ {
+ phandle_t ph, aliases;
+- char buf[64];
++ char buf[128];
+
+ snprintf(buf, sizeof(buf), "%s/rtc", path);
+ REGISTER_NAMED_NODE(rtc, buf);
+diff --git a/drivers/ide.c b/drivers/ide.c
+--- a/drivers/ide.c
++++ b/drivers/ide.c
+@@ -987,7 +987,7 @@ ob_ide_identify_drive(struct ide_drive *drive)
+ drive->sect = id.sectors;
+ }
+
+- strncpy(drive->model, (char*)id.model, sizeof(id.model));
++ strncpy(drive->model, (char*)id.model, sizeof(drive->model));
+ drive->model[40] = '\0';
+ return 0;
+ }
+diff --git a/fs/hfs/hfs_fs.c b/fs/hfs/hfs_fs.c
+--- a/fs/hfs/hfs_fs.c
++++ b/fs/hfs/hfs_fs.c
+@@ -86,7 +86,7 @@ _search( hfsvol *vol, const char *path, const char *sname, hfsfile **ret_fd )
+
+ strncpy( buf, path, sizeof(buf) );
+ if( buf[strlen(buf)-1] != ':' )
+- strncat( buf, ":", sizeof(buf) );
++ strncat( buf, ":", sizeof(buf) - 1 );
+ buf[sizeof(buf)-1] = 0;
+ p = buf + strlen( buf );
+
+@@ -101,7 +101,7 @@ _search( hfsvol *vol, const char *path, const char *sname, hfsfile **ret_fd )
+ *p = 0;
+ topdir = 0;
+
+- strncat( buf, ent.name, sizeof(buf) );
++ strncat( buf, ent.name, sizeof(buf) - 1);
+ if( (status=_search(vol, buf, sname, ret_fd)) != 2 )
+ continue;
+ topdir = 1;
+diff --git a/libc/string.c b/libc/string.c
+--- a/libc/string.c
++++ b/libc/string.c
+@@ -349,10 +349,7 @@ int memcmp(const void * cs,const void * ct,size_t count)
+ char *
+ strdup( const char *str )
+ {
+- char *p;
+- if( !str )
+- return NULL;
+- p = malloc( strlen(str) + 1 );
++ char *p = malloc( strlen(str) + 1 );
+ strcpy( p, str );
+ return p;
+ }
+diff --git a/packages/nvram.c b/packages/nvram.c
+--- a/packages/nvram.c
++++ b/packages/nvram.c
+@@ -105,7 +105,7 @@ create_free_part( char *ptr, int size )
+ nvpart_t *nvp = (nvpart_t*)ptr;
+ memset( nvp, 0, size );
+
+- strncpy( nvp->name, "777777777777", sizeof(nvp->name) );
++ strncpy( nvp->name, "77777777777", sizeof(nvp->name) );
+ nvp->signature = NV_SIG_FREE;
+ nvp->len_hi = (size /16) >> 8;
+ nvp->len_lo = size /16;
diff --git a/gnu/packages/patches/petri-foo-0.1.87-fix-recent-file-not-exist.patch b/gnu/packages/patches/petri-foo-0.1.87-fix-recent-file-not-exist.patch
new file mode 100644
index 0000000000..3e88487b07
--- /dev/null
+++ b/gnu/packages/patches/petri-foo-0.1.87-fix-recent-file-not-exist.patch
@@ -0,0 +1,24 @@
+diff -Naur a/gui/bank-ops.c b/gui/bank-ops.c
+--- a/gui/bank-ops.c 2012-08-06 05:33:34.000000000 +0200
++++ b/gui/bank-ops.c 2012-08-07 17:57:28.580145691 +0200
+@@ -393,6 +393,8 @@
+ g_signal_connect_swapped(G_OBJECT(msg), "response",
+ G_CALLBACK(gtk_widget_destroy), msg);
+ gtk_widget_show (msg);
++
++ gtk_recent_manager_remove_item(recent_manager, filename, NULL);
+ }
+ else
+ {
+diff -Naur a/libpetrifui/dish_file.c b/libpetrifui/dish_file.c
+--- a/libpetrifui/dish_file.c 2012-08-06 05:33:34.000000000 +0200
++++ b/libpetrifui/dish_file.c 2012-08-07 17:56:09.063909801 +0200
+@@ -1440,7 +1440,7 @@
+
+ if (stat(path, &st) != 0)
+ {
+- msg_log(MSG_ERROR, "file '%s' does not exist\n");
++ msg_log(MSG_ERROR, "file '%s' does not exist\n", path);
+ return -1;
+ }
+
diff --git a/gnu/packages/patches/picard-fix-id3-rename-test.patch b/gnu/packages/patches/picard-fix-id3-rename-test.patch
deleted file mode 100644
index 3c4fa63c84..0000000000
--- a/gnu/packages/patches/picard-fix-id3-rename-test.patch
+++ /dev/null
@@ -1,11 +0,0 @@
-Index: b/test/formats/test_id3.py
-===================================================================
---- a/test/formats/test_id3.py
-+++ b/test/formats/test_id3.py
-@@ -266,5 +266,5 @@
- def test_rename_txxx_tags(self):
- file_path = os.path.join('test', 'data', 'test-id3-rename-tags.mp3')
-- filename = self.copy_file_tmp(file_path, 'mp3')
-+ filename = self.copy_file_tmp(file_path, '.mp3')
- raw_metadata = load_raw(filename)
- self.assertIn('TXXX:Artists', raw_metadata)
diff --git a/gnu/packages/patches/pocketfft-cpp-prefer-preprocessor-if.patch b/gnu/packages/patches/pocketfft-cpp-prefer-preprocessor-if.patch
new file mode 100644
index 0000000000..028bdf2f89
--- /dev/null
+++ b/gnu/packages/patches/pocketfft-cpp-prefer-preprocessor-if.patch
@@ -0,0 +1,109 @@
+This patch replaces #ifndef POCKETFFT_NO_VECTORS with #if POCKETFFT_NO_VECTORS.
+It also disables vector (SIMD) support by default, as SIMD instructions are
+not well-suited for substitutes.
+
+diff --git a/pocketfft_hdronly.h b/pocketfft_hdronly.h
+index d75ada6..b2d0a23 100644
+--- a/pocketfft_hdronly.h
++++ b/pocketfft_hdronly.h
+@@ -39,6 +39,10 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ #ifndef POCKETFFT_HDRONLY_H
+ #define POCKETFFT_HDRONLY_H
+
++#ifndef POCKETFFT_NO_VECTORS
++#define POCKETFFT_NO_VECTORS 1
++#endif
++
+ #ifndef __cplusplus
+ #error This file is C++ and requires a C++ compiler.
+ #endif
+@@ -106,29 +110,29 @@ constexpr bool FORWARD = true,
+ BACKWARD = false;
+
+ // only enable vector support for gcc>=5.0 and clang>=5.0
+-#ifndef POCKETFFT_NO_VECTORS
+-#define POCKETFFT_NO_VECTORS
++#if !(POCKETFFT_NO_VECTORS)
++#define POCKETFFT_NO_VECTORS 1
+ #if defined(__INTEL_COMPILER)
+ // do nothing. This is necessary because this compiler also sets __GNUC__.
+ #elif defined(__clang__)
+ // AppleClang has their own version numbering
+ #ifdef __apple_build_version__
+ # if (__clang_major__ > 9) || (__clang_major__ == 9 && __clang_minor__ >= 1)
+-# undef POCKETFFT_NO_VECTORS
++#define POCKETFFT_NO_VECTORS 0
+ # endif
+ #elif __clang_major__ >= 5
+-# undef POCKETFFT_NO_VECTORS
++#define POCKETFFT_NO_VECTORS 0
+ #endif
+ #elif defined(__GNUC__)
+ #if __GNUC__>=5
+-#undef POCKETFFT_NO_VECTORS
++#define POCKETFFT_NO_VECTORS 0
+ #endif
+ #endif
+ #endif
+
+ template<typename T> struct VLEN { static constexpr size_t val=1; };
+
+-#ifndef POCKETFFT_NO_VECTORS
++#if !(POCKETFFT_NO_VECTORS)
+ #if (defined(__AVX512F__))
+ template<> struct VLEN<float> { static constexpr size_t val=16; };
+ template<> struct VLEN<double> { static constexpr size_t val=8; };
+@@ -145,7 +149,7 @@ template<> struct VLEN<double> { static constexpr size_t val=2; };
+ template<> struct VLEN<float> { static constexpr size_t val=4; };
+ template<> struct VLEN<double> { static constexpr size_t val=2; };
+ #else
+-#define POCKETFFT_NO_VECTORS
++#define POCKETFFT_NO_VECTORS 1
+ #endif
+ #endif
+
+@@ -180,7 +184,7 @@ template<typename T> class arr
+ T *p;
+ size_t sz;
+
+-#if defined(POCKETFFT_NO_VECTORS)
++#if POCKETFFT_NO_VECTORS
+ static T *ralloc(size_t num)
+ {
+ if (num==0) return nullptr;
+@@ -3026,7 +3030,7 @@ class rev_iter
+ template<typename T> struct VTYPE {};
+ template <typename T> using vtype_t = typename VTYPE<T>::type;
+
+-#ifndef POCKETFFT_NO_VECTORS
++#if !(POCKETFFT_NO_VECTORS)
+ template<> struct VTYPE<float>
+ {
+ using type = float __attribute__ ((vector_size (VLEN<float>::val*sizeof(float))));
+@@ -3139,7 +3143,7 @@ POCKETFFT_NOINLINE void general_nd(const cndarr<T> &in, ndarr<T> &out,
+ auto storage = alloc_tmp<T0>(in.shape(), len, sizeof(T));
+ const auto &tin(iax==0? in : out);
+ multi_iter<vlen> it(tin, out, axes[iax]);
+-#ifndef POCKETFFT_NO_VECTORS
++#if !(POCKETFFT_NO_VECTORS)
+ if (vlen>1)
+ while (it.remaining()>=vlen)
+ {
+@@ -3245,7 +3249,7 @@ template<typename T> POCKETFFT_NOINLINE void general_r2c(
+ constexpr auto vlen = VLEN<T>::val;
+ auto storage = alloc_tmp<T>(in.shape(), len, sizeof(T));
+ multi_iter<vlen> it(in, out, axis);
+-#ifndef POCKETFFT_NO_VECTORS
++#if !(POCKETFFT_NO_VECTORS)
+ if (vlen>1)
+ while (it.remaining()>=vlen)
+ {
+@@ -3300,7 +3304,7 @@ template<typename T> POCKETFFT_NOINLINE void general_c2r(
+ constexpr auto vlen = VLEN<T>::val;
+ auto storage = alloc_tmp<T>(out.shape(), len, sizeof(T));
+ multi_iter<vlen> it(in, out, axis);
+-#ifndef POCKETFFT_NO_VECTORS
++#if !(POCKETFFT_NO_VECTORS)
+ if (vlen>1)
+ while (it.remaining()>=vlen)
+ {
diff --git a/gnu/packages/patches/protobuf-fix-build-on-32bit.patch b/gnu/packages/patches/protobuf-fix-build-on-32bit.patch
new file mode 100644
index 0000000000..d586cad4b5
--- /dev/null
+++ b/gnu/packages/patches/protobuf-fix-build-on-32bit.patch
@@ -0,0 +1,139 @@
+From 5f4a52d9bff7595ec47fb6727662a1cada3cd404 Mon Sep 17 00:00:00 2001
+From: Mike Kruskal <mkruskal@google.com>
+Date: Thu, 15 Sep 2022 10:23:23 -0700
+Subject: [PATCH 3/7] Patching static assert test failure
+
+---
+Edited to remove the patches touching the php directory.
+
+ src/google/protobuf/extension_set_unittest.cc | 6 ++++--
+ 1 file changed, 4 insertions(+), 2 deletions(-)
+
+diff --git a/src/google/protobuf/extension_set_unittest.cc b/src/google/protobuf/extension_set_unittest.cc
+index 8b436bc20c..84da3c5465 100644
+--- a/src/google/protobuf/extension_set_unittest.cc
++++ b/src/google/protobuf/extension_set_unittest.cc
+@@ -855,8 +855,10 @@ TEST(ExtensionSetTest, SpaceUsedExcludingSelf) {
+ const size_t old_capacity = \
+ message->GetRepeatedExtension(unittest::repeated_##type##_extension) \
+ .Capacity(); \
+- EXPECT_GE(old_capacity, \
+- (RepeatedFieldLowerClampLimit<cpptype, sizeof(void*)>())); \
++ EXPECT_GE( \
++ old_capacity, \
++ (RepeatedFieldLowerClampLimit<cpptype, std::max(sizeof(cpptype), \
++ sizeof(void*))>())); \
+ for (int i = 0; i < 16; ++i) { \
+ message->AddExtension(unittest::repeated_##type##_extension, value); \
+ } \
+
+From c94b66706bec17d918495f4715183a5eaf0f8044 Mon Sep 17 00:00:00 2001
+From: Mike Kruskal <mkruskal@google.com>
+Date: Thu, 15 Sep 2022 11:31:31 -0700
+Subject: [PATCH 4/7] Test fixes for 32-bit architectures
+
+---
+ .../compiler/cpp/message_size_unittest.cc | 2 +-
+ .../protobuf/io/zero_copy_stream_unittest.cc | 3 ++
+ .../protobuf/repeated_field_unittest.cc | 4 +--
+ src/google/protobuf/util/time_util_test.cc | 28 +++++++++++--------
+ 4 files changed, 23 insertions(+), 14 deletions(-)
+
+diff --git a/src/google/protobuf/compiler/cpp/message_size_unittest.cc b/src/google/protobuf/compiler/cpp/message_size_unittest.cc
+index a75d77a70c..ed4a90e223 100644
+--- a/src/google/protobuf/compiler/cpp/message_size_unittest.cc
++++ b/src/google/protobuf/compiler/cpp/message_size_unittest.cc
+@@ -139,9 +139,9 @@ TEST(GeneratedMessageTest, OneStringSize) {
+
+ TEST(GeneratedMessageTest, MoreStringSize) {
+ struct MockGenerated : public MockMessageBase { // 16 bytes
+- int has_bits[1]; // 4 bytes
+ int cached_size; // 4 bytes
+ MockRepeatedPtrField data; // 24 bytes
++ // + 4 bytes padding
+ };
+ GOOGLE_CHECK_MESSAGE_SIZE(MockGenerated, 48);
+ EXPECT_EQ(sizeof(protobuf_unittest::MoreString), sizeof(MockGenerated));
+diff --git a/src/google/protobuf/io/zero_copy_stream_unittest.cc b/src/google/protobuf/io/zero_copy_stream_unittest.cc
+index d82354e571..d656da5f13 100644
+--- a/src/google/protobuf/io/zero_copy_stream_unittest.cc
++++ b/src/google/protobuf/io/zero_copy_stream_unittest.cc
+@@ -720,6 +720,9 @@ TEST_F(IoTest, StringIo) {
+
+ // Verifies that outputs up to kint32max can be created.
+ TEST_F(IoTest, LargeOutput) {
++ // Filter out this test on 32-bit architectures.
++ if(sizeof(void*) < 8) return;
++
+ std::string str;
+ StringOutputStream output(&str);
+ void* unused_data;
+diff --git a/src/google/protobuf/repeated_field_unittest.cc b/src/google/protobuf/repeated_field_unittest.cc
+index eb0b9091cf..3baf6f25bb 100644
+--- a/src/google/protobuf/repeated_field_unittest.cc
++++ b/src/google/protobuf/repeated_field_unittest.cc
+@@ -429,14 +429,14 @@ TEST(RepeatedField, ReserveNothing) {
+
+ TEST(RepeatedField, ReserveLowerClamp) {
+ int clamped_value = internal::CalculateReserveSize<bool, sizeof(void*)>(0, 1);
+- EXPECT_GE(clamped_value, 8 / sizeof(bool));
++ EXPECT_GE(clamped_value, sizeof(void*) / sizeof(bool));
+ EXPECT_EQ((internal::RepeatedFieldLowerClampLimit<bool, sizeof(void*)>()),
+ clamped_value);
+ // EXPECT_EQ(clamped_value, (internal::CalculateReserveSize<bool,
+ // sizeof(void*)>( clamped_value, 2)));
+
+ clamped_value = internal::CalculateReserveSize<int, sizeof(void*)>(0, 1);
+- EXPECT_GE(clamped_value, 8 / sizeof(int));
++ EXPECT_GE(clamped_value, sizeof(void*) / sizeof(int));
+ EXPECT_EQ((internal::RepeatedFieldLowerClampLimit<int, sizeof(void*)>()),
+ clamped_value);
+ // EXPECT_EQ(clamped_value, (internal::CalculateReserveSize<int,
+diff --git a/src/google/protobuf/util/time_util_test.cc b/src/google/protobuf/util/time_util_test.cc
+index 464e57b4ee..dceb6dbe2b 100644
+--- a/src/google/protobuf/util/time_util_test.cc
++++ b/src/google/protobuf/util/time_util_test.cc
+@@ -48,15 +48,18 @@ using google::protobuf::Timestamp;
+ namespace {
+
+ TEST(TimeUtilTest, TimestampStringFormat) {
+- Timestamp begin, end;
+- EXPECT_TRUE(TimeUtil::FromString("0001-01-01T00:00:00Z", &begin));
+- EXPECT_EQ(TimeUtil::kTimestampMinSeconds, begin.seconds());
+- EXPECT_EQ(0, begin.nanos());
+- EXPECT_TRUE(TimeUtil::FromString("9999-12-31T23:59:59.999999999Z", &end));
+- EXPECT_EQ(TimeUtil::kTimestampMaxSeconds, end.seconds());
+- EXPECT_EQ(999999999, end.nanos());
+- EXPECT_EQ("0001-01-01T00:00:00Z", TimeUtil::ToString(begin));
+- EXPECT_EQ("9999-12-31T23:59:59.999999999Z", TimeUtil::ToString(end));
++  // These are out of bounds for 32-bit architectures.
++ if(sizeof(time_t) >= sizeof(uint64_t)) {
++ Timestamp begin, end;
++ EXPECT_TRUE(TimeUtil::FromString("0001-01-01T00:00:00Z", &begin));
++ EXPECT_EQ(TimeUtil::kTimestampMinSeconds, begin.seconds());
++ EXPECT_EQ(0, begin.nanos());
++ EXPECT_TRUE(TimeUtil::FromString("9999-12-31T23:59:59.999999999Z", &end));
++ EXPECT_EQ(TimeUtil::kTimestampMaxSeconds, end.seconds());
++ EXPECT_EQ(999999999, end.nanos());
++ EXPECT_EQ("0001-01-01T00:00:00Z", TimeUtil::ToString(begin));
++ EXPECT_EQ("9999-12-31T23:59:59.999999999Z", TimeUtil::ToString(end));
++ }
+
+ // Test negative timestamps.
+ Timestamp time = TimeUtil::NanosecondsToTimestamp(-1);
+@@ -94,9 +97,12 @@ TEST(TimeUtilTest, DurationStringFormat) {
+ EXPECT_TRUE(TimeUtil::FromString("0001-01-01T00:00:00Z", &begin));
+ EXPECT_TRUE(TimeUtil::FromString("9999-12-31T23:59:59.999999999Z", &end));
+
+- EXPECT_EQ("315537897599.999999999s", TimeUtil::ToString(end - begin));
++  // These are out of bounds for 32-bit architectures.
++ if(sizeof(time_t) >= sizeof(uint64_t)) {
++ EXPECT_EQ("315537897599.999999999s", TimeUtil::ToString(end - begin));
++ EXPECT_EQ("-315537897599.999999999s", TimeUtil::ToString(begin - end));
++ }
+ EXPECT_EQ(999999999, (end - begin).nanos());
+- EXPECT_EQ("-315537897599.999999999s", TimeUtil::ToString(begin - end));
+ EXPECT_EQ(-999999999, (begin - end).nanos());
+
+ // Generated output should contain 3, 6, or 9 fractional digits.
+
diff --git a/gnu/packages/patches/public-inbox-fix-spawn-test.patch b/gnu/packages/patches/public-inbox-fix-spawn-test.patch
deleted file mode 100644
index 2739b1974d..0000000000
--- a/gnu/packages/patches/public-inbox-fix-spawn-test.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-From 5593489d9c3ce22b1942f35c7ebb0e06fcf2bfa8 Mon Sep 17 00:00:00 2001
-From: Thiago Jung Bauermann <bauermann@kolabnow.com>
-Date: Fri, 10 Jun 2022 12:39:18 -0300
-Subject: [PATCH] t/spawn: Find invalid PID to try to join its process group
-
-In the container used to build packages of the GNU Guix distribution, PID 1
-runs as the same user as the test so this spawn that should fail actually
-succeeds.
-
-Fix the problem by going through different PIDs and picking one that
-either doesn't exist or we aren't allowed to signal.
----
-
-This patch is taken from the public-inbox repository and will appear in the
-release after v1.8.
-
- t/spawn.t | 13 ++++++++++++-
- 1 file changed, 12 insertions(+), 1 deletion(-)
-
-diff --git a/t/spawn.t b/t/spawn.t
-index 6168c1f6171c..5fc99a2a101c 100644
---- a/t/spawn.t
-+++ b/t/spawn.t
-@@ -24,7 +24,18 @@ SKIP: {
- is(waitpid($pid, 0), $pid, 'waitpid succeeds on spawned process');
- is($?, 0, 'true exited successfully');
- pipe(my ($r, $w)) or BAIL_OUT;
-- $pid = eval { spawn(['true'], undef, { pgid => 1, 2 => $w }) };
-+
-+ # Find invalid PID to try to join its process group.
-+ my $wrong_pgid = 1;
-+ for (my $i=0x7fffffff; $i >= 2; $i--) {
-+ if (kill(0, $i) == 0) {
-+ $wrong_pgid = $i;
-+ last;
-+ }
-+ }
-+
-+ # Test spawn behavior when it can't join the requested process group.
-+ $pid = eval { spawn(['true'], undef, { pgid => $wrong_pgid, 2 => $w }) };
- close $w;
- my $err = do { local $/; <$r> };
- # diag "$err ($@)";
diff --git a/gnu/packages/patches/python-afdko-suppress-copyright-test.patch b/gnu/packages/patches/python-afdko-suppress-copyright-test.patch
new file mode 100644
index 0000000000..94cd73d5f7
--- /dev/null
+++ b/gnu/packages/patches/python-afdko-suppress-copyright-test.patch
@@ -0,0 +1,20 @@
+Suppress the copyright check in tests, which can fail depending on the current year.
+
+ https://github.com/adobe-type-tools/afdko/issues/1589
+
+Adapted from upstream:
+
+ https://github.com/adobe-type-tools/afdko/commit/feebd77d9b6507a0b32f837535511be3c94d9c6f
+
+diff --git a/tests/tx_test.py b/tests/tx_test.py
+--- a/tests/tx_test.py
++++ b/tests/tx_test.py
+@@ -1246,7 +1246,7 @@ def test_ufo_fontinfo_parsing(file, msg, ret_code):
+ if (ret_code == 0):
+ expected_path = generate_ps_dump(expected_path)
+ output_path = generate_ps_dump(output_path)
+- assert differ([expected_path, output_path])
++ assert differ([expected_path, output_path, '-s'] + PFA_SKIP)
+ else:
+ arg = [TOOL, '-t1', '-f', ufo_input_path]
+ assert subprocess.call(arg) == 6
diff --git a/gnu/packages/patches/python-apsw-3.39.2.1-test-fix.patch b/gnu/packages/patches/python-apsw-3.39.2.1-test-fix.patch
deleted file mode 100644
index cc233e3ccd..0000000000
--- a/gnu/packages/patches/python-apsw-3.39.2.1-test-fix.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 1111f902075169bd0d96cdd10607ef8499f0fed5 Mon Sep 17 00:00:00 2001
-From: Roger Binns <rogerb@rogerbinns.com>
-Date: Mon, 5 Sep 2022 07:12:25 -0700
-Subject: [PATCH] Deal with mismatched SQLITE_ENABLE_COLUMN_METADATA
-
-Address #363
----
- apsw/tests.py | 2 +-
- 1 files changed, 1 insertions(+), 1 deletion(-)
-
-diff --git a/apsw/tests.py b/apsw/tests.py
-index b4a94d3..256ead0 100644
---- a/apsw/tests.py
-+++ b/apsw/tests.py
-@@ -772,8 +772,8 @@ class APSW(unittest.TestCase):
- c.execute("drop table foo; create table foo (%s)" % (", ".join(["[%s] %s" % (n, t) for n, t in cols]), ))
- c.execute("insert into foo([x a space]) values(1)")
- c.execute("create temp table two(fred banana); insert into two values(7); create temp view three as select fred as [a space] from two")
-- has_full=any(o=="ENABLE_COLUMN_METADATA" or o.startswith("ENABLE_COLUMN_METADATA=") for o in apsw.compile_options)
-+ has_full=any(o=="ENABLE_COLUMN_METADATA" or o.startswith("ENABLE_COLUMN_METADATA=") for o in apsw.compile_options) if apsw.using_amalgamation else hasattr(c, "description_full")
- for row in c.execute("select * from foo"):
- self.assertEqual(cols, c.getdescription())
- self.assertEqual(has_full, hasattr(c, "description_full"))
-
-base-commit: f628374c5857d940067ef26c9ea4e85a08a94f76
---
-2.37.2
diff --git a/gnu/packages/patches/python-flask-restful-werkzeug-compat.patch b/gnu/packages/patches/python-flask-restful-werkzeug-compat.patch
deleted file mode 100644
index 0e928ef455..0000000000
--- a/gnu/packages/patches/python-flask-restful-werkzeug-compat.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-We need one patch on top of 0.3.8 for compatibility with Werkzeug 1.0.
-
-Taken from upstream:
-https://github.com/flask-restful/flask-restful/commit/73376a488907af3042b52678ac4c23f8a8911e5b
-
-diff --git a/tests/test_api.py b/tests/test_api.py
-index f7f8e661..6795d362 100644
---- a/tests/test_api.py
-+++ b/tests/test_api.py
-@@ -445,7 +445,9 @@ def test_handle_non_api_error(self):
-
- resp = app.get("/foo")
- self.assertEquals(resp.status_code, 404)
-- self.assertEquals('text/html', resp.headers['Content-Type'])
-+ # in newer versions of werkzeug this is `text/html; charset=utf8`
-+ content_type, _, _ = resp.headers['Content-Type'].partition(';')
-+ self.assertEquals('text/html', content_type)
-
- def test_non_api_error_404_catchall(self):
- app = Flask(__name__)
-diff --git a/tests/test_reqparse.py b/tests/test_reqparse.py
-index 2f1fbedf..9776f17c 100644
---- a/tests/test_reqparse.py
-+++ b/tests/test_reqparse.py
-@@ -2,9 +2,9 @@
- import unittest
- from mock import Mock, patch
- from flask import Flask
--from werkzeug import exceptions, MultiDict
-+from werkzeug import exceptions
- from werkzeug.wrappers import Request
--from werkzeug.datastructures import FileStorage
-+from werkzeug.datastructures import FileStorage, MultiDict
- from flask_restful.reqparse import Argument, RequestParser, Namespace
- import six
- import decimal
diff --git a/gnu/packages/patches/python-louvain-fix-test.patch b/gnu/packages/patches/python-louvain-fix-test.patch
new file mode 100644
index 0000000000..2127df681c
--- /dev/null
+++ b/gnu/packages/patches/python-louvain-fix-test.patch
@@ -0,0 +1,15 @@
+https://github.com/taynaud/python-louvain/commit/638804ae636dc65306900ef6518ca0a1c9202566.diff
+
+diff --git a/test_community.py b/test_community.py
+index 1ee1976..566a4b1 100644
+--- a/test_community.py
++++ b/test_community.py
+@@ -203,7 +203,7 @@ def test_karate(self):
+ self.assertAlmostEqual(co.modularity(part, graph),
+ co.modularity(part_weight,
+ graph,
+- "test_weight"), places=2)
++ "test_weight"), places=1)
+
+ part_res_low = co.best_partition(graph, resolution=0.1)
+ self.assertTrue(
diff --git a/gnu/packages/patches/python-pypdf-annotate-tests-appropriately.patch b/gnu/packages/patches/python-pypdf-annotate-tests-appropriately.patch
new file mode 100644
index 0000000000..14f1f73924
--- /dev/null
+++ b/gnu/packages/patches/python-pypdf-annotate-tests-appropriately.patch
@@ -0,0 +1,96 @@
+Origin: https://github.com/py-pdf/pypdf/commit/767047b98ee3ea7aca331cfbd63502a284bfed93
+From 767047b98ee3ea7aca331cfbd63502a284bfed93 Mon Sep 17 00:00:00 2001
+From: dkg <dkg@fifthhorseman.net>
+Date: Sat, 14 Jan 2023 03:32:45 -0500
+Subject: [PATCH 03/14] Annotate tests appropriately (#1551)
+
+By annotating these tests, we can use pytest markers to skip external
+tests and tests that depend on sample-files.
+---
+ tests/test_reader.py | 5 +++++
+ tests/test_writer.py | 4 ++++
+ 2 files changed, 9 insertions(+)
+
+diff --git a/tests/test_reader.py b/tests/test_reader.py
+index 710e6c5..62eb7b7 100644
+--- a/tests/test_reader.py
++++ b/tests/test_reader.py
+@@ -176,6 +176,7 @@ def test_get_outline(src, outline_elements):
+ assert len(outline) == outline_elements
+
+
++@pytest.mark.samples
+ @pytest.mark.parametrize(
+ ("src", "expected_images"),
+ [
+@@ -866,6 +867,7 @@ def test_get_fields():
+ assert dict(fields["c1-1"]) == ({"/FT": "/Btn", "/T": "c1-1"})
+
+
++@pytest.mark.external
+ def test_get_full_qualified_fields():
+ url = "https://github.com/py-pdf/PyPDF2/files/10142389/fields_with_dots.pdf"
+ name = "fields_with_dots.pdf"
+@@ -1214,6 +1216,7 @@ def test_zeroing_xref():
+ len(reader.pages)
+
+
++@pytest.mark.external
+ def test_thread():
+ url = "https://github.com/py-pdf/pypdf/files/9066120/UTA_OSHA_3115_Fall_Protection_Training_09162021_.pdf"
+ name = "UTA_OSHA.pdf"
+@@ -1226,6 +1229,7 @@ def test_thread():
+ assert len(reader.threads) >= 1
+
+
++@pytest.mark.external
+ def test_build_outline_item(caplog):
+ url = "https://github.com/py-pdf/pypdf/files/9464742/shiv_resume.pdf"
+ name = "shiv_resume.pdf"
+@@ -1253,6 +1257,7 @@ def test_build_outline_item(caplog):
+ assert "Unexpected destination 2" in exc.value.args[0]
+
+
++@pytest.mark.samples
+ @pytest.mark.parametrize(
+ ("src", "page_labels"),
+ [
+diff --git a/tests/test_writer.py b/tests/test_writer.py
+index 60b4a17..20c4de0 100644
+--- a/tests/test_writer.py
++++ b/tests/test_writer.py
+@@ -930,6 +930,7 @@ def test_startup_dest():
+ pdf_file_writer.open_destination = None
+
+
++@pytest.mark.external
+ def test_iss471():
+ url = "https://github.com/py-pdf/pypdf/files/9139245/book.pdf"
+ name = "book_471.pdf"
+@@ -942,6 +943,7 @@ def test_iss471():
+ )
+
+
++@pytest.mark.external
+ def test_reset_translation():
+ url = "https://corpora.tika.apache.org/base/docs/govdocs1/924/924666.pdf"
+ name = "tika-924666.pdf"
+@@ -977,6 +979,7 @@ def test_threads_empty():
+ assert thr == thr2
+
+
++@pytest.mark.external
+ def test_append_without_annots_and_articles():
+ url = "https://corpora.tika.apache.org/base/docs/govdocs1/924/924666.pdf"
+ name = "tika-924666.pdf"
+@@ -993,6 +996,7 @@ def test_append_without_annots_and_articles():
+ assert len(writer.threads) >= 1
+
+
++@pytest.mark.external
+ def test_append_multiple():
+ url = "https://corpora.tika.apache.org/base/docs/govdocs1/924/924666.pdf"
+ name = "tika-924666.pdf"
+--
+2.39.1
+
diff --git a/gnu/packages/patches/python-seaborn-2690.patch b/gnu/packages/patches/python-seaborn-2690.patch
deleted file mode 100644
index 4662d19c2d..0000000000
--- a/gnu/packages/patches/python-seaborn-2690.patch
+++ /dev/null
@@ -1,268 +0,0 @@
-This patch was adapted from the upstream pull request 2690.
-
-From ebd6812d48f5b8ed1ebb7d79bda0b2a7b9ae2812 Mon Sep 17 00:00:00 2001
-From: Michael Waskom <mwaskom@gmail.com>
-Date: Sun, 31 Oct 2021 15:09:27 -0400
-Subject: [PATCH 1/4] Update boxplot tests for mpl3.5 compatability
-
----
- seaborn/tests/test_categorical.py | 30 +++++++++++++++++++-----------
- 1 file changed, 19 insertions(+), 11 deletions(-)
-
-diff --git a/seaborn/tests/test_categorical.py b/seaborn/tests/test_categorical.py
-index d4e09b703..488fad2d6 100644
---- a/seaborn/tests/test_categorical.py
-+++ b/seaborn/tests/test_categorical.py
-@@ -110,6 +110,11 @@ class CategoricalFixture:
- df = pd.DataFrame(dict(y=y, g=g, h=h, u=u))
- x_df["W"] = g
-
-+ def get_box_artists(self, ax):
-+
-+ # Exclude labeled patches, which are for the legend
-+ return [p for p in ax.patches if not p.get_label()]
-+
-
- class TestCategoricalPlotter(CategoricalFixture):
-
-@@ -855,12 +863,12 @@ def test_hue_offsets(self):
- def test_axes_data(self):
-
- ax = cat.boxplot(x="g", y="y", data=self.df)
-- assert len(ax.artists) == 3
-+ assert len(self.get_box_artists(ax)) == 3
-
- plt.close("all")
-
- ax = cat.boxplot(x="g", y="y", hue="h", data=self.df)
-- assert len(ax.artists) == 6
-+ assert len(self.get_box_artists(ax)) == 6
-
- plt.close("all")
-
-@@ -868,14 +876,14 @@ def test_box_colors(self):
-
- ax = cat.boxplot(x="g", y="y", data=self.df, saturation=1)
- pal = palettes.color_palette(n_colors=3)
-- for patch, color in zip(ax.artists, pal):
-+ for patch, color in zip(self.get_box_artists(ax), pal):
- assert patch.get_facecolor()[:3] == color
-
- plt.close("all")
-
- ax = cat.boxplot(x="g", y="y", hue="h", data=self.df, saturation=1)
- pal = palettes.color_palette(n_colors=2)
-- for patch, color in zip(ax.artists, pal * 2):
-+ for patch, color in zip(self.get_box_artists(ax), pal * 2):
- assert patch.get_facecolor()[:3] == color
-
- plt.close("all")
-@@ -884,7 +892,7 @@ def test_draw_missing_boxes(self):
-
- ax = cat.boxplot(x="g", y="y", data=self.df,
- order=["a", "b", "c", "d"])
-- assert len(ax.artists) == 3
-+ assert len(self.get_box_artists(ax)) == 3
-
- def test_missing_data(self):
-
-@@ -894,13 +902,13 @@ def test_missing_data(self):
- y[-2:] = np.nan
-
- ax = cat.boxplot(x=x, y=y)
-- assert len(ax.artists) == 3
-+ assert len(self.get_box_artists(ax)) == 3
-
- plt.close("all")
-
- y[-1] = 0
- ax = cat.boxplot(x=x, y=y, hue=h)
-- assert len(ax.artists) == 7
-+ assert len(self.get_box_artists(ax)) == 7
-
- plt.close("all")
-
-@@ -2766,11 +2774,11 @@ def test_plot_elements(self):
-
- g = cat.catplot(x="g", y="y", data=self.df, kind="box")
- want_artists = self.g.unique().size
-- assert len(g.ax.artists) == want_artists
-+ assert len(self.get_box_artists(g.ax)) == want_artists
-
- g = cat.catplot(x="g", y="y", hue="h", data=self.df, kind="box")
- want_artists = self.g.unique().size * self.h.unique().size
-- assert len(g.ax.artists) == want_artists
-+ assert len(self.get_box_artists(g.ax)) == want_artists
-
- g = cat.catplot(x="g", y="y", data=self.df,
- kind="violin", inner=None)
-@@ -3137,14 +3145,14 @@ def test_box_colors(self):
-
- ax = cat.boxenplot(x="g", y="y", data=self.df, saturation=1)
- pal = palettes.color_palette(n_colors=3)
-- for patch, color in zip(ax.artists, pal):
-+ for patch, color in zip(self.get_box_artists(ax), pal):
- assert patch.get_facecolor()[:3] == color
-
- plt.close("all")
-
- ax = cat.boxenplot(x="g", y="y", hue="h", data=self.df, saturation=1)
- pal = palettes.color_palette(n_colors=2)
-- for patch, color in zip(ax.artists, pal * 2):
-+ for patch, color in zip(self.get_box_artists(ax), pal * 2):
- assert patch.get_facecolor()[:3] == color
-
- plt.close("all")
-
-From ff78ed38817a346e760194ab3b03b28d7ea3ba1b Mon Sep 17 00:00:00 2001
-From: Michael Waskom <mwaskom@gmail.com>
-Date: Sun, 31 Oct 2021 15:50:54 -0400
-Subject: [PATCH 2/4] Update kdeplot tests for mpl3.5 compatability
-
----
- seaborn/tests/test_distributions.py | 53 ++++++++++++++++++++---------
- 1 file changed, 37 insertions(+), 16 deletions(-)
-
-diff --git a/seaborn/tests/test_distributions.py b/seaborn/tests/test_distributions.py
-index d241fd978..466efb69e 100644
---- a/seaborn/tests/test_distributions.py
-+++ b/seaborn/tests/test_distributions.py
-@@ -39,6 +39,27 @@
- )
-
-
-+def get_contour_coords(c):
-+ """Provide compatability for change in contour artist type in mpl3.5."""
-+ # See https://github.com/matplotlib/matplotlib/issues/20906
-+ if isinstance(c, mpl.collections.LineCollection):
-+ return c.get_segments()
-+ elif isinstance(c, mpl.collections.PathCollection):
-+ return [p.vertices[:np.argmax(p.codes) + 1] for p in c.get_paths()]
-+
-+
-+def get_contour_color(c):
-+ """Provide compatability for change in contour artist type in mpl3.5."""
-+ # See https://github.com/matplotlib/matplotlib/issues/20906
-+ if isinstance(c, mpl.collections.LineCollection):
-+ return c.get_color()
-+ elif isinstance(c, mpl.collections.PathCollection):
-+ if c.get_facecolor().size:
-+ return c.get_facecolor()
-+ else:
-+ return c.get_edgecolor()
-+
-+
- class TestDistPlot(object):
-
- rs = np.random.RandomState(0)
-@@ -902,7 +923,7 @@ def test_fill_artists(self, long_df):
- f, ax = plt.subplots()
- kdeplot(data=long_df, x="x", y="y", hue="c", fill=fill)
- for c in ax.collections:
-- if fill:
-+ if fill or Version(mpl.__version__) >= Version("3.5.0b0"):
- assert isinstance(c, mpl.collections.PathCollection)
- else:
- assert isinstance(c, mpl.collections.LineCollection)
-@@ -918,8 +939,8 @@ def test_common_norm(self, rng):
- kdeplot(x=x, y=y, hue=hue, common_norm=True, ax=ax1)
- kdeplot(x=x, y=y, hue=hue, common_norm=False, ax=ax2)
-
-- n_seg_1 = sum([len(c.get_segments()) > 0 for c in ax1.collections])
-- n_seg_2 = sum([len(c.get_segments()) > 0 for c in ax2.collections])
-+ n_seg_1 = sum([len(get_contour_coords(c)) > 0 for c in ax1.collections])
-+ n_seg_2 = sum([len(get_contour_coords(c)) > 0 for c in ax2.collections])
- assert n_seg_2 > n_seg_1
-
- def test_log_scale(self, rng):
-@@ -946,7 +967,7 @@ def test_log_scale(self, rng):
- ax2.contour(10 ** xx, yy, density, levels=levels)
-
- for c1, c2 in zip(ax1.collections, ax2.collections):
-- assert_array_equal(c1.get_segments(), c2.get_segments())
-+ assert_array_equal(get_contour_coords(c1), get_contour_coords(c2))
-
- def test_bandwidth(self, rng):
-
-@@ -959,7 +980,7 @@ def test_bandwidth(self, rng):
- kdeplot(x=x, y=y, bw_adjust=2, ax=ax2)
-
- for c1, c2 in zip(ax1.collections, ax2.collections):
-- seg1, seg2 = c1.get_segments(), c2.get_segments()
-+ seg1, seg2 = get_contour_coords(c1), get_contour_coords(c2)
- if seg1 + seg2:
- x1 = seg1[0][:, 0]
- x2 = seg2[0][:, 0]
-@@ -980,9 +1001,9 @@ def test_weights(self, rng):
- kdeplot(x=x, y=y, hue=hue, weights=weights, ax=ax2)
-
- for c1, c2 in zip(ax1.collections, ax2.collections):
-- if c1.get_segments() and c2.get_segments():
-- seg1 = np.concatenate(c1.get_segments(), axis=0)
-- seg2 = np.concatenate(c2.get_segments(), axis=0)
-+ if get_contour_coords(c1) and get_contour_coords(c2):
-+ seg1 = np.concatenate(get_contour_coords(c1), axis=0)
-+ seg2 = np.concatenate(get_contour_coords(c2), axis=0)
- assert not np.array_equal(seg1, seg2)
-
- def test_hue_ignores_cmap(self, long_df):
-@@ -1030,7 +1051,7 @@ def test_levels_and_thresh(self, long_df):
- kdeplot(**plot_kws, levels=np.linspace(thresh, 1, n), ax=ax2)
-
- for c1, c2 in zip(ax1.collections, ax2.collections):
-- assert_array_equal(c1.get_segments(), c2.get_segments())
-+ assert_array_equal(get_contour_coords(c1), get_contour_coords(c2))
-
- with pytest.raises(ValueError):
- kdeplot(**plot_kws, levels=[0, 1, 2])
-@@ -1042,7 +1063,7 @@ def test_levels_and_thresh(self, long_df):
- kdeplot(**plot_kws, levels=n, thresh=0, ax=ax2)
-
- for c1, c2 in zip(ax1.collections, ax2.collections):
-- assert_array_equal(c1.get_segments(), c2.get_segments())
-+ assert_array_equal(get_contour_coords(c1), get_contour_coords(c2))
- for c1, c2 in zip(ax1.collections, ax2.collections):
- assert_array_equal(c1.get_facecolors(), c2.get_facecolors())
-
-@@ -2322,13 +2343,13 @@ def test_bivariate_kde_norm(self, rng):
- z = [0] * 80 + [1] * 20
-
- g = displot(x=x, y=y, col=z, kind="kde", levels=10)
-- l1 = sum(bool(c.get_segments()) for c in g.axes.flat[0].collections)
-- l2 = sum(bool(c.get_segments()) for c in g.axes.flat[1].collections)
-+ l1 = sum(bool(get_contour_coords(c)) for c in g.axes.flat[0].collections)
-+ l2 = sum(bool(get_contour_coords(c)) for c in g.axes.flat[1].collections)
- assert l1 > l2
-
- g = displot(x=x, y=y, col=z, kind="kde", levels=10, common_norm=False)
-- l1 = sum(bool(c.get_segments()) for c in g.axes.flat[0].collections)
-- l2 = sum(bool(c.get_segments()) for c in g.axes.flat[1].collections)
-+ l1 = sum(bool(get_contour_coords(c)) for c in g.axes.flat[0].collections)
-+ l2 = sum(bool(get_contour_coords(c)) for c in g.axes.flat[1].collections)
- assert l1 == l2
-
- def test_bivariate_hist_norm(self, rng):
-
-From a20ce3fabeb23c97b5827d9fb0c6a96ac109ea64 Mon Sep 17 00:00:00 2001
-From: Michael Waskom <mwaskom@gmail.com>
-Date: Sun, 31 Oct 2021 16:10:47 -0400
-Subject: [PATCH 3/4] Update legend tests for mpl3.5 compatability
-
----
- seaborn/tests/test_distributions.py | 5 ++++-
- 1 file changed, 4 insertions(+), 1 deletion(-)
-
-diff --git a/seaborn/tests/test_distributions.py b/seaborn/tests/test_distributions.py
-index 466efb69e..024fe7541 100644
---- a/seaborn/tests/test_distributions.py
-+++ b/seaborn/tests/test_distributions.py
-@@ -872,7 +872,7 @@ def test_legend(self, long_df):
- for label, level in zip(legend_labels, order):
- assert label.get_text() == level
-
-- legend_artists = ax.legend_.findobj(mpl.lines.Line2D)[::2]
-+ legend_artists = ax.legend_.findobj(mpl.lines.Line2D)
- palette = color_palette()
- for artist, color in zip(legend_artists, palette):
- assert_colors_equal(artist.get_color(), color)
-
diff --git a/gnu/packages/patches/python-seaborn-kde-test.patch b/gnu/packages/patches/python-seaborn-kde-test.patch
deleted file mode 100644
index f300dffc6f..0000000000
--- a/gnu/packages/patches/python-seaborn-kde-test.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-This patch is an excerpt of this upstream commit:
-
- commit 0a24478a550132f1882e5be5f5dbc0fc446a8a6c
- Author: Michael Waskom <mwaskom@users.noreply.github.com>
- Date: Mon Dec 21 18:44:58 2020 -0500
-
- Raise minimal supported Python to 3.7 and bump requirements (#2396)
-
-It fixes the failure of 'test_weights'.
-
---- a/seaborn/tests/test_distributions.py
-+++ b/seaborn/tests/test_distributions.py
-@@ -709,21 +708,17 @@ class TestKDEPlotUnivariate:
- integral = integrate.trapz(ydata, np.log10(xdata))
- assert integral == pytest.approx(1)
-
-- @pytest.mark.skipif(
-- LooseVersion(scipy.__version__) < "1.2.0",
-- reason="Weights require scipy >= 1.2.0"
-- )
- def test_weights(self):
-
- x = [1, 2]
- weights = [2, 1]
-
-- ax = kdeplot(x=x, weights=weights)
-+ ax = kdeplot(x=x, weights=weights, bw_method=.1)
-
- xdata, ydata = ax.lines[0].get_xydata().T
-
-- y1 = ydata[np.argwhere(np.abs(xdata - 1).min())]
-- y2 = ydata[np.argwhere(np.abs(xdata - 2).min())]
-+ y1 = ydata[np.abs(xdata - 1).argmin()]
-+ y2 = ydata[np.abs(xdata - 2).argmin()]
-
- assert y1 == pytest.approx(2 * y2)
diff --git a/gnu/packages/patches/python-telingo-fix-comparison.patch b/gnu/packages/patches/python-telingo-fix-comparison.patch
new file mode 100644
index 0000000000..6d05048dcb
--- /dev/null
+++ b/gnu/packages/patches/python-telingo-fix-comparison.patch
@@ -0,0 +1,19 @@
+Index: source/telingo/transformers/head.py
+===================================================================
+--- source.orig/telingo/transformers/head.py
++++ source/telingo/transformers/head.py
+@@ -564,10 +564,12 @@ class HeadTransformer:
+ cond = []
+ diff = _ast.BinaryOperation(loc, _ast.BinaryOperator.Minus, param, shift)
+ if lhs.ast_type != _ast.ASTType.SymbolicTerm or lhs.symbol.type != _clingo.SymbolType.Number or lhs.symbol.number > 0:
+- cond.append(_ast.Literal(loc, _ast.Sign.NoSign, _ast.Comparison(_ast.ComparisonOperator.LessEqual, lhs, diff)))
++ cond.append(_ast.Literal(loc, _ast.Sign.NoSign,
++ _ast.Comparison(lhs, [_ast.Guard(_ast.ComparisonOperator.LessEqual, diff)])))
+
+ if rhs.ast_type != _ast.ASTType.SymbolicTerm or rhs.symbol.type != _clingo.SymbolType.Supremum:
+- cond.append(_ast.Literal(loc, _ast.Sign.NoSign, _ast.Comparison(_ast.ComparisonOperator.LessEqual, diff, rhs)))
++ cond.append(_ast.Literal(loc, _ast.Sign.NoSign,
++ _ast.Comparison(diff, [_ast.Guard(_ast.ComparisonOperator.LessEqual, rhs)])))
+
+ elems.extend([_ast.ConditionalLiteral(loc, _ast.Literal(loc, _ast.Sign.NoSign, head), cond) for head in heads])
+
diff --git a/gnu/packages/patches/qtwayland-cleanup-callbacks.patch b/gnu/packages/patches/qtwayland-cleanup-callbacks.patch
new file mode 100644
index 0000000000..b7618432cb
--- /dev/null
+++ b/gnu/packages/patches/qtwayland-cleanup-callbacks.patch
@@ -0,0 +1,52 @@
+From 42cdc61a93cf2acb09936aebb5e431fdbc0a26c6 Mon Sep 17 00:00:00 2001
+From: Georges Basile Stavracas Neto <gbsneto@gnome.org>
+Date: Thu, 27 May 2021 20:02:53 -0300
+Subject: [PATCH] Client: Always destroy frame callback in the actual callback
+
+It's good hygiene to destroy all frame callbacks. Destroy the
+frame callback and clean up the mFrameCallback class member in
+the callback itself. The callback destruction happens before
+calling handleFrameCallback() to avoid the theoretical case
+where another frame callback is queued by handleFrameCallback(),
+and then immediately destroyed in the callback handler.
+
+Change-Id: Ide6dc95e3402932c58bfc088a9d471fda821e9a1
+Reviewed-by: Eskil Abrahamsen Blomfeldt <eskil.abrahamsen-blomfeldt@qt.io>
+---
+ src/client/qwaylandwindow.cpp | 14 +++++---------
+ 1 file changed, 5 insertions(+), 9 deletions(-)
+
+diff --git a/src/client/qwaylandwindow.cpp b/src/client/qwaylandwindow.cpp
+index d83d51695..5561f58f7 100644
+--- a/src/client/qwaylandwindow.cpp
++++ b/src/client/qwaylandwindow.cpp
+@@ -659,9 +659,13 @@ void QWaylandWindow::commit()
+
+ const wl_callback_listener QWaylandWindow::callbackListener = {
+ [](void *data, wl_callback *callback, uint32_t time) {
+- Q_UNUSED(callback);
+ Q_UNUSED(time);
+ auto *window = static_cast<QWaylandWindow*>(data);
++
++ Q_ASSERT(callback == window->mFrameCallback);
++ wl_callback_destroy(callback);
++ window->mFrameCallback = nullptr;
++
+ window->handleFrameCallback();
+ }
+ };
+@@ -1366,11 +1370,6 @@ void QWaylandWindow::handleUpdate()
+ if (!mSurface)
+ return;
+
+- if (mFrameCallback) {
+- wl_callback_destroy(mFrameCallback);
+- mFrameCallback = nullptr;
+- }
+-
+ QMutexLocker locker(mFrameQueue.mutex);
+ struct ::wl_surface *wrappedSurface = reinterpret_cast<struct ::wl_surface *>(wl_proxy_create_wrapper(mSurface->object()));
+ wl_proxy_set_queue(reinterpret_cast<wl_proxy *>(wrappedSurface), mFrameQueue.queue);
+--
+2.38.1
+
diff --git a/gnu/packages/patches/qtwayland-dont-recreate-callbacks.patch b/gnu/packages/patches/qtwayland-dont-recreate-callbacks.patch
new file mode 100644
index 0000000000..dda2b99844
--- /dev/null
+++ b/gnu/packages/patches/qtwayland-dont-recreate-callbacks.patch
@@ -0,0 +1,76 @@
+From cbc74ba6d7186457d8d07183272e952dee5f34f9 Mon Sep 17 00:00:00 2001
+From: Georges Basile Stavracas Neto <gbsneto@gnome.org>
+Date: Thu, 27 May 2021 19:55:04 -0300
+Subject: [PATCH] Client: Don't always recreate frame callbacks
+
+The main QWaylandWindow method that is executed when handling updates is
+QWaylandWindow::handleUpdate(). This method always, unconditionally queues
+a frame callback, regardless of whether any other one is already queued.
+
+In some circumstances, e.g. when a window is hidden or completely obscured
+by other windows, it stops receiving frame callbacks from the compositor.
+However, QWaylandWindow would continue to request them, which eventually
+fills up the Wayland socket and causes the application to crash.
+
+This can be avoided by checking if the platform window is already waiting
+for a frame callback, before queueing another one.
+
+In QWaylandWindow::handleUpdate(), check if mWaitingForFrameCallback is true
+before queueing frame callbacks, and early return if that's the case.
+
+The XDG-shell test needed to be updated for this: The mock compositor is
+not responding to any frame callbacks, so the window will be unexposed,
+no longer get paint events and therefore not trigger any commit. This
+worked by accident before because we were issuing updates quickly enough
+to reset the timer before it had a chance to unexpose the window. The
+easiest fix is just to disable the dependency on frame callbacks in
+this test, since that is clearly not what it's testing.
+
+Task-number: QTBUG-81504
+Change-Id: Ieacb05c7d5a5fcf662243d9177ebcc308cb9ca84
+Reviewed-by: Qt CI Bot <qt_ci_bot@qt-project.org>
+Reviewed-by: Georges Basile Stavracas Neto <gbsneto@gnome.org>
+Reviewed-by: Eskil Abrahamsen Blomfeldt <eskil.abrahamsen-blomfeldt@qt.io>
+---
+ src/client/qwaylandwindow.cpp | 4 ++++
+ tests/auto/client/xdgshell/tst_xdgshell.cpp | 2 ++
+ 2 files changed, 6 insertions(+)
+
+diff --git a/src/client/qwaylandwindow.cpp b/src/client/qwaylandwindow.cpp
+index a708afce..d83d5169 100644
+--- a/src/client/qwaylandwindow.cpp
++++ b/src/client/qwaylandwindow.cpp
+@@ -1357,6 +1357,10 @@ void QWaylandWindow::requestUpdate()
+ void QWaylandWindow::handleUpdate()
+ {
+ qCDebug(lcWaylandBackingstore) << "handleUpdate" << QThread::currentThread();
++
++ if (mWaitingForFrameCallback)
++ return;
++
+ // TODO: Should sync subsurfaces avoid requesting frame callbacks?
+ QReadLocker lock(&mSurfaceLock);
+ if (!mSurface)
+diff --git a/tests/auto/client/xdgshell/tst_xdgshell.cpp b/tests/auto/client/xdgshell/tst_xdgshell.cpp
+index 1d2a2014..962093c7 100644
+--- a/tests/auto/client/xdgshell/tst_xdgshell.cpp
++++ b/tests/auto/client/xdgshell/tst_xdgshell.cpp
+@@ -138,6 +138,7 @@ void tst_xdgshell::configureSize()
+
+ void tst_xdgshell::configureStates()
+ {
++ QVERIFY(qputenv("QT_WAYLAND_FRAME_CALLBACK_TIMEOUT", "0"));
+ QRasterWindow window;
+ window.resize(64, 48);
+ window.show();
+@@ -186,6 +187,7 @@ void tst_xdgshell::configureStates()
+ QCOMPARE(window.windowStates(), Qt::WindowNoState);
+ QCOMPARE(window.frameGeometry().size(), windowedSize);
+ // QCOMPARE(window.frameGeometry().topLeft(), QPoint()); // TODO: this doesn't currently work when window decorations are enabled
++ QVERIFY(qunsetenv("QT_WAYLAND_FRAME_CALLBACK_TIMEOUT"));
+ }
+
+ void tst_xdgshell::popup()
+--
+2.38.1
+
diff --git a/gnu/packages/patches/r-mixedpower-r2power.patch b/gnu/packages/patches/r-mixedpower-r2power.patch
new file mode 100644
index 0000000000..a3307ca7b5
--- /dev/null
+++ b/gnu/packages/patches/r-mixedpower-r2power.patch
@@ -0,0 +1,26 @@
+From e882fda905150649aa887c72731144330ca297b5 Mon Sep 17 00:00:00 2001
+From: Lars-Dominik Braun <lars@6xq.net>
+Date: Fri, 28 Oct 2022 09:18:11 +0200
+Subject: [PATCH] Fix R2power with R>=4.2
+
+The previous commit missed this if statement.
+---
+ R/meta_functions.R | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/R/meta_functions.R b/R/meta_functions.R
+index a65f0c1..fb3feba 100644
+--- a/R/meta_functions.R
++++ b/R/meta_functions.R
+@@ -172,7 +172,7 @@ R2power <- function(model, data, fixed_effects, simvar,
+ } # end if databased
+
+ # 2. SESOI
+- suppressWarnings(if (SESOI != F){ # supress warning generated by if statement if SESOI =! F
++ suppressWarnings(if (!is.logical(SESOI)){
+
+ # change beta coeficients to SESOI values
+ model@beta <- SESOI
+--
+2.37.4
+
diff --git a/gnu/packages/patches/racket-backport-8.6-cross-install.patch b/gnu/packages/patches/racket-backport-8.6-cross-install.patch
deleted file mode 100644
index 2c4d8924ed..0000000000
--- a/gnu/packages/patches/racket-backport-8.6-cross-install.patch
+++ /dev/null
@@ -1,126 +0,0 @@
-From fbe2094f56fb81c888076c781e90fb0abbc0cc07 Mon Sep 17 00:00:00 2001
-From: Matthew Flatt <mflatt@racket-lang.org>
-Date: Sat, 30 Jul 2022 07:06:55 -0600
-Subject: [PATCH 1/2] CS makefiles: fix Unix-style install for cross
- compilation
-
-Closes #4377
-
-(cherry picked from commit 053be470e7c5454cdf48e934f3254d2d916bbbc5)
----
- racket/src/cs/c/build.zuo | 5 ++---
- 1 file changed, 2 insertions(+), 3 deletions(-)
-
-diff --git a/racket/src/cs/c/build.zuo b/racket/src/cs/c/build.zuo
-index 8d3950bb27..d8b74c509d 100644
---- a/racket/src/cs/c/build.zuo
-+++ b/racket/src/cs/c/build.zuo
-@@ -1016,10 +1016,9 @@
- (define (setup)
- (call-with-dest-racket
- (lambda (bindir dest-racket)
-- (define copytree-racket (and cross? (config-bootstrap-racket)))
-+ (define copytree-racket (and cross? (hash-ref (config-bootstrap-racket) 'racket)))
- (maybe-copytree config dest-racket copytree-racket at-dir)
-- (run-raco-setup config dest-racket
-- (and cross? (hash-ref (config-bootstrap-racket) 'racket))
-+ (run-raco-setup config dest-racket copytree-racket
- ;; this can be redundant if it's also supplied via `SETUP_MACHINE_FLAGS`,
- ;; but redundant should be ok:
- (list "-MCR" (~a (at-dir "compiled") ":")
---
-2.32.0
-
-
-From 85802f4d515e310e657928707800ad709a676e2a Mon Sep 17 00:00:00 2001
-From: Matthew Flatt <mflatt@racket-lang.org>
-Date: Mon, 15 Aug 2022 10:37:01 +0800
-Subject: [PATCH 2/2] configure: adjust cross-build assumption for a pb build
-
-Don't assume a cross build for a pb target on a platform that's only
-supported via pb.
-
-(cherry picked from commit 70e484e885637c495be5481983dae2207fdd67bb)
-
-(Edited to remove unrelated tweak to comments in
-"racket/src/expander/expand/require+provide.rkt".)
----
- racket/src/cs/c/configure | 15 +++++++++++----
- racket/src/cs/c/configure.ac | 15 +++++++++++----
- 2 files changed, 22 insertions(+), 8 deletions(-)
-
-diff --git a/racket/src/cs/c/configure b/racket/src/cs/c/configure
-index 454d79e11a..7e0fa5600d 100755
---- a/racket/src/cs/c/configure
-+++ b/racket/src/cs/c/configure
-@@ -4679,6 +4679,11 @@ case "$MACH_HOST_CPU" in
- ;;
- esac
-
-+if test "${MACH}" = "" ; then
-+ default_mach_was_empty=yes
-+else
-+ default_mach_was_empty=no
-+fi
-
- if test "${enable_mach}" != "" ; then
- MACH="${enable_mach}"
-@@ -4701,13 +4706,15 @@ elif test "$MACH" = "" -o "$MACH_OS" = "" ; then
- exit 1
- fi
-
--# For a pb build where Racket is supplied, force cross-build
--# mode on the assumption that the host is not a pb build
--# (because it should be created with default configure options)
-+# For a pb build where Racket is supplied and MACH was not originally
-+# empty, force cross-build mode on the assumption that the host is not
-+# a pb build (because it should be created with default configure options)
- if test "${enable_pb}" = "yes" ; then
- if test "${enable_racket}" != "" ; then
- if test "${enable_target}" = "" ; then
-- enable_target="${MACH}"
-+ if test "${default_mach_was_empty}" = "no" ; then
-+ enable_target="${MACH}"
-+ fi
- fi
- fi
- fi
-diff --git a/racket/src/cs/c/configure.ac b/racket/src/cs/c/configure.ac
-index 5bce979c92..734c1e8feb 100644
---- a/racket/src/cs/c/configure.ac
-+++ b/racket/src/cs/c/configure.ac
-@@ -415,6 +415,11 @@ case "$MACH_HOST_CPU" in
- ;;
- esac
-
-+if test "${MACH}" = "" ; then
-+ default_mach_was_empty=yes
-+else
-+ default_mach_was_empty=no
-+fi
-
- if test "${enable_mach}" != "" ; then
- MACH="${enable_mach}"
-@@ -437,13 +442,15 @@ elif test "$MACH" = "" -o "$MACH_OS" = "" ; then
- exit 1
- fi
-
--# For a pb build where Racket is supplied, force cross-build
--# mode on the assumption that the host is not a pb build
--# (because it should be created with default configure options)
-+# For a pb build where Racket is supplied and MACH was not originally
-+# empty, force cross-build mode on the assumption that the host is not
-+# a pb build (because it should be created with default configure options)
- if test "${enable_pb}" = "yes" ; then
- if test "${enable_racket}" != "" ; then
- if test "${enable_target}" = "" ; then
-- enable_target="${MACH}"
-+ if test "${default_mach_was_empty}" = "no" ; then
-+ enable_target="${MACH}"
-+ fi
- fi
- fi
- fi
---
-2.32.0
-
diff --git a/gnu/packages/patches/racket-backport-8.6-docindex-write.patch b/gnu/packages/patches/racket-backport-8.6-docindex-write.patch
deleted file mode 100644
index abe1984507..0000000000
--- a/gnu/packages/patches/racket-backport-8.6-docindex-write.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From 8b4d686a62fd66dedfc40ecdcf3698316993d614 Mon Sep 17 00:00:00 2001
-From: Philip McGrath <philip@philipmcgrath.com>
-Date: Sun, 17 Jul 2022 22:51:44 -0400
-Subject: [PATCH] racket-index: set write permission when copying
- `docindex.sqlite`
-
-Fixes https://github.com/racket/racket/issues/4357
-
-(cherry picked from commit 55b6cbdca1f36a4f37bab1519c1b658717d3cad2)
----
- pkgs/racket-index/setup/scribble.rkt | 9 ++++++++-
- 1 file changed, 8 insertions(+), 1 deletion(-)
-
-diff --git a/pkgs/racket-index/setup/scribble.rkt b/pkgs/racket-index/setup/scribble.rkt
-index 6694f0b793..e27a8fa348 100644
---- a/pkgs/racket-index/setup/scribble.rkt
-+++ b/pkgs/racket-index/setup/scribble.rkt
-@@ -252,7 +252,14 @@
- (unless (file-exists? db-file)
- (define-values (base name dir?) (split-path db-file))
- (make-directory* base)
-- (when copy-from (copy-file copy-from db-file))
-+ (when copy-from
-+ (copy-file copy-from db-file)
-+ ;; we might not have write permissions for the previous layer:
-+ ;; ensure that we do for the new file
-+ (define orig-mode (file-or-directory-permissions db-file 'bits))
-+ (define writeable-mode (bitwise-ior user-write-bit orig-mode))
-+ (unless (= writeable-mode orig-mode)
-+ (file-or-directory-permissions db-file writeable-mode)))
- (doc-db-disconnect
- (doc-db-file->connection db-file #t))))
- (when (or (ormap can-build*? main-docs)
---
-2.32.0
-
diff --git a/gnu/packages/patches/racket-backport-8.6-hurd.patch b/gnu/packages/patches/racket-backport-8.6-hurd.patch
deleted file mode 100644
index d593b01e62..0000000000
--- a/gnu/packages/patches/racket-backport-8.6-hurd.patch
+++ /dev/null
@@ -1,609 +0,0 @@
-From f17b030fa2f902bb3666913d4a3cd6ba9c146d22 Mon Sep 17 00:00:00 2001
-From: Philip McGrath <philip@philipmcgrath.com>
-Date: Wed, 3 Aug 2022 03:13:03 -0400
-Subject: [PATCH 1/3] Chez Scheme: Fix build on GNU/Hurd
-
-Mostly GNU/Hurd should take the same options as GNU/Linux. One
-difference is that the Hurd does not define macros such as `PATH_MAX` or
-`NOFILE`, because it avoids imposing arbitrary limits on such resources.
-This patch provides alternatives for localized uses of those constants,
-but it accepts the pervasive use of `PATH_MAX` in finding bootfiles for
-now. See https://www.gnu.org/software/hurd/hurd/porting/guidelines.html.
-
-(cherry picked from commit 87eee6e2adb8c6bc11e60619c706fa6295096085)
----
- racket/src/ChezScheme/README.md | 1 +
- racket/src/ChezScheme/c/number.c | 22 +++++++++----------
- racket/src/ChezScheme/c/prim5.c | 35 ++++++++++++++++++++++++++++--
- racket/src/ChezScheme/c/scheme.c | 34 +++++++++++++++++------------
- racket/src/ChezScheme/c/version.h | 7 ++++--
- racket/src/ChezScheme/configure | 17 ++++++++++-----
- racket/src/ChezScheme/s/cmacros.ss | 1 +
- racket/src/cs/c/configure | 14 ++++++++++--
- racket/src/cs/c/configure.ac | 14 ++++++++++--
- racket/src/cs/rumble/system.ss | 3 +++
- 10 files changed, 109 insertions(+), 39 deletions(-)
-
-diff --git a/racket/src/ChezScheme/README.md b/racket/src/ChezScheme/README.md
-index a4a11a5eef..25231dd9bb 100644
---- a/racket/src/ChezScheme/README.md
-+++ b/racket/src/ChezScheme/README.md
-@@ -13,6 +13,7 @@ Supported platforms (bytecode interpreter may work for others):
- * OpenBSD: x86, x86_64, ARMv6, AArch64, PowerPC32
- * NetBSD: x86, x86_64, ARMv6, AArch64, PowerPC32
- * Solaris: x86, x86_64
-+ * GNU/Hurd: x86
- * Android: ARMv7, AArch64
- * iOS: AArch64
- * WebAssembly via Emscripten (bytecode interpreter only)
-diff --git a/racket/src/ChezScheme/c/number.c b/racket/src/ChezScheme/c/number.c
-index ede38cc626..e2bce0d2d3 100644
---- a/racket/src/ChezScheme/c/number.c
-+++ b/racket/src/ChezScheme/c/number.c
-@@ -1041,15 +1041,15 @@ floating-point operations
-
- #ifdef IEEE_DOUBLE
- /* exponent stored + 1024, hidden bit to left of decimal point */
--#define bias 1023
--#define bitstoright 52
--#define m1mask 0xf
--#ifdef WIN32
--#define hidden_bit 0x10000000000000
--#else
--#define hidden_bit 0x10000000000000ULL
--#endif
--#ifdef LITTLE_ENDIAN_IEEE_DOUBLE
-+# define bias 1023
-+# define bitstoright 52
-+# define m1mask 0xf
-+# ifdef WIN32
-+# define hidden_bit 0x10000000000000
-+# else
-+# define hidden_bit 0x10000000000000ULL
-+# endif
-+# ifdef LITTLE_ENDIAN_IEEE_DOUBLE
- struct dblflt {
- UINT m4: 16;
- UINT m3: 16;
-@@ -1058,7 +1058,7 @@ struct dblflt {
- UINT e: 11;
- UINT sign: 1;
- };
--#else
-+# else
- struct dblflt {
- UINT sign: 1;
- UINT e: 11;
-@@ -1067,7 +1067,7 @@ struct dblflt {
- UINT m3: 16;
- UINT m4: 16;
- };
--#endif
-+# endif
- #endif
-
- double S_random_double(U32 m1, U32 m2, U32 m3, U32 m4, double scale) {
-diff --git a/racket/src/ChezScheme/c/prim5.c b/racket/src/ChezScheme/c/prim5.c
-index 124d1e049c..82bbf8d687 100644
---- a/racket/src/ChezScheme/c/prim5.c
-+++ b/racket/src/ChezScheme/c/prim5.c
-@@ -23,6 +23,10 @@
- #include <ctype.h>
- #include <math.h>
-
-+#if defined(__GNU__) /* Hurd */
-+#include <sys/resource.h>
-+#endif
-+
- /* locally defined functions */
- static INT s_errno(void);
- static IBOOL s_addr_in_heap(uptr x);
-@@ -58,7 +62,7 @@ static void s_showalloc(IBOOL show_dump, const char *outfn);
- static ptr s_system(const char *s);
- static ptr s_process(char *s, IBOOL stderrp);
- static I32 s_chdir(const char *inpath);
--#ifdef GETWD
-+#if defined(GETWD) || defined(__GNU__) /* Hurd */
- static char *s_getwd(void);
- #endif
- static ptr s_set_code_byte(ptr p, ptr n, ptr x);
-@@ -881,7 +885,18 @@ static ptr s_process(char *s, IBOOL stderrp) {
- CLOSE(0); if (dup(tofds[0]) != 0) _exit(1);
- CLOSE(1); if (dup(fromfds[1]) != 1) _exit(1);
- CLOSE(2); if (dup(stderrp ? errfds[1] : 1) != 2) _exit(1);
-+#ifndef __GNU__ /* Hurd */
- {INT i; for (i = 3; i < NOFILE; i++) (void)CLOSE(i);}
-+#else /* __GNU__ Hurd: no NOFILE */
-+ {
-+ INT i;
-+ struct rlimit rlim;
-+ getrlimit(RLIMIT_NOFILE, &rlim);
-+ for (i = 3; i < rlim.rlim_cur; i++) {
-+ (void)CLOSE(i);
-+ }
-+ }
-+#endif /* __GNU__ Hurd */
- execl("/bin/sh", "/bin/sh", "-c", s, NULL);
- _exit(1) /* only if execl fails */;
- /*NOTREACHED*/
-@@ -927,6 +942,22 @@ static I32 s_chdir(const char *inpath) {
- static char *s_getwd() {
- return GETWD(TO_VOIDP(&BVIT(S_bytevector(PATH_MAX), 0)));
- }
-+#elif defined(__GNU__) /* Hurd: no PATH_MAX */
-+static char *s_getwd() {
-+ char *path;
-+ size_t len;
-+ ptr bv;
-+ path = getcwd(NULL, 0);
-+ if (NULL == path) {
-+ return NULL;
-+ } else {
-+ len = strlen(path);
-+ bv = S_bytevector(len);
-+ memcpy(TO_VOIDP(&BVIT(bv, 0)), path, len);
-+ free(path);
-+ return TO_VOIDP(&BVIT(bv, 0));
-+ }
-+}
- #endif /* GETWD */
-
- static ptr s_set_code_byte(ptr p, ptr n, ptr x) {
-@@ -1817,7 +1848,7 @@ void S_prim5_init(void) {
- Sforeign_symbol("(cs)s_rational", (void *)S_rational);
- Sforeign_symbol("(cs)sub", (void *)S_sub);
- Sforeign_symbol("(cs)rem", (void *)S_rem);
--#ifdef GETWD
-+#if defined(GETWD) || defined(__GNU__) /* Hurd */
- Sforeign_symbol("(cs)s_getwd", (void *)s_getwd);
- #endif
- Sforeign_symbol("(cs)s_chdir", (void *)s_chdir);
-diff --git a/racket/src/ChezScheme/c/scheme.c b/racket/src/ChezScheme/c/scheme.c
-index ed5564540b..0c40e3eaf0 100644
---- a/racket/src/ChezScheme/c/scheme.c
-+++ b/racket/src/ChezScheme/c/scheme.c
-@@ -458,6 +458,12 @@ static IBOOL next_path(char *path, const char *name, const char *ext, const char
- static const char *path_last(const char *path);
- static char *get_defaultheapdirs(void);
-
-+#ifdef PATH_MAX
-+# define BOOT_PATH_MAX PATH_MAX
-+#else /* hack for Hurd: better to remove the restriction */
-+# define BOOT_PATH_MAX 4096
-+#endif
-+
- static const char *path_last(const char *p) {
- const char *s;
- #ifdef WIN32
-@@ -483,7 +489,7 @@ static const char *path_last(const char *p) {
-
- static char *get_defaultheapdirs() {
- char *result;
-- wchar_t buf[PATH_MAX];
-+ wchar_t buf[BOOT_PATH_MAX];
- DWORD len = sizeof(buf);
- if (ERROR_SUCCESS != RegGetValueW(HKEY_LOCAL_MACHINE, L"Software\\Chez Scheme\\csv" VERSION, L"HeapSearchPath", RRF_RT_REG_SZ, NULL, buf, &len))
- return DEFAULT_HEAP_PATH;
-@@ -512,14 +518,14 @@ static char *get_defaultheapdirs() {
- * leaving the full path with name affixed in path and *sp / *dsp pointing
- * past the current entry. it returns 1 on success and 0 if at the end of
- * the search path. path should be a pointer to an unoccupied buffer
-- * PATH_MAX characters long. either or both of sp/dsp may be empty,
-+ * BOOT_PATH_MAX characters long. either or both of sp/dsp may be empty,
- * but neither may be null, i.e., (char *)0. */
- static IBOOL next_path(char *path, const char *name, const char *ext,
- const char **sp, const char **dsp) {
- char *p;
- const char *s, *t;
-
--#define setp(c) if (p >= path + PATH_MAX) { fprintf(stderr, "search path entry too long\n"); S_abnormal_exit(); } else *p++ = (c)
-+#define setp(c) if (p >= path + BOOT_PATH_MAX) { fprintf(stderr, "search path entry too long\n"); S_abnormal_exit(); } else *p++ = (c)
- for (;;) {
- s = *sp;
- p = path;
-@@ -532,10 +538,10 @@ static IBOOL next_path(char *path, const char *name, const char *ext,
- switch (*s) {
- #ifdef WIN32
- case 'x': {
-- wchar_t exepath[PATH_MAX]; DWORD n;
-+ wchar_t exepath[BOOT_PATH_MAX]; DWORD n;
- s += 1;
-- n = GetModuleFileNameW(NULL, exepath, PATH_MAX);
-- if (n == 0 || (n == PATH_MAX && GetLastError() == ERROR_INSUFFICIENT_BUFFER)) {
-+ n = GetModuleFileNameW(NULL, exepath, BOOT_PATH_MAX);
-+ if (n == 0 || (n == BOOT_PATH_MAX && GetLastError() == ERROR_INSUFFICIENT_BUFFER)) {
- fprintf(stderr, "warning: executable path is too long; ignoring %%x\n");
- } else {
- char *tstart;
-@@ -608,7 +614,7 @@ typedef struct {
- iptr len; /* 0 => unknown */
- iptr offset;
- IBOOL need_check, close_after;
-- char path[PATH_MAX];
-+ char path[BOOT_PATH_MAX];
- } boot_desc;
-
- #define MAX_BOOT_FILES 10
-@@ -695,14 +701,14 @@ static void finish_dependencies_header(int fd, const char *path, int c) {
- static IBOOL find_boot(const char *name, const char *ext, IBOOL direct_pathp,
- int fd,
- IBOOL errorp) {
-- char pathbuf[PATH_MAX], buf[PATH_MAX];
-+ char pathbuf[BOOT_PATH_MAX], buf[BOOT_PATH_MAX];
- uptr n = 0;
- INT c;
- const char *path;
- char *expandedpath;
-
- if ((fd != -1) || direct_pathp || S_fixedpathp(name)) {
-- if (strlen(name) >= PATH_MAX) {
-+ if (strlen(name) >= BOOT_PATH_MAX) {
- fprintf(stderr, "boot-file path is too long %s\n", name);
- S_abnormal_exit();
- }
-@@ -776,7 +782,7 @@ static IBOOL find_boot(const char *name, const char *ext, IBOOL direct_pathp,
- if (boot_count == 0) {
- for (;;) {
- /* try to load heap or boot file this boot file requires */
-- if (get_string(fd, buf, PATH_MAX, &c) != 0) {
-+ if (get_string(fd, buf, BOOT_PATH_MAX, &c) != 0) {
- fprintf(stderr, "unexpected end of file on %s\n", path);
- CLOSE(fd);
- S_abnormal_exit();
-@@ -796,7 +802,7 @@ static IBOOL find_boot(const char *name, const char *ext, IBOOL direct_pathp,
- c = get_u8(fd);
- for (sep = " "; ; sep = "or ") {
- if (c == ')') break;
-- (void) get_string(fd, buf, PATH_MAX, &c);
-+ (void) get_string(fd, buf, BOOT_PATH_MAX, &c);
- fprintf(stderr, "%s%s.boot ", sep, buf);
- }
- fprintf(stderr, "required by %s\n", path);
-@@ -1090,7 +1096,7 @@ extern void Sregister_boot_file_fd_region(const char *name,
- int close_after) {
- check_boot_file_state("Sregister_boot_file_fd");
-
-- if (strlen(name) >= PATH_MAX) {
-+ if (strlen(name) >= BOOT_PATH_MAX) {
- fprintf(stderr, "boot-file path is too long %s\n", name);
- S_abnormal_exit();
- }
-@@ -1141,14 +1147,14 @@ extern void Sbuild_heap(const char *kernel, void (*custom_init)(void)) {
- }
-
- name = path_last(kernel);
-- if (strlen(name) >= PATH_MAX) {
-+ if (strlen(name) >= BOOT_PATH_MAX) {
- fprintf(stderr, "executable name too long: %s\n", name);
- S_abnormal_exit();
- }
-
- #ifdef WIN32
- { /* strip off trailing .exe, if any */
-- static char buf[PATH_MAX];
-+ static char buf[BOOT_PATH_MAX];
- iptr n;
-
- n = strlen(name) - 4;
-diff --git a/racket/src/ChezScheme/c/version.h b/racket/src/ChezScheme/c/version.h
-index a79d12621b..61751a9fad 100644
---- a/racket/src/ChezScheme/c/version.h
-+++ b/racket/src/ChezScheme/c/version.h
-@@ -80,7 +80,7 @@ FORCEINLINE void store_unaligned_uptr(uptr *addr, uptr val) {
- /*****************************************/
- /* Operating systems */
-
--#if defined(__linux__)
-+#if defined(__linux__) || defined(__GNU__) /* Hurd */
- #define NOBLOCK O_NONBLOCK
- #define LOAD_SHARED_OBJECT
- #define USE_MMAP
-@@ -91,7 +91,10 @@ FORCEINLINE void store_unaligned_uptr(uptr *addr, uptr val) {
- #define GETPAGESIZE() getpagesize()
- typedef char *memcpy_t;
- #define MAKE_NAN(x) { x = 0.0; x = x / x; }
--#define GETWD(x) getcwd((x),PATH_MAX)
-+#ifndef __GNU__ /* Hurd: no PATH_MAX */
-+/* n.b. don't test PATH_MAX directly: we have not yet included <limits.h> */
-+# define GETWD(x) getcwd((x),PATH_MAX)
-+#endif
- typedef int tputsputcchar;
- #ifndef __ANDROID__
- # define LOCKF
-diff --git a/racket/src/ChezScheme/configure b/racket/src/ChezScheme/configure
-index f64b639c3a..efdb6029cb 100755
---- a/racket/src/ChezScheme/configure
-+++ b/racket/src/ChezScheme/configure
-@@ -102,6 +102,11 @@ case "${CONFIG_UNAME}" in
- installprefix=/usr
- installmansuffix=share/man
- ;;
-+ GNU)
-+ unixsuffix=gnu # the Hurd
-+ installprefix=/usr
-+ installmansuffix=share/man
-+ ;;
- QNX)
- if uname -a | egrep 'x86' > /dev/null 2>&1 ; then
- m32=i3qnx
-@@ -591,7 +596,7 @@ fi
-
- # Infer flags needed for threads:
- case "${flagsm}" in
-- *le|*fb|*ob|*nb)
-+ *le|*gnu|*fb|*ob|*nb)
- threadFlags="-D_REENTRANT -pthread"
- threadLibs="-lpthread"
- ;;
-@@ -627,7 +632,7 @@ if [ "$cflagsset" = "no" ] ; then
- a6*)
- CFLAGS="-m64 ${optFlags}"
- ;;
-- i3le)
-+ i3le) # intentionally not including i3gnu, which may not support sse2
- CFLAGS="-m32 -msse2 -mfpmath=sse ${optFlags}"
- ;;
- i3nt)
-@@ -688,7 +693,7 @@ fi
- # Add automatic linking flags, unless suppressed by --disable-auto-flags
- if [ "$addflags" = "yes" ] ; then
- case "${flagsm}" in
-- *le)
-+ *le|*gnu)
- LDFLAGS="${LDFLAGS} -rdynamic"
- ;;
- *fb|*nb)
-@@ -702,7 +707,7 @@ if [ "$addflags" = "yes" ] ; then
- esac
-
- case "${flagsm}" in
-- *le)
-+ *le|*gnu)
- LIBS="${LIBS} -lm -ldl ${ncursesLib} -lrt"
- ;;
- *fb|*ob)
-@@ -749,7 +754,7 @@ exeSuffix=
-
- # compile flags for c/Mf-unix and mats/Mf-unix
- case "${flagsmuni}" in
-- *le)
-+ *le|*gnu)
- mdcflags="-fPIC -shared"
- ;;
- *fb|*ob)
-@@ -781,7 +786,7 @@ case "${flagsmuni}" in
- i3le)
- mdldflags="-melf_i386"
- ;;
-- *le)
-+ *le|*gnu)
- ;;
- i3nb)
- mdldflags="-m elf_i386"
-diff --git a/racket/src/ChezScheme/s/cmacros.ss b/racket/src/ChezScheme/s/cmacros.ss
-index ff2b09217b..2e79a4d8de 100644
---- a/racket/src/ChezScheme/s/cmacros.ss
-+++ b/racket/src/ChezScheme/s/cmacros.ss
-@@ -385,6 +385,7 @@
- i3fb ti3fb
- i3ob ti3ob
- i3osx ti3osx
-+ i3gnu ti3gnu
- a6le ta6le
- a6osx ta6osx
- a6ob ta6ob
-diff --git a/racket/src/cs/c/configure b/racket/src/cs/c/configure
-index 454d79e11a..dab545c0b4 100755
---- a/racket/src/cs/c/configure
-+++ b/racket/src/cs/c/configure
-@@ -4449,8 +4449,15 @@ case "$host_os" in
- ;;
- irix*)
- ;;
-- linux*)
-- MACH_OS=le
-+ linux*|gnu*)
-+ case "$host_os" in
-+ *linux*)
-+ MACH_OS=le
-+ ;;
-+ *)
-+ MACH_OS=gnu # Hurd
-+ ;;
-+ esac
- case "$host_os" in
- *linux-android*)
- ;;
-@@ -4730,6 +4737,9 @@ if test "${build_os}_${build_cpu}" != "${host_os}_${host_cpu}" ; then
- linux*)
- BUILD_OS=le
- ;;
-+ gnu*) # Hurd: must come after linux*
-+ BUILD_OS=gnu
-+ ;;
- *mingw*)
- BUILD_OS=nt
- ;;
-diff --git a/racket/src/cs/c/configure.ac b/racket/src/cs/c/configure.ac
-index 5bce979c92..43e7307b1b 100644
---- a/racket/src/cs/c/configure.ac
-+++ b/racket/src/cs/c/configure.ac
-@@ -272,8 +272,15 @@ case "$host_os" in
- ;;
- irix*)
- ;;
-- linux*)
-- MACH_OS=le
-+ linux*|gnu*)
-+ case "$host_os" in
-+ linux*)
-+ MACH_OS=le
-+ ;;
-+ *)
-+ MACH_OS=gnu # Hurd
-+ ;;
-+ esac
- case "$host_os" in
- *linux-android*)
- ;;
-@@ -466,6 +473,9 @@ if test "${build_os}_${build_cpu}" != "${host_os}_${host_cpu}" ; then
- linux*)
- BUILD_OS=le
- ;;
-+ gnu*) # Hurd - must come after linux*
-+ BUILD_OS=gnu
-+ ;;
- *mingw*)
- BUILD_OS=nt
- ;;
-diff --git a/racket/src/cs/rumble/system.ss b/racket/src/cs/rumble/system.ss
-index 2319cbe7a5..773eb79cf3 100644
---- a/racket/src/cs/rumble/system.ss
-+++ b/racket/src/cs/rumble/system.ss
-@@ -48,6 +48,8 @@
- arm32le tarm32le arm64le tarm64le
- ppc32le tppc32le)
- 'linux]
-+ [(i3gnu ti3gnu)
-+ 'gnu-hurd]
- [(a6fb ta6fb i3fb ti3fb
- arm32fb tarm32fb arm64fb tarm64fb
- ppc32fb tppc32fb)
-@@ -85,6 +87,7 @@
- i3nb ti3nb
- i3fb ti3fb
- i3s2 ti3s2
-+ i3gnu ti3gnu
- i3qnx)
- 'i386]
- [(arm32le tarm32le
---
-2.32.0
-
-
-From 8653294b771c741d320aba31e692b4f0ed0c702f Mon Sep 17 00:00:00 2001
-From: Philip McGrath <philip@philipmcgrath.com>
-Date: Thu, 4 Aug 2022 20:18:09 -0400
-Subject: [PATCH 2/3] BC: Fix build on GNU/Hurd
-
-(cherry picked from commit 5c05496afd6159c2f9cd52e7f23389fdc6b55f43)
----
- racket/src/bc/configure | 2 +-
- racket/src/bc/configure.ac | 2 +-
- racket/src/bc/sconfig.h | 14 +++++++++-----
- 3 files changed, 11 insertions(+), 7 deletions(-)
-
-diff --git a/racket/src/bc/configure b/racket/src/bc/configure
-index 4ddb607b37..4ee346014d 100755
---- a/racket/src/bc/configure
-+++ b/racket/src/bc/configure
-@@ -5018,7 +5018,7 @@ case "$host_os" in
- X_PRE_LIBS=""
- fi
- ;;
-- linux*)
-+ linux*,gnu*)
- LIBS="$LIBS -ldl -lm -lrt -rdynamic"
- DYN_CFLAGS="-fPIC"
- curses_portable_link="/usr/lib/${host_cpu}-${host_os}/libncurses.a /usr/lib/${host_cpu}-${host_os}/libtermcap.a"
-diff --git a/racket/src/bc/configure.ac b/racket/src/bc/configure.ac
-index deef8f3077..866851236e 100644
---- a/racket/src/bc/configure.ac
-+++ b/racket/src/bc/configure.ac
-@@ -557,7 +557,7 @@ case "$host_os" in
- X_PRE_LIBS=""
- fi
- ;;
-- linux*)
-+ linux*,gnu*)
- LIBS="$LIBS -ldl -lm -lrt -rdynamic"
- DYN_CFLAGS="-fPIC"
- curses_portable_link="/usr/lib/${host_cpu}-${host_os}/libncurses.a /usr/lib/${host_cpu}-${host_os}/libtermcap.a"
-diff --git a/racket/src/bc/sconfig.h b/racket/src/bc/sconfig.h
-index 01ecb48158..8468942fcd 100644
---- a/racket/src/bc/sconfig.h
-+++ b/racket/src/bc/sconfig.h
-@@ -79,12 +79,14 @@
-
- #endif
-
-- /************** Linux with gcc ****************/
-+ /************** Linux (or Hurd) with gcc ****************/
-
--#if defined(__linux__)
-+#if defined(__linux__) || defined(__GNU__)
-
- # ifdef __ANDROID__
- # define SCHEME_OS "android"
-+# elif defined(__GNU__)
-+# define SCHEME_OS "gnu-hurd"
- # else
- # define SCHEME_OS "linux"
- # endif
-@@ -146,13 +148,15 @@
- # define USE_IEEE_FP_PREDS
- # define USE_EXPLICT_FP_FORM_CHECK
-
--# define LINUX_FIND_STACK_BASE
-+# define LINUX_FIND_STACK_BASE /* also ok for Hurd */
-
- # define FLAGS_ALREADY_SET
-
- #if defined(__i386__)
--# define MZ_USE_JIT_I386
--# define MZ_JIT_USE_MPROTECT
-+# ifndef __GNU__ /* Hurd */
-+# define MZ_USE_JIT_I386
-+# define MZ_JIT_USE_MPROTECT
-+# endif
- # ifndef MZ_NO_UNWIND_SUPPORT
- # define MZ_USE_DWARF_LIBUNWIND
- # endif
---
-2.32.0
-
-
-From 1b0cd08557f58506c96f0ddd855bd9051a45a2f1 Mon Sep 17 00:00:00 2001
-From: Philip McGrath <philip@philipmcgrath.com>
-Date: Sat, 6 Aug 2022 22:48:40 -0400
-Subject: [PATCH 3/3] BC: repair configure script
-
-Hopefully this will fix the DrDr failures caused by 5c05496.
-
-Related to https://github.com/racket/racket/pull/4384
-
-(cherry picked from commit c3dd01055ed7589a18136904510fe4db557d6e77)
----
- racket/src/bc/configure | 2 +-
- racket/src/bc/configure.ac | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/racket/src/bc/configure b/racket/src/bc/configure
-index 4ee346014d..b1c5175b84 100755
---- a/racket/src/bc/configure
-+++ b/racket/src/bc/configure
-@@ -5018,7 +5018,7 @@ case "$host_os" in
- X_PRE_LIBS=""
- fi
- ;;
-- linux*,gnu*)
-+ linux*|gnu*)
- LIBS="$LIBS -ldl -lm -lrt -rdynamic"
- DYN_CFLAGS="-fPIC"
- curses_portable_link="/usr/lib/${host_cpu}-${host_os}/libncurses.a /usr/lib/${host_cpu}-${host_os}/libtermcap.a"
-diff --git a/racket/src/bc/configure.ac b/racket/src/bc/configure.ac
-index 866851236e..ecc3b0b579 100644
---- a/racket/src/bc/configure.ac
-+++ b/racket/src/bc/configure.ac
-@@ -557,7 +557,7 @@ case "$host_os" in
- X_PRE_LIBS=""
- fi
- ;;
-- linux*,gnu*)
-+ linux*|gnu*)
- LIBS="$LIBS -ldl -lm -lrt -rdynamic"
- DYN_CFLAGS="-fPIC"
- curses_portable_link="/usr/lib/${host_cpu}-${host_os}/libncurses.a /usr/lib/${host_cpu}-${host_os}/libtermcap.a"
---
-2.32.0
-
diff --git a/gnu/packages/patches/racket-backport-8.6-zuo.patch b/gnu/packages/patches/racket-backport-8.6-zuo.patch
deleted file mode 100644
index b86679b7ec..0000000000
--- a/gnu/packages/patches/racket-backport-8.6-zuo.patch
+++ /dev/null
@@ -1,481 +0,0 @@
-From 8761fc06b188b9ca2f4b7f2b7d1235075c44a321 Mon Sep 17 00:00:00 2001
-From: Matthew Flatt <mflatt@racket-lang.org>
-Date: Sat, 23 Jul 2022 17:10:58 -0600
-Subject: [PATCH 1/4] Zuo: support cross compilation via `configure` and
- `CC_FOR_BUILD`
-
-(cherry picked from commit 798a989ba6d1a30c491a3120b2c2f1570ecab911)
----
- racket/src/zuo/Makefile.in | 7 ++++++-
- racket/src/zuo/README.md | 10 ++++++++++
- racket/src/zuo/configure | 15 +++++++++++++++
- racket/src/zuo/configure.ac | 11 +++++++++++
- 4 files changed, 42 insertions(+), 1 deletion(-)
-
-diff --git a/racket/src/zuo/Makefile.in b/racket/src/zuo/Makefile.in
-index 5d16e145bf..747b584c5c 100644
---- a/racket/src/zuo/Makefile.in
-+++ b/racket/src/zuo/Makefile.in
-@@ -17,6 +17,11 @@ CPPFLAGS = @CPPFLAGS@
- LDFLAGS = @LDFLAGS@
- LIBS = @LIBS@
-
-+CC_FOR_BUILD = @CC_FOR_BUILD@
-+CFLAGS_FOR_BUILD = @CFLAGS_FOR_BUILD@
-+LDFLAGS_FOR_BUILD = @LDFLAGS_FOR_BUILD@
-+LIBS_FOR_BUILD = @LIBS_FOR_BUILD@
-+
- EMBED_LIBS = @EMBED_LIBS@
-
- .PHONY: zuos-to-run-and-install
-@@ -24,7 +29,7 @@ zuos-to-run-and-install: zuo
- ./zuo . zuos-to-run-and-install
-
- zuo: $(srcdir)/zuo.c
-- $(CC) $(CPPFLAGS) $(CFLAGS) -DZUO_LIB_PATH='"'"$(srcdir)/lib"'"' -o zuo $(srcdir)/zuo.c $(LDFLAGS) $(LIBS)
-+ $(CC_FOR_BUILD) $(FLAGS_FOR_BUILD) -DZUO_LIB_PATH='"'"$(srcdir)/lib"'"' -o zuo $(srcdir)/zuo.c $(LDFLAGS_FOR_BUILD) $(LIBS_FOR_BUILD)
-
- .PHONY: check
- check: zuo
-diff --git a/racket/src/zuo/README.md b/racket/src/zuo/README.md
-index 17c88ee9ec..3aad504b7e 100644
---- a/racket/src/zuo/README.md
-+++ b/racket/src/zuo/README.md
-@@ -84,6 +84,16 @@ A boot image is machine-independent, whether in a stand-alone file or
- embedded in `.c` source.
-
-
-+Cross Compiling
-+---------------
-+
-+If you use `./configure --host=...` to cross compile, then you will
-+also need to add something like `CC_FOR_BUILD=cc` as a `./configure`
-+argument to specify the compiler for a `zuo` to use on the build
-+machine. If necessary, you can also specify `CFLAGS_FOR_BUILD`,
-+`LDFLAGS_FOR_BUILD`, and/or `LIBS_FOR_BUILD`.
-+
-+
- Embedding Zuo in Another Application
- ------------------------------------
-
-diff --git a/racket/src/zuo/configure b/racket/src/zuo/configure
-index 1fa34a3fe8..575ce07d96 100755
---- a/racket/src/zuo/configure
-+++ b/racket/src/zuo/configure
-@@ -589,6 +589,10 @@ enable_embed="zuo"
- ac_subst_vars='LTLIBOBJS
- LIBOBJS
- EMBED_LIBS
-+LIBS_FOR_BUILD
-+LDFLAGS_FOR_BUILD
-+CFLAGS_FOR_BUILD
-+CC_FOR_BUILD
- OBJEXT
- EXEEXT
- ac_ct_CC
-@@ -2584,6 +2588,17 @@ ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $
- ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-+if test "${CC_FOR_BUILD}" = ""; then
-+ CC_FOR_BUILD='$(CC) -O2'
-+ CFLAGS_FOR_BUILD='$(CPPFLAGS) $(CFLAGS)'
-+ LDFLAGS_FOR_BUILD='$(LDFLAGS)'
-+ LIBS_FOR_BUILD='$(LIBS)'
-+fi
-+
-+
-+
-+
-+
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: zuo libraries to embed: \"${EMBED_LIBS}\"" >&5
- $as_echo "$as_me: zuo libraries to embed: \"${EMBED_LIBS}\"" >&6;}
-diff --git a/racket/src/zuo/configure.ac b/racket/src/zuo/configure.ac
-index 89b3c6391d..598ff79629 100644
---- a/racket/src/zuo/configure.ac
-+++ b/racket/src/zuo/configure.ac
-@@ -25,6 +25,17 @@ AS_IF([test "x$enable_embed" = xno],
- AC_PROG_MAKE_SET()
- AC_PROG_CC
-
-+if test "${CC_FOR_BUILD}" = ""; then
-+ CC_FOR_BUILD='$(CC) -O2'
-+ CFLAGS_FOR_BUILD='$(CPPFLAGS) $(CFLAGS)'
-+ LDFLAGS_FOR_BUILD='$(LDFLAGS)'
-+ LIBS_FOR_BUILD='$(LIBS)'
-+fi
-+AC_SUBST(CC_FOR_BUILD)
-+AC_SUBST(CFLAGS_FOR_BUILD)
-+AC_SUBST(LDFLAGS_FOR_BUILD)
-+AC_SUBST(LIBS_FOR_BUILD)
-+
- AC_SUBST(EMBED_LIBS)
- AC_MSG_NOTICE([zuo libraries to embed: "${EMBED_LIBS}"])
-
---
-2.32.0
-
-
-From f65194ea41eb472fbdd45d5f6c13eabe5e681704 Mon Sep 17 00:00:00 2001
-From: Matthew Flatt <mflatt@racket-lang.org>
-Date: Sat, 23 Jul 2022 17:47:03 -0600
-Subject: [PATCH 2/4] Zuo: sort hash keys
-
-Printing in a sorted order is helpful to make things more
-deterministic independent of symbol inputs. Making `hash-keys`
-produce a sorted list generalizes that determinism.
-
-(cherry picked from commit 4e7ffd3b365d01c5d0993c0b3fd24c9623962edf)
----
- racket/src/zuo/build.zuo | 5 ++-
- racket/src/zuo/tests/hash.zuo | 8 ++--
- racket/src/zuo/zuo-doc/lang-zuo.scrbl | 18 +++++++--
- racket/src/zuo/zuo.c | 57 ++++++++++++++++++++++++++-
- 4 files changed, 78 insertions(+), 10 deletions(-)
-
-diff --git a/racket/src/zuo/build.zuo b/racket/src/zuo/build.zuo
-index c1b5e8ce66..129240120a 100644
---- a/racket/src/zuo/build.zuo
-+++ b/racket/src/zuo/build.zuo
-@@ -47,7 +47,10 @@
- (target (at-dir (add-exe name))
- (lambda (path token)
- (rule (list image_zuo.c
-- (input-data-target 'config config)
-+ (input-data-target 'config (cons
-+ lib-path
-+ (map (lambda (key) (hash-ref config key))
-+ '(CC CPPFLAGS CFLAGS LDFLAGS LIBS))))
- (quote-module-path))
- (lambda ()
- (define l (split-path path))
-diff --git a/racket/src/zuo/tests/hash.zuo b/racket/src/zuo/tests/hash.zuo
-index a35741c730..0d3d7f3af6 100644
---- a/racket/src/zuo/tests/hash.zuo
-+++ b/racket/src/zuo/tests/hash.zuo
-@@ -35,9 +35,7 @@
-
- (check (hash-keys (hash)) '())
- (check (hash-keys (hash 'a 1)) '(a))
--(check (let ([keys (hash-keys (hash 'a 1 'b 2))])
-- (or (equal? keys '(a b))
-- (equal? keys '(b a)))))
-+(check (hash-keys (hash 'a 1 'b 2)) '(a b)) ; always in order
- (check (length (hash-keys (hash 'a 1 'b 2 'c 3))) 3)
- (check (length (hash-keys (hash 'a 1 'b 2 'a 3))) 2)
- (check-arg-fail (hash-keys 0) "not a hash table")
-@@ -50,3 +48,7 @@
- (check (hash-keys-subset? (hash 'a 1 'b 2) (hash 'b 1)) #f)
- (check-arg-fail (hash-keys-subset? 0 (hash)) "not a hash table")
- (check-arg-fail (hash-keys-subset? (hash) 0) "not a hash table")
-+
-+;; print sorts keys alphabetically:
-+(check (~a (hash 'a 1 'b 2)) "#hash((a . 1) (b . 2))")
-+(check (~a (hash 'b 2 'a 1)) "#hash((a . 1) (b . 2))")
-diff --git a/racket/src/zuo/zuo-doc/lang-zuo.scrbl b/racket/src/zuo/zuo-doc/lang-zuo.scrbl
-index 94641d041e..4605e47471 100644
---- a/racket/src/zuo/zuo-doc/lang-zuo.scrbl
-+++ b/racket/src/zuo/zuo-doc/lang-zuo.scrbl
-@@ -538,10 +538,20 @@ support to convert the textual form back into a hash table value.
-
- Analogous to @realracket*[hash? hash hash-ref hash-set hash-remove
- hash-keys hash-count hash-keys-subset?] from @racketmodname[racket].
--Besides being constrained to symbol keys, there is one additional
--difference: the third argument to @racket[hash-ref], when supplied,
--is always used as a value to return if a key is missing, as
--opposed to a failure thunk.}
-+
-+Besides being constrained to symbol keys, there are two additional
-+differences:
-+
-+@itemlist[
-+
-+ @item{the third argument to @racket[hash-ref], when supplied, is
-+ always used as a value to return if a key is missing, as
-+ opposed to a failure thunk; and}
-+
-+ @item{the @racket[hash-keys] function returns interned keys sorted
-+ alphabetically.}
-+
-+]}
-
-
- @section{Procedures}
-diff --git a/racket/src/zuo/zuo.c b/racket/src/zuo/zuo.c
-index 2957d478af..88d5747326 100644
---- a/racket/src/zuo/zuo.c
-+++ b/racket/src/zuo/zuo.c
-@@ -1298,6 +1298,59 @@ static zuo_t *zuo_trie_keys(zuo_t *trie_in, zuo_t *accum) {
- return accum;
- }
-
-+/*======================================================================*/
-+/* symbol-list sorting */
-+/*======================================================================*/
-+
-+/* merge sort used to make hash printing deterministic */
-+static zuo_t *zuo_symbol_list_sort(zuo_t *l_in) {
-+ zuo_t *l, *left, *right, *first, *last;
-+ zuo_uint_t len = 0, i;
-+
-+ for (l = l_in, len = 0; l != z.o_null; l = _zuo_cdr(l))
-+ len++;
-+
-+ if (len < 2)
-+ return l_in;
-+
-+ left = z.o_null;
-+ for (l = l_in, i = len >> 1; i > 0; l = _zuo_cdr(l), i--)
-+ left = zuo_cons(_zuo_car(l), left);
-+ right = l;
-+
-+ left = zuo_symbol_list_sort(left);
-+ right = zuo_symbol_list_sort(right);
-+
-+ first = last = z.o_null;
-+ while ((left != z.o_null) && (right != z.o_null)) {
-+ zuo_t *p;
-+
-+ if (strcmp(ZUO_STRING_PTR(((zuo_symbol_t *)_zuo_car(left))->str),
-+ ZUO_STRING_PTR(((zuo_symbol_t *)_zuo_car(right))->str))
-+ < 1) {
-+ p = zuo_cons(_zuo_car(left), z.o_null);
-+ left = _zuo_cdr(left);
-+ } else {
-+ p = zuo_cons(_zuo_car(right), z.o_null);
-+ right = _zuo_cdr(right);
-+ }
-+
-+ if (first == z.o_null)
-+ first = p;
-+ else
-+ ((zuo_pair_t *)last)->cdr = p;
-+ last = p;
-+ }
-+
-+ ((zuo_pair_t *)last)->cdr = ((left != z.o_null) ? left : right);
-+
-+ return first;
-+}
-+
-+static zuo_t *zuo_trie_sorted_keys(zuo_t *trie_in, zuo_t *accum) {
-+ return zuo_symbol_list_sort(zuo_trie_keys(trie_in, accum));
-+}
-+
- /*======================================================================*/
- /* terminal support */
- /*======================================================================*/
-@@ -1571,7 +1624,7 @@ static void zuo_out(zuo_out_t *out, zuo_t *obj, zuo_print_mode_t mode) {
- out_string(out, "opaque");
- out_string(out, ">");
- } else if (obj->tag == zuo_trie_node_tag) {
-- zuo_t *keys = zuo_trie_keys(obj, z.o_null);
-+ zuo_t *keys = zuo_trie_sorted_keys(obj, z.o_null);
- if (mode == zuo_print_mode) {
- out_string(out, "(hash");
- if (keys != z.o_null)
-@@ -2587,7 +2640,7 @@ static zuo_t *zuo_hash_remove(zuo_t *ht, zuo_t *sym) {
-
- static zuo_t *zuo_hash_keys(zuo_t *ht) {
- check_hash("hash-keys", ht);
-- return zuo_trie_keys(ht, z.o_null);
-+ return zuo_trie_sorted_keys(ht, z.o_null);
- }
-
- static zuo_t *zuo_hash_keys_subset_p(zuo_t *ht, zuo_t *ht2) {
---
-2.32.0
-
-
-From f2eecaa1dd875479d2cf51566223b3d0d7b9f738 Mon Sep 17 00:00:00 2001
-From: Matthew Flatt <mflatt@racket-lang.org>
-Date: Sat, 23 Jul 2022 18:06:41 -0600
-Subject: [PATCH 3/4] Zuo: check for nul characters in `string->symbol`
-
-(cherry picked from commit e20022ccfad40d0ba2e77aa75bc4f775018c781f)
----
- racket/src/zuo/tests/symbol.zuo | 3 +++
- racket/src/zuo/zuo-doc/lang-zuo.scrbl | 4 ++-
- racket/src/zuo/zuo.c | 37 +++++++++++++++++----------
- 3 files changed, 29 insertions(+), 15 deletions(-)
-
-diff --git a/racket/src/zuo/tests/symbol.zuo b/racket/src/zuo/tests/symbol.zuo
-index 7775aeeb04..5600a89755 100644
---- a/racket/src/zuo/tests/symbol.zuo
-+++ b/racket/src/zuo/tests/symbol.zuo
-@@ -19,3 +19,6 @@
- (check (not (equal? 'apple (string->uninterned-symbol "apple"))))
- (check-arg-fail (string->symbol 'apple) not-string)
- (check-arg-fail (string->uninterned-symbol 'apple) not-string)
-+
-+(check-arg-fail (string->symbol "apple\0spice") "without a nul character")
-+(check (symbol? (string->uninterned-symbol "apple\0spice")))
-diff --git a/racket/src/zuo/zuo-doc/lang-zuo.scrbl b/racket/src/zuo/zuo-doc/lang-zuo.scrbl
-index 4605e47471..07dd5815b0 100644
---- a/racket/src/zuo/zuo-doc/lang-zuo.scrbl
-+++ b/racket/src/zuo/zuo-doc/lang-zuo.scrbl
-@@ -500,7 +500,9 @@ back into Zuo.
- )]{
-
- Analogous to @realracket*[symbol? symbol->string string->symbol
--string->uninterned-symbol] from @racketmodname[racket].}
-+string->uninterned-symbol] from @racketmodname[racket], but
-+@racket[string->symbol] accepts only strings that do not contain the
-+null character.}
-
-
- @section{Hash Tables (Persistent Maps)}
-diff --git a/racket/src/zuo/zuo.c b/racket/src/zuo/zuo.c
-index 88d5747326..17f161826d 100644
---- a/racket/src/zuo/zuo.c
-+++ b/racket/src/zuo/zuo.c
-@@ -1323,7 +1323,7 @@ static zuo_t *zuo_symbol_list_sort(zuo_t *l_in) {
-
- first = last = z.o_null;
- while ((left != z.o_null) && (right != z.o_null)) {
-- zuo_t *p;
-+ zuo_t *p, *s_left, *s_right;
-
- if (strcmp(ZUO_STRING_PTR(((zuo_symbol_t *)_zuo_car(left))->str),
- ZUO_STRING_PTR(((zuo_symbol_t *)_zuo_car(right))->str))
-@@ -2573,8 +2573,28 @@ static zuo_t *zuo_substring(zuo_t *obj, zuo_t *start_i, zuo_t *end_i) {
- return zuo_sized_string((const char *)&((zuo_string_t *)obj)->s[s_idx], e_idx - s_idx);
- }
-
-+static int zuo_is_string_without_nul(zuo_t *obj) {
-+ zuo_int_t i;
-+
-+ if ((obj->tag != zuo_string_tag)
-+ || ZUO_STRING_LEN(obj) == 0)
-+ return 0;
-+
-+ for (i = ZUO_STRING_LEN(obj); i--; ) {
-+ if (((zuo_string_t *)obj)->s[i] == 0)
-+ return 0;
-+ }
-+
-+ return 1;
-+}
-+
- static zuo_t *zuo_string_to_symbol(zuo_t *obj) {
-- check_string("string->symbol", obj);
-+ if (!zuo_is_string_without_nul(obj)) {
-+ const char *who = "string->symbol";
-+ check_string(who, obj);
-+ zuo_fail_arg(who, "string without a nul character", obj);
-+ }
-+
- return zuo_symbol_from_string(ZUO_STRING_PTR(obj), obj);
- }
-
-@@ -3577,18 +3597,7 @@ static void *zuo_envvars_block(const char *who, zuo_t *envvars)
- #endif
-
- static int zuo_is_path_string(zuo_t *obj) {
-- zuo_int_t i;
--
-- if ((obj->tag != zuo_string_tag)
-- || ZUO_STRING_LEN(obj) == 0)
-- return 0;
--
-- for (i = ZUO_STRING_LEN(obj); i--; ) {
-- if (((zuo_string_t *)obj)->s[i] == 0)
-- return 0;
-- }
--
-- return 1;
-+ return zuo_is_string_without_nul(obj);
- }
-
- static zuo_t *zuo_path_string_p(zuo_t *obj) {
---
-2.32.0
-
-
-From de6618cb3819d25580e3cd400ea09c8cf4f673a9 Mon Sep 17 00:00:00 2001
-From: Matthew Flatt <mflatt@racket-lang.org>
-Date: Sat, 23 Jul 2022 19:50:46 -0600
-Subject: [PATCH 4/4] Zuo: CPPFLAGS_FOR_BUILD, too
-
-(cherry picked from commit cf82706c4b298f654a04c4bc8d98dff39b62a2ac)
----
- racket/src/zuo/Makefile.in | 3 ++-
- racket/src/zuo/configure | 5 ++++-
- racket/src/zuo/configure.ac | 4 +++-
- 3 files changed, 9 insertions(+), 3 deletions(-)
-
-diff --git a/racket/src/zuo/Makefile.in b/racket/src/zuo/Makefile.in
-index 747b584c5c..0376c038a8 100644
---- a/racket/src/zuo/Makefile.in
-+++ b/racket/src/zuo/Makefile.in
-@@ -19,6 +19,7 @@ LIBS = @LIBS@
-
- CC_FOR_BUILD = @CC_FOR_BUILD@
- CFLAGS_FOR_BUILD = @CFLAGS_FOR_BUILD@
-+CPPFLAGS_FOR_BUILD = @CPPFLAGS_FOR_BUILD@
- LDFLAGS_FOR_BUILD = @LDFLAGS_FOR_BUILD@
- LIBS_FOR_BUILD = @LIBS_FOR_BUILD@
-
-@@ -29,7 +30,7 @@ zuos-to-run-and-install: zuo
- ./zuo . zuos-to-run-and-install
-
- zuo: $(srcdir)/zuo.c
-- $(CC_FOR_BUILD) $(FLAGS_FOR_BUILD) -DZUO_LIB_PATH='"'"$(srcdir)/lib"'"' -o zuo $(srcdir)/zuo.c $(LDFLAGS_FOR_BUILD) $(LIBS_FOR_BUILD)
-+ $(CC_FOR_BUILD) $(CFLAGS_FOR_BUILD) $(CPPFLAGS_FOR_BUILD) -DZUO_LIB_PATH='"'"$(srcdir)/lib"'"' -o zuo $(srcdir)/zuo.c $(LDFLAGS_FOR_BUILD) $(LIBS_FOR_BUILD)
-
- .PHONY: check
- check: zuo
-diff --git a/racket/src/zuo/configure b/racket/src/zuo/configure
-index 575ce07d96..7ac453e3bc 100755
---- a/racket/src/zuo/configure
-+++ b/racket/src/zuo/configure
-@@ -591,6 +591,7 @@ LIBOBJS
- EMBED_LIBS
- LIBS_FOR_BUILD
- LDFLAGS_FOR_BUILD
-+CPPFLAGS_FOR_BUILD
- CFLAGS_FOR_BUILD
- CC_FOR_BUILD
- OBJEXT
-@@ -2590,7 +2591,8 @@ ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
- if test "${CC_FOR_BUILD}" = ""; then
- CC_FOR_BUILD='$(CC) -O2'
-- CFLAGS_FOR_BUILD='$(CPPFLAGS) $(CFLAGS)'
-+ CPPFLAGS_FOR_BUILD='$(CPPFLAGS)'
-+ CFLAGS_FOR_BUILD='$(CFLAGS)'
- LDFLAGS_FOR_BUILD='$(LDFLAGS)'
- LIBS_FOR_BUILD='$(LIBS)'
- fi
-@@ -2600,6 +2602,7 @@ fi
-
-
-
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: zuo libraries to embed: \"${EMBED_LIBS}\"" >&5
- $as_echo "$as_me: zuo libraries to embed: \"${EMBED_LIBS}\"" >&6;}
-
-diff --git a/racket/src/zuo/configure.ac b/racket/src/zuo/configure.ac
-index 598ff79629..051ea0beb5 100644
---- a/racket/src/zuo/configure.ac
-+++ b/racket/src/zuo/configure.ac
-@@ -27,12 +27,14 @@ AC_PROG_CC
-
- if test "${CC_FOR_BUILD}" = ""; then
- CC_FOR_BUILD='$(CC) -O2'
-- CFLAGS_FOR_BUILD='$(CPPFLAGS) $(CFLAGS)'
-+ CPPFLAGS_FOR_BUILD='$(CPPFLAGS)'
-+ CFLAGS_FOR_BUILD='$(CFLAGS)'
- LDFLAGS_FOR_BUILD='$(LDFLAGS)'
- LIBS_FOR_BUILD='$(LIBS)'
- fi
- AC_SUBST(CC_FOR_BUILD)
- AC_SUBST(CFLAGS_FOR_BUILD)
-+AC_SUBST(CPPFLAGS_FOR_BUILD)
- AC_SUBST(LDFLAGS_FOR_BUILD)
- AC_SUBST(LIBS_FOR_BUILD)
-
---
-2.32.0
-
diff --git a/gnu/packages/patches/racket-backport-8.7-pkg-strip.patch b/gnu/packages/patches/racket-backport-8.7-pkg-strip.patch
new file mode 100644
index 0000000000..703b6e8e82
--- /dev/null
+++ b/gnu/packages/patches/racket-backport-8.7-pkg-strip.patch
@@ -0,0 +1,90 @@
+From 1b7e15c23baf1fda44b1d0752902ddea11419fc5 Mon Sep 17 00:00:00 2001
+From: Philip McGrath <philip@philipmcgrath.com>
+Date: Fri, 7 Oct 2022 02:15:13 -0400
+Subject: [PATCH] pkg/strip: handle read-only input
+
+A package directory supplied to the functions from `pkg/strip` might
+have had all of its write permission bits unset. Since `copy-file`
+preserves the permissions of the source file, we may end up with a
+read-only file that we want to overwrite (e.g. an `info.rkt` file).
+Explicitly setting `user-write-bit` before writing avoids this problem.
+Conservatively, we only set the permissions when actually needed,
+and we restore the original permissions when we are done.
+
+(cherry picked from commit 8c647c8cc9b66112198fcf9bea27fc0e3737162f)
+---
+ racket/collects/pkg/strip.rkt | 35 +++++++++++++++++++++++++++++------
+ 1 file changed, 29 insertions(+), 6 deletions(-)
+
+diff --git a/racket/collects/pkg/strip.rkt b/racket/collects/pkg/strip.rkt
+index 0ff58cea02..5899dbc6e6 100644
+--- a/racket/collects/pkg/strip.rkt
++++ b/racket/collects/pkg/strip.rkt
+@@ -306,9 +306,8 @@
+ #t
+ new-mod*-subs))))
+ (unless (eq? mod new-mod)
+- (call-with-output-file*
++ (call-with-output-file/writable
+ new-p
+- #:exists 'truncate/replace
+ (lambda (out) (write new-mod out)))))
+
+ (define (fixup-local-redirect-reference p js-path #:user [user-js-path js-path])
+@@ -340,9 +339,8 @@
+ (string->bytes/utf-8 user-js-path)
+ (subbytes s (+ delta end2)))]
+ [else s]))))
+- (call-with-output-file*
++ (call-with-output-file/writable
+ p
+- #:exists 'truncate/replace
+ (lambda (out) (write-bytes new-bstr out)))))
+
+ ;; Used in binary[-lib] mode:
+@@ -383,9 +381,8 @@
+ (convert-mod info-lib defns)]))
+ (unless (equal? new-content content)
+ ;; write updated:
+- (call-with-output-file*
++ (call-with-output-file/writable
+ new-p
+- #:exists 'truncate
+ (lambda (out)
+ (write new-content out)
+ (newline out)))
+@@ -503,3 +500,29 @@
+ which
+ dir)
+ (current-continuation-marks)))))
++
++(define (call-with-output-file/writable pth proc)
++ ;; In case `pth` was copied from a file without the user-write-bit set,
++ ;; explicitly make it writable while we overwrite it.
++ (define (run)
++ (call-with-output-file* pth
++ #:exists 'truncate/replace
++ proc))
++ (cond
++ [(file-exists? pth)
++ (define old-mode
++ (file-or-directory-permissions pth 'bits))
++ (define new-mode
++ (if (eq? (system-type) 'windows)
++ (bitwise-ior old-mode user-write-bit group-write-bit other-write-bit)
++ (bitwise-ior old-mode user-write-bit)))
++ (if (= old-mode new-mode)
++ (run)
++ (dynamic-wind
++ (λ ()
++ (file-or-directory-permissions pth new-mode))
++ run
++ (λ ()
++ (file-or-directory-permissions pth old-mode))))]
++ [else
++ (run)]))
+
+base-commit: 7e4f6e2362d4a08affbbae3c7ee4b98e325274c6
+--
+2.38.0
+
diff --git a/gnu/packages/patches/rdkit-unbundle-external-dependencies.patch b/gnu/packages/patches/rdkit-unbundle-external-dependencies.patch
new file mode 100644
index 0000000000..8ee0611d3b
--- /dev/null
+++ b/gnu/packages/patches/rdkit-unbundle-external-dependencies.patch
@@ -0,0 +1,384 @@
+Remove CMake code for downloading and building bundled dependencies:
+They are packaged separately as rapidjson, avalon-toolkit, freesasa,
+ringdecomposerlib and yaehmop.
+
+diff --git a/Code/GraphMol/MolInterchange/CMakeLists.txt b/Code/GraphMol/MolInterchange/CMakeLists.txt
+index 1673386a4..c3504e6f1 100644
+--- a/Code/GraphMol/MolInterchange/CMakeLists.txt
++++ b/Code/GraphMol/MolInterchange/CMakeLists.txt
+@@ -1,18 +1,3 @@
+-
+-if(NOT EXISTS "${CMAKE_SOURCE_DIR}/External/rapidjson-1.1.0")
+- downloadAndCheckMD5("https://github.com/Tencent/rapidjson/archive/v1.1.0.tar.gz"
+- "${CMAKE_SOURCE_DIR}/External/rapidjson-1.1.0.tar.gz"
+- "badd12c511e081fec6c89c43a7027bce")
+- execute_process(COMMAND ${CMAKE_COMMAND} -E tar zxf
+- ${CMAKE_SOURCE_DIR}/External/rapidjson-1.1.0.tar.gz
+- WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/External)
+-else()
+- message("-- Found RapidJSON source in ${CMAKE_SOURCE_DIR}/External")
+-endif()
+-
+-include_directories(${CMAKE_SOURCE_DIR}/External/rapidjson-1.1.0/include)
+-
+-
+ rdkit_library(MolInterchange
+ Parser.cpp Writer.cpp
+ LINK_LIBRARIES GraphMol)
+diff --git a/External/AvalonTools/AvalonTools.cpp b/External/AvalonTools/AvalonTools.cpp
+index e6c382123..d83d80668 100644
+--- a/External/AvalonTools/AvalonTools.cpp
++++ b/External/AvalonTools/AvalonTools.cpp
+@@ -24,15 +24,15 @@
+ #include "AvalonTools.h"
+
+ extern "C" {
+-#include "local.h"
+-#include "reaccs.h"
+-#include "reaccsio.h"
+-#include "utilities.h"
+-#include "ssmatch.h"
+-#include "smi2mol.h"
+-#include "canonizer.h"
+-#include "layout.h"
+-#include "struchk.h"
++#include <avalontoolkit/local.h>
++#include <avalontoolkit/reaccs.h>
++#include <avalontoolkit/reaccsio.h>
++#include <avalontoolkit/utilities.h>
++#include <avalontoolkit/ssmatch.h>
++#include <avalontoolkit/smi2mol.h>
++#include <avalontoolkit/canonizer.h>
++#include <avalontoolkit/layout.h>
++#include <avalontoolkit/struchk.h>
+
+ extern int RunStruchk(struct reaccs_molecule_t **mpp,
+ struct data_line_t *data_list);
+diff --git a/External/AvalonTools/CMakeLists.txt b/External/AvalonTools/CMakeLists.txt
+index 3e31195fc..314ba35b5 100644
+--- a/External/AvalonTools/CMakeLists.txt
++++ b/External/AvalonTools/CMakeLists.txt
+@@ -2,107 +2,14 @@ if(NOT RDK_BUILD_AVALON_SUPPORT)
+ return()
+ endif(NOT RDK_BUILD_AVALON_SUPPORT)
+
+-if(NOT DEFINED AVALONTOOLS_DIR)
+- set(AVALONTOOLS_DIR "${CMAKE_CURRENT_SOURCE_DIR}/SourceDistribution")
+- set(fileToPatch "${CMAKE_CURRENT_SOURCE_DIR}/SourceDistribution/common/reaccsio.c")
+- set(needDownload "TRUE")
+- if(EXISTS "${fileToPatch}")
+- file(READ "${fileToPatch}" buffer)
+- if("${buffer}" MATCHES "//MyFree\\(\\(char \\*\\)tempdir\\);")
+- set(needDownload "FALSE")
+- endif()
+- endif()
+-else()
+- string(REGEX REPLACE "\\\\" "/" AVALONTOOLS_DIR ${AVALONTOOLS_DIR})
+- set(needDownload "FALSE")
+-endif()
+-
+-set(AVALON_SRC_PATH ${AVALONTOOLS_DIR}/common)
+-
+-if(needDownload)
+- if(NOT DEFINED AVALONTOOLS_URL)
+- set(AVALONTOOLS_URL "https://sourceforge.net/projects/avalontoolkit/files/AvalonToolkit_1.2/AvalonToolkit_1.2.0.source.tar")
+- endif()
+- if(NOT DEFINED AVALONTOOLS_MD5SUM)
+- set(AVALONTOOLS_MD5SUM "092a94f421873f038aa67d4a6cc8cb54")
+- endif()
+- if(NOT DEFINED AVALONTOOLS_BASE)
+- string(REGEX REPLACE "^.*/" "" AVALONTOOLS_BASE "${AVALONTOOLS_URL}")
+- endif()
+- downloadAndCheckMD5(${AVALONTOOLS_URL} "${CMAKE_CURRENT_SOURCE_DIR}/${AVALONTOOLS_BASE}" ${AVALONTOOLS_MD5SUM})
+- execute_process(COMMAND ${CMAKE_COMMAND} -E tar xf
+- ${CMAKE_CURRENT_SOURCE_DIR}/AvalonToolkit_1.2.0.source.tar
+- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
+- # apply patch to AvalonTools
+- configure_file("${fileToPatch}" "${fileToPatch}.orig" COPYONLY)
+- file(READ "${fileToPatch}" buffer)
+- string(REGEX REPLACE "MyFree\\(\\(char \\*\\)tempdir\\);"
+- "//MyFree((char *)tempdir);" buffer "${buffer}")
+- file(WRITE "${fileToPatch}" "${buffer}")
+-endif()
+
+ if (MSVC)
+ add_definitions("/D_CRT_SECURE_NO_WARNINGS")
+ add_compile_options(/wd4224 /wd4101 /wd4018 /wd4996 /wd4244 /wd4305 /wd4013 /wd4146 /wd4334 /wd4715 /wd4715 /nologo)
+ endif(MSVC)
+
+-set(avalon_clib_srcs ${AVALON_SRC_PATH}/layout.c
+- ${AVALON_SRC_PATH}/symboltable.c
+- ${AVALON_SRC_PATH}/patclean.c
+- ${AVALON_SRC_PATH}/utilities.c
+- ${AVALON_SRC_PATH}/symbol_lists.c
+- ${AVALON_SRC_PATH}/stereo.c
+- ${AVALON_SRC_PATH}/set.c
+- ${AVALON_SRC_PATH}/perceive.c
+- ${AVALON_SRC_PATH}/local.c
+- ${AVALON_SRC_PATH}/graph.c
+- ${AVALON_SRC_PATH}/geometry.c
+- ${AVALON_SRC_PATH}/forio.c
+- ${AVALON_SRC_PATH}/depictutil.c
+- ${AVALON_SRC_PATH}/denormal.c
+- ${AVALON_SRC_PATH}/casutils.c
+- ${AVALON_SRC_PATH}/ssmatch.c
+- ${AVALON_SRC_PATH}/rtutils.c
+- ${AVALON_SRC_PATH}/smi2mol.c
+- ${AVALON_SRC_PATH}/didepict.c
+- ${AVALON_SRC_PATH}/pattern.c
+- ${AVALON_SRC_PATH}/canonizer.c
+- ${AVALON_SRC_PATH}/aacheck.c
+- ${AVALON_SRC_PATH}/fixcharges.c
+- ${AVALON_SRC_PATH}/struchk.c
+- ${AVALON_SRC_PATH}/reaccsio.c
+- ${AVALON_SRC_PATH}/hashcode.c
+- )
+-
+-# we need this to ensure that builds continue
+-# to work on linux systems with older versions
+-# of glibc when we're building with gcc-4.1.
+-# Without this flag, we'll endup requiring
+-# glibc 2.7.
+-if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
+- add_definitions(-D_GNU_SOURCE=1)
+-endif()
+-
+-if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
+- set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-return-type -Wno-implicit-function-declaration -Wno-absolute-value -Wno-parentheses -Wno-logical-op-parentheses -Wno-dangling-else -Wno-format")
+-endif()
+-if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+- set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-result -Wformat-overflow=0 -Wformat=0 -Wno-format-security -Wno-implicit-function-declaration")
+-endif()
+-
+-
+-
+-rdkit_library(avalon_clib ${avalon_clib_srcs})
+-target_compile_definitions(avalon_clib PRIVATE RDKIT_AVALONLIB_BUILD)
+-if((MSVC AND RDK_INSTALL_DLLS_MSVC) OR ((NOT MSVC) AND WIN32))
+- set_target_properties(avalon_clib PROPERTIES WINDOWS_EXPORT_ALL_SYMBOLS TRUE)
+-endif()
+-
+-include_directories(${CMAKE_CURRENT_SOURCE_DIR})
+-include_directories(${AVALON_SRC_PATH})
+-
+ rdkit_library(AvalonLib AvalonTools.cpp SHARED
+- LINK_LIBRARIES avalon_clib SubstructMatch FileParsers SmilesParse GraphMol DataStructs )
++ LINK_LIBRARIES -lavalontoolkit SubstructMatch FileParsers SmilesParse GraphMol DataStructs )
+ target_compile_definitions(AvalonLib PRIVATE RDKIT_AVALONLIB_BUILD)
+ rdkit_headers(AvalonTools.h DEST GraphMol)
+ rdkit_test(testAvalonLib1 test1.cpp
+diff --git a/External/AvalonTools/Wrap/pyAvalonTools.cpp b/External/AvalonTools/Wrap/pyAvalonTools.cpp
+index fb24c497e..31997bc0a 100644
+--- a/External/AvalonTools/Wrap/pyAvalonTools.cpp
++++ b/External/AvalonTools/Wrap/pyAvalonTools.cpp
+@@ -12,7 +12,7 @@
+ #include <boost/cstdint.hpp>
+
+ extern "C" {
+-#include "struchk.h"
++#include <avalontoolkit/struchk.h>
+ }
+
+ namespace python = boost::python;
+diff --git a/External/FreeSASA/CMakeLists.txt b/External/FreeSASA/CMakeLists.txt
+index 43dfbdc40..81165a143 100644
+--- a/External/FreeSASA/CMakeLists.txt
++++ b/External/FreeSASA/CMakeLists.txt
+@@ -2,88 +2,8 @@ if(NOT RDK_BUILD_FREESASA_SUPPORT)
+ return()
+ endif(NOT RDK_BUILD_FREESASA_SUPPORT)
+
+-if(NOT DEFINED FREESASA_DIR)
+- set(FREESASA_DIR "${CMAKE_CURRENT_SOURCE_DIR}/freesasa")
+- set(needDownload "TRUE")
+- if(EXISTS "${FREESASA_DIR}/src/freesasa.h")
+- set(needDownload "FALSE")
+- endif()
+-endif()
+-
+-if(needDownload)
+- # don't actually use the md5 here
+- set(FREESASA_VERSION "2.0.3")
+- set(FREESASA_SRC_DIR "${CMAKE_CURRENT_SOURCE_DIR}/freesasa-${FREESASA_VERSION}")
+- if(NOT EXISTS "${FREESASA_SRC_DIR}/src")
+- downloadAndCheckMD5("https://github.com/mittinatten/freesasa/releases/download/${FREESASA_VERSION}/freesasa-${FREESASA_VERSION}.tar.gz"
+- "${CMAKE_CURRENT_SOURCE_DIR}/master.tar.gz"
+- "")
+- execute_process(COMMAND ${CMAKE_COMMAND} -E tar zxf
+- ${CMAKE_CURRENT_SOURCE_DIR}/master.tar.gz
+- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
+- message("Copying freesasa2.c to ${FREESASA_SRC_DIR}/src/freesasa2.c")
+- file(COPY freesasa2.c DESTINATION ${FREESASA_SRC_DIR}/src/)
+- else()
+- message("FreeSASA found in ${FREESASA_SRC_DIR}, no download required")
+- endif()
+-endif()
+-
+-set(CMAKE_C_STANDARD 99)
+-
+-set (freesasa_clib_srcs
+- ${FREESASA_SRC_DIR}/src/classifier.c
+- ${FREESASA_SRC_DIR}/src/classifier_naccess.c
+- ${FREESASA_SRC_DIR}/src/classifier_oons.c
+- ${FREESASA_SRC_DIR}/src/classifier_protor.c
+- ${FREESASA_SRC_DIR}/src/coord.c
+- ${FREESASA_SRC_DIR}/src/freesasa2.c
+- ${FREESASA_SRC_DIR}/src/lexer.c
+- ${FREESASA_SRC_DIR}/src/log.c
+- ${FREESASA_SRC_DIR}/src/nb.c
+- ${FREESASA_SRC_DIR}/src/node.c
+- ${FREESASA_SRC_DIR}/src/parser.c
+- ${FREESASA_SRC_DIR}/src/pdb.c
+- ${FREESASA_SRC_DIR}/src/rsa.c
+- ${FREESASA_SRC_DIR}/src/sasa_lr.c
+- ${FREESASA_SRC_DIR}/src/sasa_sr.c
+- ${FREESASA_SRC_DIR}/src/selection.c
+- ${FREESASA_SRC_DIR}/src/structure.c
+- ${FREESASA_SRC_DIR}/src/util.c
+- )
+-if((MSVC AND RDK_INSTALL_DLLS_MSVC) OR ((NOT MSVC) AND WIN32))
+- set(freesasa_additional_exports ${CMAKE_CURRENT_SOURCE_DIR}/additional_exports.def)
+- file(WRITE ${freesasa_additional_exports}
+- "EXPORTS\n"
+- "freesasa_default_parameters DATA\n"
+- "freesasa_protor_classifier DATA\n"
+- "freesasa_naccess_classifier DATA\n"
+- "freesasa_oons_classifier DATA\n"
+- )
+- set (freesasa_clib_srcs
+- ${freesasa_clib_srcs}
+- ${freesasa_additional_exports}
+- )
+-endif()
+-
+-set (freesasa_h ${FREESASA_SRC_DIR}/src/freesasa.h)
+-file(READ ${freesasa_h} freesasa_h_data)
+-string(REGEX REPLACE "(#include <stdio.h>)" "\\1\n#include <RDGeneral/export.h>" freesasa_h_data "${freesasa_h_data}")
+-string(REGEX REPLACE "([^R][^D][^K][^I][^T][^_][^F][^R][^E][^E][^S][^A][^S][^A][^_][^C][^L][^I][^B][^_][^E][^X][^P][^O][^R][^T][^ ])(extern const)" "\\1RDKIT_FREESASA_CLIB_EXPORT \\2" freesasa_h_data "${freesasa_h_data}")
+-file(WRITE ${freesasa_h} "${freesasa_h_data}")
+-
+-add_definitions(-DUSE_THREADS=0)
+-add_definitions(-DUSE_JSON=0)
+-add_definitions(-DUSE_XML=0)
+-rdkit_library(freesasa_clib ${freesasa_clib_srcs})
+-target_compile_definitions(freesasa_clib PRIVATE RDKIT_FREESASALIB_BUILD)
+-
+-if((MSVC AND RDK_INSTALL_DLLS_MSVC) OR ((NOT MSVC) AND WIN32))
+- set_target_properties(freesasa_clib PROPERTIES WINDOWS_EXPORT_ALL_SYMBOLS TRUE)
+-endif()
+-include_directories("${FREESASA_SRC_DIR}/src")
+-
+ rdkit_library(FreeSASALib RDFreeSASA.cpp SHARED
+- LINK_LIBRARIES freesasa_clib GraphMol )
++ LINK_LIBRARIES -lfreesasa GraphMol )
+ target_compile_definitions(FreeSASALib PRIVATE RDKIT_FREESASALIB_BUILD)
+
+ rdkit_headers(RDFreeSASA.h DEST GraphMol)
+diff --git a/External/RingFamilies/CMakeLists.txt b/External/RingFamilies/CMakeLists.txt
+index 08dd1fe04..66ecd5834 100644
+--- a/External/RingFamilies/CMakeLists.txt
++++ b/External/RingFamilies/CMakeLists.txt
+@@ -1,47 +1,6 @@
+-add_custom_target(ringdecomposerlib_support ALL)
+-
+ if(NOT RDK_USE_URF)
+ return()
+ endif(NOT RDK_USE_URF)
+
+-if(NOT DEFINED URFLIB_DIR)
+- set(URFLIB_DIR "${CMAKE_CURRENT_SOURCE_DIR}/RingDecomposerLib/src/RingDecomposerLib")
+-endif()
+-
+-if(NOT EXISTS "${URFLIB_DIR}/RingDecomposerLib.h")
+- set(RELEASE_NO "1.1.3_rdkit")
+- set(MD5 "e9a0bcdda8b921a35e812b9888a9a874")
+- downloadAndCheckMD5("https://github.com/rareylab/RingDecomposerLib/archive/v${RELEASE_NO}.tar.gz"
+- "${CMAKE_CURRENT_SOURCE_DIR}/RingDecomposerLib-v${RELEASE_NO}.tar.gz" ${MD5})
+- execute_process(COMMAND ${CMAKE_COMMAND} -E tar zxf
+- ${CMAKE_CURRENT_SOURCE_DIR}/RingDecomposerLib-v${RELEASE_NO}.tar.gz
+- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
+- file(RENAME "RingDecomposerLib-${RELEASE_NO}" "RingDecomposerLib")
+-else()
+- message("-- Found RingDecomposerLib source in ${URFLIB_DIR}")
+-endif()
+-
+-set(urflib_INCLUDE_DIRS ${URFLIB_DIR}
+- CACHE STRING "RingDecomposerLib Include Dir" FORCE)
+-file(GLOB URFSOURCES "${URFLIB_DIR}/*.c")
+-
+-#if((MSVC AND RDK_INSTALL_DLLS_MSVC) OR ((NOT MSVC) AND WIN32 AND (NOT RDK_INSTALL_STATIC_LIBS)))
+-if(WIN32)
+- set (ringdecomposerlib_h ${URFLIB_DIR}/RingDecomposerLib.h)
+- file(READ ${ringdecomposerlib_h} ringdecomposerlib_h_data)
+- if (NOT "${ringdecomposerlib_h_data}" MATCHES "RDKIT_URFLIB_BUILD")
+- string(REGEX REPLACE "(#if[ ]+\\([ ]*defined[ ]*\\([ ]*_WIN32[ ]*\\)[ ]*&&[ ]*)(defined\\([ ]*_MSC_VER[ ]*\\))" "\\1!defined( RDL_WIN_STATIC )" ringdecomposerlib_h_data "${ringdecomposerlib_h_data}")
+- string(REGEX REPLACE "([ ]*)(#define[ ]+RDL_API[ ]+__declspec)(\\([ ]*dllexport[ ]*\\))" "\\1#ifdef RDKIT_URFLIB_BUILD\n\\1\\1\\2\\3\n\\1#else\n\\1\\1\\2(dllimport)\n\\1#endif" ringdecomposerlib_h_data "${ringdecomposerlib_h_data}")
+- file(WRITE ${ringdecomposerlib_h} "${ringdecomposerlib_h_data}")
+- endif()
+-endif()
+-rdkit_library(RingDecomposerLib ${URFSOURCES} SHARED)
+-if((MSVC AND RDK_INSTALL_DLLS_MSVC) OR ((NOT MSVC) AND WIN32 AND (NOT RDK_INSTALL_STATIC_LIBS)))
+- target_compile_definitions(RingDecomposerLib PRIVATE RDKIT_URFLIB_BUILD)
+-endif()
+-install(TARGETS RingDecomposerLib DESTINATION ${RDKit_LibDir})
+-rdkit_headers(${URFLIB_DIR}/RingDecomposerLib.h DEST "")
+-
+-
+-set(RDK_URF_LIBS RingDecomposerLib
+- CACHE STRING "the libraries for the URF calculation" FORCE)
++rdkit_library(RingDecomposerLib dummy.cpp SHARED LINK_LIBRARIES -lRingDecomposerLib)
++set(RDK_URF_LIBS RingDecomposerLib CACHE STRING "" FORCE)
+diff --git a/External/RingFamilies/dummy.cpp b/External/RingFamilies/dummy.cpp
+new file mode 100644
+index 000000000..e69de29bb
+diff --git a/External/YAeHMOP/CMakeLists.txt b/External/YAeHMOP/CMakeLists.txt
+index f1027b3bd..8bee2f910 100644
+--- a/External/YAeHMOP/CMakeLists.txt
++++ b/External/YAeHMOP/CMakeLists.txt
+@@ -18,32 +18,8 @@ endif()
+
+ include_directories( ${RDKit_ExternalDir}/YAeHMOP )
+
+-ExternalProject_Add(yaehmop_project
+- GIT_REPOSITORY https://github.com/greglandrum/yaehmop.git
+- GIT_TAG master
+- UPDATE_COMMAND ""
+- PATCH_COMMAND ""
+- PREFIX ${CMAKE_CURRENT_SOURCE_DIR}
+- SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/yaehmop"
+- SOURCE_SUBDIR "tightbind"
+- CMAKE_ARGS -DUSE_BLAS_LAPACK=OFF -DCMAKE_INSTALL_PREFIX=${PROJECT_BINARY_DIR} -DCMAKE_C_FLAGS=${CMAKE_C_FLAGS} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
+- TEST_COMMAND "")
+-
+-include_directories(${PROJECT_BINARY_DIR}/include)
+-link_directories(${PROJECT_BINARY_DIR}/lib)
+-link_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/yaehmop_project-build)
+-
+-set(EHT_PARAM_FILE ${CMAKE_CURRENT_SOURCE_DIR}/yaehmop/tightbind/eht_parms.dat )
+-install(FILES ${EHT_PARAM_FILE}
+- DESTINATION ${RDKit_ShareDir}/Data
+- COMPONENT data)
+-
+-message("YAeHMOP include_dirs: ${PROJECT_BINARY_DIR}/include")
+-message("YAeHMOP link_dirs: ${PROJECT_BINARY_DIR}/lib ${CMAKE_CURRENT_SOURCE_DIR}/src/yaehmop_project-build")
+-
+-rdkit_library(EHTLib EHTTools.cpp SHARED LINK_LIBRARIES yaehmop_eht GraphMol )
++rdkit_library(EHTLib EHTTools.cpp SHARED LINK_LIBRARIES -lyaehmop_eht GraphMol )
+ target_compile_definitions(EHTLib PRIVATE RDKIT_EHTLIB_BUILD)
+-add_dependencies(EHTLib yaehmop_project)
+ rdkit_headers(EHTTools.h DEST GraphMol)
+ rdkit_catch_test(testEHTLib1 test1.cpp
+ LINK_LIBRARIES EHTLib FileParsers SmilesParse )
+diff --git a/External/YAeHMOP/EHTTools.cpp b/External/YAeHMOP/EHTTools.cpp
+index 7a229f51f..71033dc5c 100644
+--- a/External/YAeHMOP/EHTTools.cpp
++++ b/External/YAeHMOP/EHTTools.cpp
+@@ -10,7 +10,7 @@
+ #include <fstream>
+
+ extern "C" {
+-#include <yaehmop/tightbind/bind.h>
++#include <yaehmop/bind.h>
+ }
+
+ namespace RDKit {
diff --git a/gnu/packages/patches/rottlog-direntry.patch b/gnu/packages/patches/rottlog-direntry.patch
new file mode 100644
index 0000000000..176d91dc0f
--- /dev/null
+++ b/gnu/packages/patches/rottlog-direntry.patch
@@ -0,0 +1,18 @@
+Add a direntry to the Rottlog info manual.
+
+diff --git a/doc/rottlog.texi b/doc/rottlog.texi
+index 0ba8019..0f29916 100644
+--- a/doc/rottlog.texi
++++ b/doc/rottlog.texi
+@@ -18,6 +18,11 @@ Documentation License.''
+ @end quotation
+ @end copying
+
++@dircategory System administration
++@direntry
++* Rottlog: (rottlog). The GNU log management utility.
++@end direntry
++
+ @paragraphindent none
+
+ @iftex
diff --git a/gnu/packages/patches/ruby-sanitize-system-libxml.patch b/gnu/packages/patches/ruby-sanitize-system-libxml.patch
deleted file mode 100644
index d19eb07294..0000000000
--- a/gnu/packages/patches/ruby-sanitize-system-libxml.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-Fix test failures that occur when nokogiri is using system libxml:
-
- https://github.com/rgrove/sanitize/issues/198
-
-Taken from upstream:
-https://github.com/rgrove/sanitize/commit/21da9b62baf9ea659811d92e6b574130aee57eba
-
-diff --git a/test/test_malicious_html.rb b/test/test_malicious_html.rb
-index 2c23074..0756de0 100644
---- a/test/test_malicious_html.rb
-+++ b/test/test_malicious_html.rb
-@@ -135,6 +135,8 @@
- # The relevant libxml2 code is here:
- # <https://github.com/GNOME/libxml2/commit/960f0e275616cadc29671a218d7fb9b69eb35588>
- describe 'unsafe libxml2 server-side includes in attributes' do
-+ using_unpatched_libxml2 = Nokogiri::VersionInfo.instance.libxml2_using_system?
-+
- tag_configs = [
- {
- tag_name: 'a',
-@@ -166,6 +168,8 @@
- input = %[<#{tag_name} #{attr_name}='examp<!--" onmouseover=alert(1)>-->le.com'>foo</#{tag_name}>]
-
- it 'should escape unsafe characters in attributes' do
-+ skip "behavior should only exist in nokogiri's patched libxml" if using_unpatched_libxml2
-+
- # This uses Nokogumbo's HTML-compliant serializer rather than
- # libxml2's.
- @s.fragment(input).
-@@ -191,6 +195,8 @@
- input = %[<#{tag_name} #{attr_name}='examp<!--" onmouseover=alert(1)>-->le.com'>foo</#{tag_name}>]
-
- it 'should not escape characters unnecessarily' do
-+ skip "behavior should only exist in nokogiri's patched libxml" if using_unpatched_libxml2
-+
- # This uses Nokogumbo's HTML-compliant serializer rather than
- # libxml2's.
- @s.fragment(input).
diff --git a/gnu/packages/patches/rust-1.64-fix-riscv64-bootstrap.patch b/gnu/packages/patches/rust-1.64-fix-riscv64-bootstrap.patch
new file mode 100644
index 0000000000..4567f81224
--- /dev/null
+++ b/gnu/packages/patches/rust-1.64-fix-riscv64-bootstrap.patch
@@ -0,0 +1,565 @@
+https://github.com/rust-lang/rust/commit/263edd43c5255084292329423c61a9d69715ebfa.patch
+https://github.com/rust-lang/rust/issues/102155
+The issue is seen on native builds on riscv64 across multiple Linux
+distributions. An alternative workaround appears to be building stage 1
+with debug enabled.
+
+From 27412d1e3e128349bc515c16ce882860e20f037d Mon Sep 17 00:00:00 2001
+From: 5225225 <5225225@mailbox.org>
+Date: Thu, 14 Jul 2022 22:42:47 +0100
+Subject: [PATCH] Use constant eval to do strict validity checks
+
+---
+ Cargo.lock | 1 +
+ .../src/intrinsics/mod.rs | 15 +----
+ compiler/rustc_codegen_ssa/Cargo.toml | 1 +
+ compiler/rustc_codegen_ssa/src/mir/block.rs | 9 ++-
+ .../src/const_eval/machine.rs | 2 +-
+ .../src/interpret/intrinsics.rs | 56 ++++++++--------
+ compiler/rustc_const_eval/src/lib.rs | 6 ++
+ .../src/might_permit_raw_init.rs | 40 +++++++++++
+ compiler/rustc_middle/src/query/mod.rs | 8 +++
+ compiler/rustc_middle/src/ty/query.rs | 1 +
+ compiler/rustc_query_impl/src/keys.rs | 12 +++-
+ compiler/rustc_target/src/abi/mod.rs | 38 +++++------
+ .../intrinsics/panic-uninitialized-zeroed.rs | 66 ++++++++++++-------
+ 13 files changed, 161 insertions(+), 94 deletions(-)
+ create mode 100644 compiler/rustc_const_eval/src/might_permit_raw_init.rs
+
+diff --git a/Cargo.lock b/Cargo.lock
+index 147d47044078a..dd6f0345affd0 100644
+--- a/Cargo.lock
++++ b/Cargo.lock
+@@ -3664,6 +3664,7 @@ dependencies = [
+ "rustc_arena",
+ "rustc_ast",
+ "rustc_attr",
++ "rustc_const_eval",
+ "rustc_data_structures",
+ "rustc_errors",
+ "rustc_fs_util",
+diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs
+index eafae1cdc8af0..4b2207f375879 100644
+--- a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs
++++ b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs
+@@ -58,7 +58,6 @@ pub(crate) use llvm::codegen_llvm_intrinsic_call;
+ use rustc_middle::ty::print::with_no_trimmed_paths;
+ use rustc_middle::ty::subst::SubstsRef;
+ use rustc_span::symbol::{kw, sym, Symbol};
+-use rustc_target::abi::InitKind;
+
+ use crate::prelude::*;
+ use cranelift_codegen::ir::AtomicRmwOp;
+@@ -672,12 +671,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
+ return;
+ }
+
+- if intrinsic == sym::assert_zero_valid
+- && !layout.might_permit_raw_init(
+- fx,
+- InitKind::Zero,
+- fx.tcx.sess.opts.unstable_opts.strict_init_checks) {
+-
++ if intrinsic == sym::assert_zero_valid && !fx.tcx.permits_zero_init(layout) {
+ with_no_trimmed_paths!({
+ crate::base::codegen_panic(
+ fx,
+@@ -688,12 +682,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
+ return;
+ }
+
+- if intrinsic == sym::assert_uninit_valid
+- && !layout.might_permit_raw_init(
+- fx,
+- InitKind::Uninit,
+- fx.tcx.sess.opts.unstable_opts.strict_init_checks) {
+-
++ if intrinsic == sym::assert_uninit_valid && !fx.tcx.permits_uninit_init(layout) {
+ with_no_trimmed_paths!({
+ crate::base::codegen_panic(
+ fx,
+diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml
+index faabea92f5a6c..81c8b9ceb136e 100644
+--- a/compiler/rustc_codegen_ssa/Cargo.toml
++++ b/compiler/rustc_codegen_ssa/Cargo.toml
+@@ -40,6 +40,7 @@ rustc_metadata = { path = "../rustc_metadata" }
+ rustc_query_system = { path = "../rustc_query_system" }
+ rustc_target = { path = "../rustc_target" }
+ rustc_session = { path = "../rustc_session" }
++rustc_const_eval = { path = "../rustc_const_eval" }
+
+ [dependencies.object]
+ version = "0.29.0"
+diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
+index 745da821c9d76..773c55cf551d5 100644
+--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
++++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
+@@ -22,7 +22,7 @@ use rustc_span::source_map::Span;
+ use rustc_span::{sym, Symbol};
+ use rustc_symbol_mangling::typeid_for_fnabi;
+ use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
+-use rustc_target::abi::{self, HasDataLayout, InitKind, WrappingRange};
++use rustc_target::abi::{self, HasDataLayout, WrappingRange};
+ use rustc_target::spec::abi::Abi;
+
+ /// Used by `FunctionCx::codegen_terminator` for emitting common patterns
+@@ -528,7 +528,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
+ source_info: mir::SourceInfo,
+ target: Option<mir::BasicBlock>,
+ cleanup: Option<mir::BasicBlock>,
+- strict_validity: bool,
+ ) -> bool {
+ // Emit a panic or a no-op for `assert_*` intrinsics.
+ // These are intrinsics that compile to panics so that we can get a message
+@@ -547,12 +546,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
+ });
+ if let Some(intrinsic) = panic_intrinsic {
+ use AssertIntrinsic::*;
++
+ let ty = instance.unwrap().substs.type_at(0);
+ let layout = bx.layout_of(ty);
+ let do_panic = match intrinsic {
+ Inhabited => layout.abi.is_uninhabited(),
+- ZeroValid => !layout.might_permit_raw_init(bx, InitKind::Zero, strict_validity),
+- UninitValid => !layout.might_permit_raw_init(bx, InitKind::Uninit, strict_validity),
++ ZeroValid => !bx.tcx().permits_zero_init(layout),
++ UninitValid => !bx.tcx().permits_uninit_init(layout),
+ };
+ if do_panic {
+ let msg_str = with_no_visible_paths!({
+@@ -687,7 +687,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
+ source_info,
+ target,
+ cleanup,
+- self.cx.tcx().sess.opts.unstable_opts.strict_init_checks,
+ ) {
+ return;
+ }
+diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs
+index 29ab1d187719c..e00e667fb71e2 100644
+--- a/compiler/rustc_const_eval/src/const_eval/machine.rs
++++ b/compiler/rustc_const_eval/src/const_eval/machine.rs
+@@ -104,7 +104,7 @@ pub struct CompileTimeInterpreter<'mir, 'tcx> {
+ }
+
+ impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
+- pub(super) fn new(const_eval_limit: Limit, can_access_statics: bool) -> Self {
++ pub(crate) fn new(const_eval_limit: Limit, can_access_statics: bool) -> Self {
+ CompileTimeInterpreter {
+ steps_remaining: const_eval_limit.0,
+ stack: Vec::new(),
+diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
+index e2a8a9891f72f..7827fb8395b7f 100644
+--- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs
++++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
+@@ -15,7 +15,7 @@ use rustc_middle::ty::layout::LayoutOf as _;
+ use rustc_middle::ty::subst::SubstsRef;
+ use rustc_middle::ty::{Ty, TyCtxt};
+ use rustc_span::symbol::{sym, Symbol};
+-use rustc_target::abi::{Abi, Align, InitKind, Primitive, Size};
++use rustc_target::abi::{Abi, Align, Primitive, Size};
+
+ use super::{
+ util::ensure_monomorphic_enough, CheckInAllocMsg, ImmTy, InterpCx, Machine, OpTy, PlaceTy,
+@@ -413,35 +413,33 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
+ ),
+ )?;
+ }
+- if intrinsic_name == sym::assert_zero_valid
+- && !layout.might_permit_raw_init(
+- self,
+- InitKind::Zero,
+- self.tcx.sess.opts.unstable_opts.strict_init_checks,
+- )
+- {
+- M::abort(
+- self,
+- format!(
+- "aborted execution: attempted to zero-initialize type `{}`, which is invalid",
+- ty
+- ),
+- )?;
++
++ if intrinsic_name == sym::assert_zero_valid {
++ let should_panic = !self.tcx.permits_zero_init(layout);
++
++ if should_panic {
++ M::abort(
++ self,
++ format!(
++ "aborted execution: attempted to zero-initialize type `{}`, which is invalid",
++ ty
++ ),
++ )?;
++ }
+ }
+- if intrinsic_name == sym::assert_uninit_valid
+- && !layout.might_permit_raw_init(
+- self,
+- InitKind::Uninit,
+- self.tcx.sess.opts.unstable_opts.strict_init_checks,
+- )
+- {
+- M::abort(
+- self,
+- format!(
+- "aborted execution: attempted to leave type `{}` uninitialized, which is invalid",
+- ty
+- ),
+- )?;
++
++ if intrinsic_name == sym::assert_uninit_valid {
++ let should_panic = !self.tcx.permits_uninit_init(layout);
++
++ if should_panic {
++ M::abort(
++ self,
++ format!(
++ "aborted execution: attempted to leave type `{}` uninitialized, which is invalid",
++ ty
++ ),
++ )?;
++ }
+ }
+ }
+ sym::simd_insert => {
+diff --git a/compiler/rustc_const_eval/src/lib.rs b/compiler/rustc_const_eval/src/lib.rs
+index d65d4f7eb720e..72ac6af685dc4 100644
+--- a/compiler/rustc_const_eval/src/lib.rs
++++ b/compiler/rustc_const_eval/src/lib.rs
+@@ -33,11 +33,13 @@ extern crate rustc_middle;
+ pub mod const_eval;
+ mod errors;
+ pub mod interpret;
++mod might_permit_raw_init;
+ pub mod transform;
+ pub mod util;
+
+ use rustc_middle::ty;
+ use rustc_middle::ty::query::Providers;
++use rustc_target::abi::InitKind;
+
+ pub fn provide(providers: &mut Providers) {
+ const_eval::provide(providers);
+@@ -59,4 +61,8 @@ pub fn provide(providers: &mut Providers) {
+ let (param_env, value) = param_env_and_value.into_parts();
+ const_eval::deref_mir_constant(tcx, param_env, value)
+ };
++ providers.permits_uninit_init =
++ |tcx, ty| might_permit_raw_init::might_permit_raw_init(tcx, ty, InitKind::Uninit);
++ providers.permits_zero_init =
++ |tcx, ty| might_permit_raw_init::might_permit_raw_init(tcx, ty, InitKind::Zero);
+ }
+diff --git a/compiler/rustc_const_eval/src/might_permit_raw_init.rs b/compiler/rustc_const_eval/src/might_permit_raw_init.rs
+new file mode 100644
+index 0000000000000..f971c2238c7bb
+--- /dev/null
++++ b/compiler/rustc_const_eval/src/might_permit_raw_init.rs
+@@ -0,0 +1,40 @@
++use crate::const_eval::CompileTimeInterpreter;
++use crate::interpret::{InterpCx, MemoryKind, OpTy};
++use rustc_middle::ty::layout::LayoutCx;
++use rustc_middle::ty::{layout::TyAndLayout, ParamEnv, TyCtxt};
++use rustc_session::Limit;
++use rustc_target::abi::InitKind;
++
++pub fn might_permit_raw_init<'tcx>(
++ tcx: TyCtxt<'tcx>,
++ ty: TyAndLayout<'tcx>,
++ kind: InitKind,
++) -> bool {
++ let strict = tcx.sess.opts.unstable_opts.strict_init_checks;
++
++ if strict {
++ let machine = CompileTimeInterpreter::new(Limit::new(0), false);
++
++ let mut cx = InterpCx::new(tcx, rustc_span::DUMMY_SP, ParamEnv::reveal_all(), machine);
++
++ let allocated = cx
++ .allocate(ty, MemoryKind::Machine(crate::const_eval::MemoryKind::Heap))
++ .expect("OOM: failed to allocate for uninit check");
++
++ if kind == InitKind::Zero {
++ cx.write_bytes_ptr(
++ allocated.ptr,
++ std::iter::repeat(0_u8).take(ty.layout.size().bytes_usize()),
++ )
++ .expect("failed to write bytes for zero valid check");
++ }
++
++ let ot: OpTy<'_, _> = allocated.into();
++
++ // Assume that if it failed, it's a validation failure.
++ cx.validate_operand(&ot).is_ok()
++ } else {
++ let layout_cx = LayoutCx { tcx, param_env: ParamEnv::reveal_all() };
++ ty.might_permit_raw_init(&layout_cx, kind)
++ }
++}
+diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
+index bdae7e5fcd6b1..0581ef41f66c2 100644
+--- a/compiler/rustc_middle/src/query/mod.rs
++++ b/compiler/rustc_middle/src/query/mod.rs
+@@ -2053,4 +2053,12 @@ rustc_queries! {
+ desc { |tcx| "looking up generator diagnostic data of `{}`", tcx.def_path_str(key) }
+ separate_provide_extern
+ }
++
++ query permits_uninit_init(key: TyAndLayout<'tcx>) -> bool {
++ desc { "checking to see if {:?} permits being left uninit", key.ty }
++ }
++
++ query permits_zero_init(key: TyAndLayout<'tcx>) -> bool {
++ desc { "checking to see if {:?} permits being left zeroed", key.ty }
++ }
+ }
+diff --git a/compiler/rustc_middle/src/ty/query.rs b/compiler/rustc_middle/src/ty/query.rs
+index 3d662ed5de4ba..2452bcf6a61b8 100644
+--- a/compiler/rustc_middle/src/ty/query.rs
++++ b/compiler/rustc_middle/src/ty/query.rs
+@@ -28,6 +28,7 @@ use crate::traits::query::{
+ use crate::traits::specialization_graph;
+ use crate::traits::{self, ImplSource};
+ use crate::ty::fast_reject::SimplifiedType;
++use crate::ty::layout::TyAndLayout;
+ use crate::ty::subst::{GenericArg, SubstsRef};
+ use crate::ty::util::AlwaysRequiresDrop;
+ use crate::ty::GeneratorDiagnosticData;
+diff --git a/compiler/rustc_query_impl/src/keys.rs b/compiler/rustc_query_impl/src/keys.rs
+index 6fbafeb1d32b3..5477431431374 100644
+--- a/compiler/rustc_query_impl/src/keys.rs
++++ b/compiler/rustc_query_impl/src/keys.rs
+@@ -6,7 +6,7 @@ use rustc_middle::mir;
+ use rustc_middle::traits;
+ use rustc_middle::ty::fast_reject::SimplifiedType;
+ use rustc_middle::ty::subst::{GenericArg, SubstsRef};
+-use rustc_middle::ty::{self, Ty, TyCtxt};
++use rustc_middle::ty::{self, layout::TyAndLayout, Ty, TyCtxt};
+ use rustc_span::symbol::{Ident, Symbol};
+ use rustc_span::{Span, DUMMY_SP};
+
+@@ -385,6 +385,16 @@ impl<'tcx> Key for Ty<'tcx> {
+ }
+ }
+
++impl<'tcx> Key for TyAndLayout<'tcx> {
++ #[inline(always)]
++ fn query_crate_is_local(&self) -> bool {
++ true
++ }
++ fn default_span(&self, _: TyCtxt<'_>) -> Span {
++ DUMMY_SP
++ }
++}
++
+ impl<'tcx> Key for (Ty<'tcx>, Ty<'tcx>) {
+ #[inline(always)]
+ fn query_crate_is_local(&self) -> bool {
+diff --git a/compiler/rustc_target/src/abi/mod.rs b/compiler/rustc_target/src/abi/mod.rs
+index d1eafd6ac5fb8..6f4d073d70486 100644
+--- a/compiler/rustc_target/src/abi/mod.rs
++++ b/compiler/rustc_target/src/abi/mod.rs
+@@ -1372,7 +1372,7 @@ pub struct PointeeInfo {
+
+ /// Used in `might_permit_raw_init` to indicate the kind of initialisation
+ /// that is checked to be valid
+-#[derive(Copy, Clone, Debug)]
++#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+ pub enum InitKind {
+ Zero,
+ Uninit,
+@@ -1487,14 +1487,18 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
+ ///
+ /// `init_kind` indicates if the memory is zero-initialized or left uninitialized.
+ ///
+- /// `strict` is an opt-in debugging flag added in #97323 that enables more checks.
++ /// This code is intentionally conservative, and will not detect
++ /// * zero init of an enum whose 0 variant does not allow zero initialization
++ /// * making uninitialized types who have a full valid range (ints, floats, raw pointers)
++ /// * Any form of invalid value being made inside an array (unless the value is uninhabited)
+ ///
+- /// This is conservative: in doubt, it will answer `true`.
++ /// A strict form of these checks that uses const evaluation exists in
++ /// `rustc_const_eval::might_permit_raw_init`, and a tracking issue for making these checks
++ /// stricter is <https://github.com/rust-lang/rust/issues/66151>.
+ ///
+- /// FIXME: Once we removed all the conservatism, we could alternatively
+- /// create an all-0/all-undef constant and run the const value validator to see if
+- /// this is a valid value for the given type.
+- pub fn might_permit_raw_init<C>(self, cx: &C, init_kind: InitKind, strict: bool) -> bool
++ /// FIXME: Once all the conservatism is removed from here, and the checks are ran by default,
++ /// we can use the const evaluation checks always instead.
++ pub fn might_permit_raw_init<C>(self, cx: &C, init_kind: InitKind) -> bool
+ where
+ Self: Copy,
+ Ty: TyAbiInterface<'a, C>,
+@@ -1507,13 +1511,8 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
+ s.valid_range(cx).contains(0)
+ }
+ InitKind::Uninit => {
+- if strict {
+- // The type must be allowed to be uninit (which means "is a union").
+- s.is_uninit_valid()
+- } else {
+- // The range must include all values.
+- s.is_always_valid(cx)
+- }
++ // The range must include all values.
++ s.is_always_valid(cx)
+ }
+ }
+ };
+@@ -1534,19 +1533,12 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
+ // If we have not found an error yet, we need to recursively descend into fields.
+ match &self.fields {
+ FieldsShape::Primitive | FieldsShape::Union { .. } => {}
+- FieldsShape::Array { count, .. } => {
++ FieldsShape::Array { .. } => {
+ // FIXME(#66151): For now, we are conservative and do not check arrays by default.
+- if strict
+- && *count > 0
+- && !self.field(cx, 0).might_permit_raw_init(cx, init_kind, strict)
+- {
+- // Found non empty array with a type that is unhappy about this kind of initialization
+- return false;
+- }
+ }
+ FieldsShape::Arbitrary { offsets, .. } => {
+ for idx in 0..offsets.len() {
+- if !self.field(cx, idx).might_permit_raw_init(cx, init_kind, strict) {
++ if !self.field(cx, idx).might_permit_raw_init(cx, init_kind) {
+ // We found a field that is unhappy with this kind of initialization.
+ return false;
+ }
+diff --git a/src/test/ui/intrinsics/panic-uninitialized-zeroed.rs b/src/test/ui/intrinsics/panic-uninitialized-zeroed.rs
+index 3ffd35ecdb8da..255151a96032c 100644
+--- a/src/test/ui/intrinsics/panic-uninitialized-zeroed.rs
++++ b/src/test/ui/intrinsics/panic-uninitialized-zeroed.rs
+@@ -57,6 +57,13 @@ enum LR_NonZero {
+
+ struct ZeroSized;
+
++#[allow(dead_code)]
++#[repr(i32)]
++enum ZeroIsValid {
++ Zero(u8) = 0,
++ One(NonNull<()>) = 1,
++}
++
+ fn test_panic_msg<T>(op: impl (FnOnce() -> T) + panic::UnwindSafe, msg: &str) {
+ let err = panic::catch_unwind(op).err();
+ assert_eq!(
+@@ -152,33 +159,12 @@ fn main() {
+ "attempted to zero-initialize type `*const dyn core::marker::Send`, which is invalid"
+ );
+
+- /* FIXME(#66151) we conservatively do not error here yet.
+- test_panic_msg(
+- || mem::uninitialized::<LR_NonZero>(),
+- "attempted to leave type `LR_NonZero` uninitialized, which is invalid"
+- );
+- test_panic_msg(
+- || mem::zeroed::<LR_NonZero>(),
+- "attempted to zero-initialize type `LR_NonZero`, which is invalid"
+- );
+-
+- test_panic_msg(
+- || mem::uninitialized::<ManuallyDrop<LR_NonZero>>(),
+- "attempted to leave type `std::mem::ManuallyDrop<LR_NonZero>` uninitialized, \
+- which is invalid"
+- );
+- test_panic_msg(
+- || mem::zeroed::<ManuallyDrop<LR_NonZero>>(),
+- "attempted to zero-initialize type `std::mem::ManuallyDrop<LR_NonZero>`, \
+- which is invalid"
+- );
+- */
+-
+ test_panic_msg(
+ || mem::uninitialized::<(NonNull<u32>, u32, u32)>(),
+ "attempted to leave type `(core::ptr::non_null::NonNull<u32>, u32, u32)` uninitialized, \
+ which is invalid"
+ );
++
+ test_panic_msg(
+ || mem::zeroed::<(NonNull<u32>, u32, u32)>(),
+ "attempted to zero-initialize type `(core::ptr::non_null::NonNull<u32>, u32, u32)`, \
+@@ -196,11 +182,23 @@ fn main() {
+ which is invalid"
+ );
+
++ test_panic_msg(
++ || mem::uninitialized::<LR_NonZero>(),
++ "attempted to leave type `LR_NonZero` uninitialized, which is invalid"
++ );
++
++ test_panic_msg(
++ || mem::uninitialized::<ManuallyDrop<LR_NonZero>>(),
++ "attempted to leave type `core::mem::manually_drop::ManuallyDrop<LR_NonZero>` uninitialized, \
++ which is invalid"
++ );
++
+ test_panic_msg(
+ || mem::uninitialized::<NoNullVariant>(),
+ "attempted to leave type `NoNullVariant` uninitialized, \
+ which is invalid"
+ );
++
+ test_panic_msg(
+ || mem::zeroed::<NoNullVariant>(),
+ "attempted to zero-initialize type `NoNullVariant`, \
+@@ -212,10 +210,12 @@ fn main() {
+ || mem::uninitialized::<bool>(),
+ "attempted to leave type `bool` uninitialized, which is invalid"
+ );
++
+ test_panic_msg(
+ || mem::uninitialized::<LR>(),
+ "attempted to leave type `LR` uninitialized, which is invalid"
+ );
++
+ test_panic_msg(
+ || mem::uninitialized::<ManuallyDrop<LR>>(),
+ "attempted to leave type `core::mem::manually_drop::ManuallyDrop<LR>` uninitialized, which is invalid"
+@@ -229,6 +229,7 @@ fn main() {
+ let _val = mem::zeroed::<Option<&'static i32>>();
+ let _val = mem::zeroed::<MaybeUninit<NonNull<u32>>>();
+ let _val = mem::zeroed::<[!; 0]>();
++ let _val = mem::zeroed::<ZeroIsValid>();
+ let _val = mem::uninitialized::<MaybeUninit<bool>>();
+ let _val = mem::uninitialized::<[!; 0]>();
+ let _val = mem::uninitialized::<()>();
+@@ -259,12 +260,33 @@ fn main() {
+ || mem::zeroed::<[NonNull<()>; 1]>(),
+ "attempted to zero-initialize type `[core::ptr::non_null::NonNull<()>; 1]`, which is invalid"
+ );
++
++ // FIXME(#66151) we conservatively do not error here yet (by default).
++ test_panic_msg(
++ || mem::zeroed::<LR_NonZero>(),
++ "attempted to zero-initialize type `LR_NonZero`, which is invalid"
++ );
++
++ test_panic_msg(
++ || mem::zeroed::<ManuallyDrop<LR_NonZero>>(),
++ "attempted to zero-initialize type `core::mem::manually_drop::ManuallyDrop<LR_NonZero>`, \
++ which is invalid"
++ );
+ } else {
+ // These are UB because they have not been officially blessed, but we await the resolution
+ // of <https://github.com/rust-lang/unsafe-code-guidelines/issues/71> before doing
+ // anything about that.
+ let _val = mem::uninitialized::<i32>();
+ let _val = mem::uninitialized::<*const ()>();
++
++ // These are UB, but best to test them to ensure we don't become unintentionally
++ // stricter.
++
++ // It's currently unchecked to create invalid enums and values inside arrays.
++ let _val = mem::zeroed::<LR_NonZero>();
++ let _val = mem::zeroed::<[LR_NonZero; 1]>();
++ let _val = mem::zeroed::<[NonNull<()>; 1]>();
++ let _val = mem::uninitialized::<[NonNull<()>; 1]>();
+ }
+ }
+ }
diff --git a/gnu/packages/patches/rust-shell2batch-lint-fix.patch b/gnu/packages/patches/rust-shell2batch-lint-fix.patch
deleted file mode 100644
index 7c160b6cca..0000000000
--- a/gnu/packages/patches/rust-shell2batch-lint-fix.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-Resolves the following error that was most likely introduced in new version of
-rust as this package is from 2019:
-
-error: unnecessary parentheses around type
- --> src/converter.rs:108:61
- |
-108 | fn add_arguments(arguments: &str, additional_arguments: Vec<(String)>, pre: bool) -> String {
- | ^^^^^^^^ help: remove these parentheses
-
-Reported upstream at https://github.com/sagiegurari/shell2batch/issues/17.
-
-diff --git a/src/converter.rs b/src/converter.rs
-index fc87d68..af309d2 100644
---- a/src/converter.rs
-+++ b/src/converter.rs
-@@ -105,7 +105,7 @@ fn replace_vars(arguments: &str) -> String {
- updated_arguments
- }
-
--fn add_arguments(arguments: &str, additional_arguments: Vec<(String)>, pre: bool) -> String {
-+fn add_arguments(arguments: &str, additional_arguments: Vec<String>, pre: bool) -> String {
- let mut windows_arguments = if pre {
- "".to_string()
- } else {
-
diff --git a/gnu/packages/patches/rw-igraph-0.10.patch b/gnu/packages/patches/rw-igraph-0.10.patch
new file mode 100644
index 0000000000..3544196660
--- /dev/null
+++ b/gnu/packages/patches/rw-igraph-0.10.patch
@@ -0,0 +1,17 @@
+Fix the build when using igraph >= 0.10.
+Retrieved from: https://sourceforge.net/p/rankwidth/tickets/2/.
+
+--- rw-0.9/simplerw.c.newigraph 2017-02-14 00:20:35.000000000 +0900
++++ rw-0.9/simplerw.c 2022-09-11 19:39:47.033917305 +0900
+@@ -134,7 +134,11 @@ int read_graph(const char *format, const
+ igraph_destroy(&igraph);
+ return(-1);
+ }
++#if (IGRAPH_VERSION_MAJOR >= 1) || ((IGRAPH_VERSION_MAJOR == 0) && (IGRAPH_VERSION_MINOR >= 10))
++ igraph_get_adjacency(&igraph, &imatrix, IGRAPH_GET_ADJACENCY_BOTH, NULL, IGRAPH_LOOPS_ONCE);
++#else
+ igraph_get_adjacency(&igraph, &imatrix, IGRAPH_GET_ADJACENCY_BOTH, 0);
++#endif
+ igraph_destroy(&igraph);
+ if(igraph_matrix_nrow(&imatrix) > MAX_VERTICES)
+ {
diff --git a/gnu/packages/patches/sajson-build-with-gcc10.patch b/gnu/packages/patches/sajson-build-with-gcc10.patch
new file mode 100644
index 0000000000..878706dc79
--- /dev/null
+++ b/gnu/packages/patches/sajson-build-with-gcc10.patch
@@ -0,0 +1,45 @@
+This patch is from the upstream pull request
+https://github.com/chadaustin/sajson/pull/54.
+It fixes link errors when building with GCC 10 by moving the definition of
+the static parse_flags table out of the class template.
+
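+As a rough illustration of the technique used below (a standalone sketch,
+not code from sajson; the names table_holder and flags are made up), a
+static data member of a class template is declared inside the class and
+defined once outside it.  The out-of-class definition is implicitly
+instantiated per use, so it can live in a header without causing
+multiple-definition errors at link time:
+
+    // sketch.cpp -- hypothetical, self-contained example
+    #include <cstdio>
+
+    template <typename Unused = void>
+    struct table_holder {
+        static const unsigned char flags[4];   // declaration only
+    };
+
+    // Out-of-class definition: instantiated on demand, safe in a header.
+    template <typename Unused>
+    const unsigned char table_holder<Unused>::flags[4] = {1, 2, 4, 8};
+
+    typedef table_holder<> table;
+
+    int main(void) {
+        std::printf("%d\n", table::flags[2]);  // prints 4
+        return 0;
+    }
+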
+diff --git a/include/sajson.h b/include/sajson.h
+index 8b4e05a..1bd045b 100644
+--- a/include/sajson.h
++++ b/include/sajson.h
+@@ -138,12 +138,17 @@ constexpr inline size_t make_element(tag t, size_t value) {
+ // header. This trick courtesy of Rich Geldreich's Purple JSON parser.
+ template <typename unused = void>
+ struct globals_struct {
++ static const unsigned char parse_flags[256];
++};
++typedef globals_struct<> globals;
++
+ // clang-format off
+
+ // bit 0 (1) - set if: plain ASCII string character
+ // bit 1 (2) - set if: whitespace
+ // bit 4 (0x10) - set if: 0-9 e E .
+- constexpr static const uint8_t parse_flags[256] = {
++ template <typename unused>
++ const unsigned char globals_struct<unused>::parse_flags[256] = {
+ // 0 1 2 3 4 5 6 7 8 9 A B C D E F
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 2, 0, 0, // 0
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 1
+@@ -162,15 +167,13 @@ struct globals_struct {
+ };
+
+ // clang-format on
+-};
+-typedef globals_struct<> globals;
+
+-constexpr inline bool is_plain_string_character(char c) {
++inline bool is_plain_string_character(char c) {
+ // return c >= 0x20 && c <= 0x7f && c != 0x22 && c != 0x5c;
+ return (globals::parse_flags[static_cast<unsigned char>(c)] & 1) != 0;
+ }
+
+-constexpr inline bool is_whitespace(char c) {
++inline bool is_whitespace(char c) {
+ // return c == '\r' || c == '\n' || c == '\t' || c == ' ';
+ return (globals::parse_flags[static_cast<unsigned char>(c)] & 2) != 0;
+ }
diff --git a/gnu/packages/patches/sajson-for-gemmi-numbers-as-strings.patch b/gnu/packages/patches/sajson-for-gemmi-numbers-as-strings.patch
new file mode 100644
index 0000000000..6f476b8583
--- /dev/null
+++ b/gnu/packages/patches/sajson-for-gemmi-numbers-as-strings.patch
@@ -0,0 +1,195 @@
+Patch for gemmi: keep numbers in JSON files as strings.
+
+Adapted from this commit of the bundled fork of sajson in gemmi:
+https://github.com/project-gemmi/gemmi/commit/fccbca4f6040364ba708613e1429c2251872240d
+
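+The idea of the change below is that a numeric node no longer stores a
+decoded int/double; instead it records the [start, end) offsets of the
+number in the original JSON text, and the string accessors accept numeric
+tags too.  A condensed, hypothetical sketch of that storage scheme (not
+sajson's actual class, just the pattern):
+
+    #include <cstddef>
+    #include <string>
+
+    struct number_view {
+        const char* text;        // the original JSON buffer
+        std::size_t payload[2];  // payload[0] = start offset, payload[1] = end offset
+
+        // Return the number exactly as it appeared in the input.
+        std::string as_string() const {
+            return std::string(text + payload[0], text + payload[1]);
+        }
+    };
+
+    int main() {
+        const char* json = "{\"x\": 12.5}";
+        number_view v{json, {6, 10}};        // offsets of "12.5" in the buffer
+        return v.as_string() == "12.5" ? 0 : 1;
+    }
+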
+diff -ur a/include/sajson.h b/include/sajson.h
+--- a/include/sajson.h
++++ b/include/sajson.h
+@@ -411,43 +411,6 @@
+ };
+ } // namespace internal
+
+-namespace integer_storage {
+-enum { word_length = 1 };
+-
+-inline int load(const size_t* location) {
+- int value;
+- memcpy(&value, location, sizeof(value));
+- return value;
+-}
+-
+-inline void store(size_t* location, int value) {
+- // NOTE: Most modern compilers optimize away this constant-size
+- // memcpy into a single instruction. If any don't, and treat
+- // punning through a union as legal, they can be special-cased.
+- static_assert(
+- sizeof(value) <= sizeof(*location),
+- "size_t must not be smaller than int");
+- memcpy(location, &value, sizeof(value));
+-}
+-} // namespace integer_storage
+-
+-namespace double_storage {
+-enum { word_length = sizeof(double) / sizeof(size_t) };
+-
+-inline double load(const size_t* location) {
+- double value;
+- memcpy(&value, location, sizeof(double));
+- return value;
+-}
+-
+-inline void store(size_t* location, double value) {
+- // NOTE: Most modern compilers optimize away this constant-size
+- // memcpy into a single instruction. If any don't, and treat
+- // punning through a union as legal, they can be special-cased.
+- memcpy(location, &value, sizeof(double));
+-}
+-} // namespace double_storage
+-
+ /// Represents a JSON value. First, call get_type() to check its type,
+ /// which determines which methods are available.
+ ///
+@@ -585,70 +548,10 @@
+ return length;
+ }
+
+- /// If a numeric value was parsed as a 32-bit integer, returns it.
+- /// Only legal if get_type() is TYPE_INTEGER.
+- int get_integer_value() const {
+- assert_tag(tag::integer);
+- return integer_storage::load(payload);
+- }
+-
+- /// If a numeric value was parsed as a double, returns it.
+- /// Only legal if get_type() is TYPE_DOUBLE.
+- double get_double_value() const {
+- assert_tag(tag::double_);
+- return double_storage::load(payload);
+- }
+-
+- /// Returns a numeric value as a double-precision float.
+- /// Only legal if get_type() is TYPE_INTEGER or TYPE_DOUBLE.
+- double get_number_value() const {
+- assert_tag_2(tag::integer, tag::double_);
+- if (value_tag == tag::integer) {
+- return get_integer_value();
+- } else {
+- return get_double_value();
+- }
+- }
+-
+- /// Returns true and writes to the output argument if the numeric value
+- /// fits in a 53-bit integer. This is useful for timestamps and other
+- /// situations where integral values with greater than 32-bit precision
+- /// are used, as 64-bit values are not understood by all JSON
+- /// implementations or languages.
+- /// Returns false if the value is not an integer or not in range.
+- /// Only legal if get_type() is TYPE_INTEGER or TYPE_DOUBLE.
+- bool get_int53_value(int64_t* out) const {
+- // Make sure the output variable is always defined to avoid any
+- // possible situation like
+- // https://gist.github.com/chadaustin/2c249cb850619ddec05b23ca42cf7a18
+- *out = 0;
+-
+- assert_tag_2(tag::integer, tag::double_);
+- switch (value_tag) {
+- case tag::integer:
+- *out = get_integer_value();
+- return true;
+- case tag::double_: {
+- double v = get_double_value();
+- if (v < -(1LL << 53) || v > (1LL << 53)) {
+- return false;
+- }
+- int64_t as_int = static_cast<int64_t>(v);
+- if (as_int != v) {
+- return false;
+- }
+- *out = as_int;
+- return true;
+- }
+- default:
+- return false;
+- }
+- }
+-
+ /// Returns the length of the string.
+ /// Only legal if get_type() is TYPE_STRING.
+ size_t get_string_length() const {
+- assert_tag(tag::string);
++ assert_tag_3(tag::string, tag::integer, tag::double_);
+ return payload[1] - payload[0];
+ }
+
+@@ -659,7 +562,7 @@
+ /// embedded NULs.
+ /// Only legal if get_type() is TYPE_STRING.
+ const char* as_cstring() const {
+- assert_tag(tag::string);
++ assert_tag_3(tag::string, tag::integer, tag::double_);
+ return text + payload[0];
+ }
+
+@@ -667,7 +570,7 @@
+ /// Returns a string's value as a std::string.
+ /// Only legal if get_type() is TYPE_STRING.
+ std::string as_string() const {
+- assert_tag(tag::string);
++ assert_tag_3(tag::string, tag::integer, tag::double_);
+ return std::string(text + payload[0], text + payload[1]);
+ }
+ #endif
+@@ -690,6 +593,10 @@
+ assert(e1 == value_tag || e2 == value_tag);
+ }
+
++ void assert_tag_3(tag e1, tag e2, tag e3) const {
++ assert(e1 == value_tag || e2 == value_tag || e3 == value_tag);
++ }
++
+ void assert_in_bounds(size_t i) const { assert(i < get_length()); }
+
+ const tag value_tag;
+@@ -2059,6 +1966,8 @@
+ std::pair<char*, internal::tag> parse_number(char* p) {
+ using internal::tag;
+
++ size_t start = p - input.get_data();
++
+ // Assume 32-bit, two's complement integers.
+ static constexpr unsigned RISKY = INT_MAX / 10u;
+ unsigned max_digit_after_risky = INT_MAX % 10u;
+@@ -2235,23 +2144,18 @@
+ u = 0u - u;
+ }
+ }
++
++ bool success;
++ size_t* out = allocator.reserve(2, &success);
++ if (SAJSON_UNLIKELY(!success)) {
++ return std::make_pair(oom(p, "number"), tag::null);
++ }
++ out[0] = start;
++ out[1] = p - input.get_data();
++
+ if (try_double) {
+- bool success;
+- size_t* out
+- = allocator.reserve(double_storage::word_length, &success);
+- if (SAJSON_UNLIKELY(!success)) {
+- return std::make_pair(oom(p, "double"), tag::null);
+- }
+- double_storage::store(out, d);
+ return std::make_pair(p, tag::double_);
+ } else {
+- bool success;
+- size_t* out
+- = allocator.reserve(integer_storage::word_length, &success);
+- if (SAJSON_UNLIKELY(!success)) {
+- return std::make_pair(oom(p, "integer"), tag::null);
+- }
+- integer_storage::store(out, static_cast<int>(u));
+ return std::make_pair(p, tag::integer);
+ }
+ }
diff --git a/gnu/packages/patches/scotch-build-parallelism.patch b/gnu/packages/patches/scotch-build-parallelism.patch
deleted file mode 100644
index a669f1dbeb..0000000000
--- a/gnu/packages/patches/scotch-build-parallelism.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-Fixes make with -j, otherwise scotch.h may not be generated by the time make
-tries to compile library_graph_diam.o et al.
-
-Reported upstream at
-https://gforge.inria.fr/tracker/index.php?func=detail&aid=21651&group_id=248&atid=1081
-
-diff --git a/src/libscotch/Makefile b/src/libscotch/Makefile
-index 9898894..b0ee14c 100644
---- a/src/libscotch/Makefile
-+++ b/src/libscotch/Makefile
-@@ -2229,6 +2229,28 @@ library_graph_color_f$(OBJ) : library_graph_color_f.c \
- common.h \
- scotch.h
-
-+library_graph_diam$(OBJ) : library_graph_diam.c \
-+ module.h \
-+ common.h \
-+ graph.h \
-+ scotch.h
-+
-+library_graph_diam_f$(OBJ) : library_graph_diam.c \
-+ module.h \
-+ common.h \
-+ scotch.h
-+
-+library_graph_induce$(OBJ) : library_graph_diam.c \
-+ module.h \
-+ common.h \
-+ graph.h \
-+ scotch.h
-+
-+library_graph_induce_f$(OBJ) : library_graph_diam.c \
-+ module.h \
-+ common.h \
-+ scotch.h
-+
- library_graph_io_chac$(OBJ) : library_graph_io_chac.c \
- module.h \
- common.h \
diff --git a/gnu/packages/patches/scotch-integer-declarations.patch b/gnu/packages/patches/scotch-integer-declarations.patch
deleted file mode 100644
index 978625c1c0..0000000000
--- a/gnu/packages/patches/scotch-integer-declarations.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-diff --git a/src/libscotch/library.h b/src/libscotch/library.h
-index 1891c19..ecc0533 100644
---- a/src/libscotch/library.h
-+++ b/src/libscotch/library.h
-@@ -67,6 +67,8 @@
-
- /*+ Integer type. +*/
-
-+#include <stdint.h>
-+
- typedef DUMMYIDX SCOTCH_Idx;
-
- typedef DUMMYINT SCOTCH_Num;
-diff --git a/src/libscotchmetis/library_metis.h b/src/libscotchmetis/library_metis.h
-index e6767e1..04e71c3 100644
---- a/src/libscotchmetis/library_metis.h
-+++ b/src/libscotchmetis/library_metis.h
-@@ -99,6 +99,7 @@ typedef enum {
- */
-
- #ifndef SCOTCH_H /* In case "scotch.h" not included before */
-+#include <stdint.h>
- typedef DUMMYINT SCOTCH_Num;
- #endif /* SCOTCH_H */
-
-diff --git a/src/libscotchmetis/library_parmetis.h b/src/libscotchmetis/library_parmetis.h
-index 6d2f0b0..3c803fc 100644
---- a/src/libscotchmetis/library_parmetis.h
-+++ b/src/libscotchmetis/library_parmetis.h
-@@ -106,6 +106,7 @@ typedef enum {
- */
-
- #ifndef SCOTCH_H /* In case "scotch.h" not included before */
-+#include <stdint.h>
- typedef DUMMYINT SCOTCH_Num;
- #endif /* SCOTCH_H */
-
diff --git a/gnu/packages/patches/spectre-meltdown-checker-externalize-fwdb.patch b/gnu/packages/patches/spectre-meltdown-checker-externalize-fwdb.patch
new file mode 100644
index 0000000000..cce70b880f
--- /dev/null
+++ b/gnu/packages/patches/spectre-meltdown-checker-externalize-fwdb.patch
@@ -0,0 +1,244 @@
+From 340b08737e552c3c186863d76d123808d853a159 Mon Sep 17 00:00:00 2001
+From: Hilton Chain <hako@ultrarare.space>
+Date: Sat, 12 Nov 2022 22:45:24 +0800
+Subject: [PATCH] Replace fwdb downloader with a local file option.
+
+Also warn about non-free software.
+---
+ spectre-meltdown-checker.sh | 180 +++---------------------------------
+ 1 file changed, 15 insertions(+), 165 deletions(-)
+
+diff --git a/spectre-meltdown-checker.sh b/spectre-meltdown-checker.sh
+index 30f760c..ce46970 100755
+--- a/spectre-meltdown-checker.sh
++++ b/spectre-meltdown-checker.sh
+@@ -22,8 +22,6 @@ exit_cleanup()
+ [ -n "${dumped_config:-}" ] && [ -f "$dumped_config" ] && rm -f "$dumped_config"
+ [ -n "${kerneltmp:-}" ] && [ -f "$kerneltmp" ] && rm -f "$kerneltmp"
+ [ -n "${kerneltmp2:-}" ] && [ -f "$kerneltmp2" ] && rm -f "$kerneltmp2"
+- [ -n "${mcedb_tmp:-}" ] && [ -f "$mcedb_tmp" ] && rm -f "$mcedb_tmp"
+- [ -n "${intel_tmp:-}" ] && [ -d "$intel_tmp" ] && rm -rf "$intel_tmp"
+ [ "${mounted_debugfs:-}" = 1 ] && umount /sys/kernel/debug 2>/dev/null
+ [ "${mounted_procfs:-}" = 1 ] && umount "$procfs" 2>/dev/null
+ [ "${insmod_cpuid:-}" = 1 ] && rmmod cpuid 2>/dev/null
+@@ -93,9 +91,9 @@ show_usage()
+ --vmm [auto,yes,no] override the detection of the presence of a hypervisor, default: auto
+ --allow-msr-write allow probing for write-only MSRs, this might produce kernel logs or be blocked by your system
+ --cpu [#,all] interact with CPUID and MSR of CPU core number #, or all (default: CPU core 0)
+- --update-fwdb update our local copy of the CPU microcodes versions database (using the awesome
+- MCExtractor project and the Intel firmwares GitHub repository)
+- --update-builtin-fwdb same as --update-fwdb but update builtin DB inside the script itself
++ --with-fwdb FILE read CPU microcode version information from FILE
++ Note that most CPU microcode is distributed as binaries without source -- relying on
++ such non-free firmware as sole protection against security vulnerabilities is ill-advised.
+ --dump-mock-data used to mimick a CPU on an other system, mainly used to help debugging this script
+
+ Return codes:
+@@ -837,147 +833,6 @@ show_header()
+ _info
+ }
+
+-[ -z "$HOME" ] && HOME="$(getent passwd "$(whoami)" | cut -d: -f6)"
+-mcedb_cache="$HOME/.mcedb"
+-update_fwdb()
+-{
+- show_header
+-
+- set -e
+-
+- if [ -r "$mcedb_cache" ]; then
+- previous_dbversion=$(awk '/^# %%% MCEDB / { print $4 }' "$mcedb_cache")
+- fi
+-
+- # first, download the MCE.db from the excellent platomav's MCExtractor project
+- mcedb_tmp="$(mktemp -t smc-mcedb-XXXXXX)"
+- mcedb_url='https://github.com/platomav/MCExtractor/raw/master/MCE.db'
+- _info_nol "Fetching MCE.db from the MCExtractor project... "
+- if command -v wget >/dev/null 2>&1; then
+- wget -q "$mcedb_url" -O "$mcedb_tmp"; ret=$?
+- elif command -v curl >/dev/null 2>&1; then
+- curl -sL "$mcedb_url" -o "$mcedb_tmp"; ret=$?
+- elif command -v fetch >/dev/null 2>&1; then
+- fetch -q "$mcedb_url" -o "$mcedb_tmp"; ret=$?
+- else
+- echo ERROR "please install one of \`wget\`, \`curl\` of \`fetch\` programs"
+- return 1
+- fi
+- if [ "$ret" != 0 ]; then
+- echo ERROR "error $ret while downloading MCE.db"
+- return $ret
+- fi
+- echo DONE
+-
+- # second, get the Intel firmwares from GitHub
+- intel_tmp="$(mktemp -d -t smc-intelfw-XXXXXX)"
+- intel_url="https://github.com/intel/Intel-Linux-Processor-Microcode-Data-Files/archive/main.zip"
+- _info_nol "Fetching Intel firmwares... "
+- ## https://github.com/intel/Intel-Linux-Processor-Microcode-Data-Files.git
+- if command -v wget >/dev/null 2>&1; then
+- wget -q "$intel_url" -O "$intel_tmp/fw.zip"; ret=$?
+- elif command -v curl >/dev/null 2>&1; then
+- curl -sL "$intel_url" -o "$intel_tmp/fw.zip"; ret=$?
+- elif command -v fetch >/dev/null 2>&1; then
+- fetch -q "$intel_url" -o "$intel_tmp/fw.zip"; ret=$?
+- else
+- echo ERROR "please install one of \`wget\`, \`curl\` of \`fetch\` programs"
+- return 1
+- fi
+- if [ "$ret" != 0 ]; then
+- echo ERROR "error $ret while downloading Intel firmwares"
+- return $ret
+- fi
+- echo DONE
+-
+- # now extract MCEdb contents using sqlite
+- _info_nol "Extracting MCEdb data... "
+- if ! command -v sqlite3 >/dev/null 2>&1; then
+- echo ERROR "please install the \`sqlite3\` program"
+- return 1
+- fi
+- mcedb_revision=$(sqlite3 "$mcedb_tmp" "select revision from MCE")
+- if [ -z "$mcedb_revision" ]; then
+- echo ERROR "downloaded file seems invalid"
+- return 1
+- fi
+- sqlite3 "$mcedb_tmp" "alter table Intel add column origin text"
+- sqlite3 "$mcedb_tmp" "update Intel set origin='mce'"
+-
+- echo OK "MCExtractor database revision $mcedb_revision"
+-
+- # parse Intel firmwares to get their versions
+- _info_nol "Integrating Intel firmwares data to db... "
+- if ! command -v unzip >/dev/null 2>&1; then
+- echo ERROR "please install the \`unzip\` program"
+- return 1
+- fi
+- ( cd "$intel_tmp" && unzip fw.zip >/dev/null; )
+- if ! [ -d "$intel_tmp/Intel-Linux-Processor-Microcode-Data-Files-main/intel-ucode" ]; then
+- echo ERROR "expected the 'intel-ucode' folder in the downloaded zip file"
+- return 1
+- fi
+-
+- if ! command -v iucode_tool >/dev/null 2>&1; then
+- if ! command -v iucode-tool >/dev/null 2>&1; then
+- echo ERROR "please install the \`iucode-tool\` program"
+- return 1
+- else
+- iucode_tool="iucode-tool"
+- fi
+- else
+- iucode_tool="iucode_tool"
+- fi
+- # 079/001: sig 0x000106c2, pf_mask 0x01, 2009-04-10, rev 0x0217, size 5120
+- # 078/004: sig 0x000106ca, pf_mask 0x10, 2009-08-25, rev 0x0107, size 5120
+- $iucode_tool -l "$intel_tmp/Intel-Linux-Processor-Microcode-Data-Files-main/intel-ucode" | grep -wF sig | while read -r _line
+- do
+- _line=$( echo "$_line" | tr -d ',')
+- _cpuid=$( echo "$_line" | awk '{print $3}')
+- _cpuid=$(( _cpuid ))
+- _cpuid=$(printf "0x%08X" "$_cpuid")
+- _date=$( echo "$_line" | awk '{print $6}' | tr -d '-')
+- _version=$(echo "$_line" | awk '{print $8}')
+- _version=$(( _version ))
+- _version=$(printf "0x%08X" "$_version")
+- _sqlstm="$(printf "INSERT INTO Intel (origin,cpuid,version,yyyymmdd) VALUES (\"%s\",\"%s\",\"%s\",\"%s\");" "intel" "$(printf "%08X" "$_cpuid")" "$(printf "%08X" "$_version")" "$_date")"
+- sqlite3 "$mcedb_tmp" "$_sqlstm"
+- done
+- _intel_timestamp=$(stat -c %Y "$intel_tmp/Intel-Linux-Processor-Microcode-Data-Files-main/license" 2>/dev/null)
+- if [ -n "$_intel_timestamp" ]; then
+- # use this date, it matches the last commit date
+- _intel_latest_date=$(date +%Y%m%d -d @"$_intel_timestamp")
+- else
+- echo "Falling back to the latest microcode date"
+- _intel_latest_date=$(sqlite3 "$mcedb_tmp" "SELECT yyyymmdd from Intel WHERE origin = 'intel' ORDER BY yyyymmdd DESC LIMIT 1;")
+- fi
+- echo DONE "(version $_intel_latest_date)"
+-
+- dbversion="$mcedb_revision+i$_intel_latest_date"
+-
+- if [ "$1" != builtin ] && [ -n "$previous_dbversion" ] && [ "$previous_dbversion" = "v$dbversion" ]; then
+- echo "We already have this version locally, no update needed"
+- return 0
+- fi
+-
+- _info_nol "Building local database... "
+- {
+- echo "# Spectre & Meltdown Checker";
+- echo "# %%% MCEDB v$dbversion";
+- sqlite3 "$mcedb_tmp" "SELECT '# I,0x'||t1.cpuid||',0x'||MAX(t1.version)||','||t1.yyyymmdd FROM Intel AS t1 LEFT OUTER JOIN Intel AS t2 ON t2.cpuid=t1.cpuid AND t2.yyyymmdd > t1.yyyymmdd WHERE t2.yyyymmdd IS NULL GROUP BY t1.cpuid ORDER BY t1.cpuid ASC;" | grep -v '^# .,0x00000000,';
+- sqlite3 "$mcedb_tmp" "SELECT '# A,0x'||t1.cpuid||',0x'||MAX(t1.version)||','||t1.yyyymmdd FROM AMD AS t1 LEFT OUTER JOIN AMD AS t2 ON t2.cpuid=t1.cpuid AND t2.yyyymmdd > t1.yyyymmdd WHERE t2.yyyymmdd IS NULL GROUP BY t1.cpuid ORDER BY t1.cpuid ASC;" | grep -v '^# .,0x00000000,';
+- } > "$mcedb_cache"
+- echo DONE "(version $dbversion)"
+-
+- if [ "$1" = builtin ]; then
+- newfile=$(mktemp -t smc-builtin-XXXXXX)
+- awk '/^# %%% MCEDB / { exit }; { print }' "$0" > "$newfile"
+- awk '{ if (NR>1) { print } }' "$mcedb_cache" >> "$newfile"
+- cat "$newfile" > "$0"
+- rm -f "$newfile"
+- fi
+-}
+-
+ parse_opt_file()
+ {
+ # parse_opt_file option_name option_value
+@@ -1067,12 +922,15 @@ while [ -n "${1:-}" ]; do
+ # deprecated, kept for compatibility
+ opt_explain=0
+ shift
+- elif [ "$1" = "--update-fwdb" ] || [ "$1" = "--update-mcedb" ]; then
+- update_fwdb
+- exit $?
+- elif [ "$1" = "--update-builtin-fwdb" ] || [ "$1" = "--update-builtin-mcedb" ]; then
+- update_fwdb builtin
+- exit $?
++ elif [ "$1" = "--with-fwdb" ] || [ "$1" = "--with-mcedb" ]; then
++ opt_fwdb=$2
++ if [ -f "$opt_fwdb" ]; then
++ mcedb_cache=$2
++ else
++ echo "$0: error: --with-fwdb should be a file, got '$opt_fwdb'" >&2
++ exit 255
++ fi
++ shift 2
+ elif [ "$1" = "--dump-mock-data" ]; then
+ opt_mock=1
+ shift
+@@ -2033,21 +1891,11 @@ is_xen_domU()
+ fi
+ }
+
+-builtin_dbversion=$(awk '/^# %%% MCEDB / { print $4 }' "$0")
+ if [ -r "$mcedb_cache" ]; then
+ # we have a local cache file, but it might be older than the builtin version we have
+ local_dbversion=$( awk '/^# %%% MCEDB / { print $4 }' "$mcedb_cache")
+- # sort -V sorts by version number
+- older_dbversion=$(printf "%b\n%b" "$local_dbversion" "$builtin_dbversion" | sort -V | head -n1)
+- if [ "$older_dbversion" = "$builtin_dbversion" ]; then
+- mcedb_source="$mcedb_cache"
+- mcedb_info="local firmwares DB $local_dbversion"
+- fi
+-fi
+-# if mcedb_source is not set, either we don't have a local cached db, or it is older than the builtin db
+-if [ -z "${mcedb_source:-}" ]; then
+- mcedb_source="$0"
+- mcedb_info="builtin firmwares DB $builtin_dbversion"
++ mcedb_source="$mcedb_cache"
++ mcedb_info="local firmwares DB $local_dbversion"
+ fi
+ read_mcedb()
+ {
+@@ -2063,7 +1911,9 @@ is_latest_known_ucode()
+ return 2
+ fi
+ ucode_latest="latest microcode version for your CPU model is unknown"
+- if is_intel; then
++ if [ -z "$mcedb_source" ]; then
++ return 2
++ elif is_intel; then
+ cpu_brand_prefix=I
+ elif is_amd; then
+ cpu_brand_prefix=A
+--
+2.38.1
+
diff --git a/gnu/packages/patches/spectre-meltdown-checker-find-kernel.patch b/gnu/packages/patches/spectre-meltdown-checker-find-kernel.patch
new file mode 100644
index 0000000000..c0e24d8eed
--- /dev/null
+++ b/gnu/packages/patches/spectre-meltdown-checker-find-kernel.patch
@@ -0,0 +1,26 @@
+From 5b757d930ec0cf102b03fb9817d17e06c72e74b3 Mon Sep 17 00:00:00 2001
+From: Hilton Chain <hako@ultrarare.space>
+Date: Sat, 5 Nov 2022 23:22:31 +0800
+Subject: [PATCH] Locate the kernel bzimage used by Guix System
+
+---
+ spectre-meltdown-checker.sh | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/spectre-meltdown-checker.sh b/spectre-meltdown-checker.sh
+index 248a444..855a090 100755
+--- a/spectre-meltdown-checker.sh
++++ b/spectre-meltdown-checker.sh
+@@ -2251,6 +2251,8 @@ if [ "$opt_live" = 1 ]; then
+ [ -e "/boot/kernel-genkernel-$(uname -m)-$(uname -r)" ] && opt_kernel="/boot/kernel-genkernel-$(uname -m)-$(uname -r)"
+ # NixOS:
+ [ -e "/run/booted-system/kernel" ] && opt_kernel="/run/booted-system/kernel"
++ # Guix System:
++ [ -e "/run/booted-system/kernel/bzImage" ] && opt_kernel="/run/booted-system/kernel/bzImage"
+ # systemd kernel-install:
+ [ -e "/etc/machine-id" ] && [ -e "/boot/$(cat /etc/machine-id)/$(uname -r)/linux" ] && opt_kernel="/boot/$(cat /etc/machine-id)/$(uname -r)/linux"
+ # Clear Linux:
+
+base-commit: a6c943d38f315f339697ec26e7374a09b88f2183
+--
+2.38.0
diff --git a/gnu/packages/patches/sssd-optional-systemd.patch b/gnu/packages/patches/sssd-optional-systemd.patch
deleted file mode 100644
index 0784fdc7aa..0000000000
--- a/gnu/packages/patches/sssd-optional-systemd.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-Allow running sss_analyze without Python modules for systemd.
-Upstream PR: https://github.com/SSSD/sssd/pull/6125
-
-diff --git a/src/tools/analyzer/modules/request.py b/src/tools/analyzer/modules/request.py
-index b96a23c05..28ac2f194 100644
---- a/src/tools/analyzer/modules/request.py
-+++ b/src/tools/analyzer/modules/request.py
-@@ -1,8 +1,6 @@
- import re
- import logging
-
--from sssd.source_files import Files
--from sssd.source_journald import Journald
- from sssd.parser import SubparsersAction
- from sssd.parser import Option
-
-@@ -77,8 +75,10 @@ class RequestAnalyzer:
- Instantiated source object
- """
- if args.source == "journald":
-+ from sssd.source_journald import Journald
- source = Journald()
- else:
-+ from sssd.source_files import Files
- source = Files(args.logdir)
- return source
-
-@@ -143,7 +143,7 @@ class RequestAnalyzer:
- self.consumed_logs.append(line.rstrip(line[-1]))
- else:
- # files source includes newline
-- if isinstance(source, Files):
-+ if type(source).__name__ == 'Files':
- print(line, end='')
- else:
- print(line)
-@@ -225,7 +225,7 @@ class RequestAnalyzer:
- source.set_component(component, False)
- self.done = ""
- for line in self.matched_line(source, patterns):
-- if isinstance(source, Journald):
-+ if type(source).__name__ == 'Journald':
- print(line)
- else:
- self.print_formatted(line, args.verbose)
diff --git a/gnu/packages/patches/tbb-fix-test-on-aarch64.patch b/gnu/packages/patches/tbb-fix-test-on-aarch64.patch
deleted file mode 100644
index 3a6003591e..0000000000
--- a/gnu/packages/patches/tbb-fix-test-on-aarch64.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From 013035b4e9af39f506e87ae6b755c3363e768d4d Mon Sep 17 00:00:00 2001
-From: Vladislav Shchapov <phprus@gmail.com>
-Date: Thu, 23 Dec 2021 19:17:24 +0500
-Subject: [PATCH] Fix issue #687 (test_eh_thread) (#697)
-
-Signed-off-by: Vladislav Shchapov <phprus@gmail.com>
----
- test/tbb/test_eh_thread.cpp | 4 +++-
- 1 file changed, 3 insertions(+), 1 deletion(-)
-
-diff --git a/test/tbb/test_eh_thread.cpp b/test/tbb/test_eh_thread.cpp
-index aa6d764d..af291f48 100644
---- a/test/tbb/test_eh_thread.cpp
-+++ b/test/tbb/test_eh_thread.cpp
-@@ -36,6 +36,7 @@
- // TODO: enable limitThreads with sanitizer under docker
- #if TBB_USE_EXCEPTIONS && !_WIN32 && !__ANDROID__
-
-+#include <limits.h>
- #include <sys/types.h>
- #include <sys/time.h>
- #include <sys/resource.h>
-@@ -73,7 +74,8 @@ public:
- mValid = false;
- pthread_attr_t attr;
- // Limit the stack size not to consume all virtual memory on 32 bit platforms.
-- if (pthread_attr_init(&attr) == 0 && pthread_attr_setstacksize(&attr, 100*1024) == 0) {
-+ std::size_t stacksize = utils::max(128*1024, PTHREAD_STACK_MIN);
-+ if (pthread_attr_init(&attr) == 0 && pthread_attr_setstacksize(&attr, stacksize) == 0) {
- mValid = pthread_create(&mHandle, &attr, thread_routine, /* arg = */ nullptr) == 0;
- }
- }
---
-2.34.0
-
diff --git a/gnu/packages/patches/tbb-other-arches.patch b/gnu/packages/patches/tbb-other-arches.patch
index b649eb3eb8..0ebfde165c 100644
--- a/gnu/packages/patches/tbb-other-arches.patch
+++ b/gnu/packages/patches/tbb-other-arches.patch
@@ -1,8 +1,8 @@
diff --git a/src/tbb/tools_api/ittnotify_config.h b/src/tbb/tools_api/ittnotify_config.h
-index 8ecc2378..0cf26f85 100644
+index f904a8e9..405f92e6 100644
--- a/src/tbb/tools_api/ittnotify_config.h
+++ b/src/tbb/tools_api/ittnotify_config.h
-@@ -159,6 +159,11 @@
+@@ -163,6 +163,11 @@
# define ITT_ARCH_ARM64 6
#endif /* ITT_ARCH_ARM64 */
@@ -14,7 +14,7 @@ index 8ecc2378..0cf26f85 100644
#ifndef ITT_ARCH
# if defined _M_IX86 || defined __i386__
# define ITT_ARCH ITT_ARCH_IA32
-@@ -172,6 +177,8 @@
+@@ -176,6 +181,8 @@
# define ITT_ARCH ITT_ARCH_ARM64
# elif defined __powerpc64__
# define ITT_ARCH ITT_ARCH_PPC64
@@ -24,7 +24,7 @@ index 8ecc2378..0cf26f85 100644
#endif
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
-index 9534dce8..51a79331 100644
+index 2aa669c9..f915e252 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -387,7 +387,10 @@ if (TARGET TBB::tbb)
diff --git a/gnu/packages/patches/telegram-desktop-allow-disable-libtgvoip.patch b/gnu/packages/patches/telegram-desktop-allow-disable-libtgvoip.patch
new file mode 100644
index 0000000000..3c062cbd9a
--- /dev/null
+++ b/gnu/packages/patches/telegram-desktop-allow-disable-libtgvoip.patch
@@ -0,0 +1,125 @@
+From 4d1a8351ee82728912fcf7ad0070049b2910c393 Mon Sep 17 00:00:00 2001
+From: Klemens Nanni <klemens@posteo.de>
+Date: Wed, 2 Mar 2022 01:07:48 +0100
+Subject: [PATCH] Introduce TDESKTOP_DISABLE_LEGACY_TGVOIP
+
+Originally from Alt Linux[0], OpenBSD has so far adapted the removal of
+tgvoip in the official net/tdesktop build.
+
+tgcalls provides everything needed for calls; audio/video/desktop
+sharing calls have been working fine across different operating systems
+and telegram desktop/mobile versions without problems.
+
+0: http://www.sisyphus.ru/cgi-bin/srpm.pl/Sisyphus/telegram-desktop/getpatch/1
+---
+ Telegram/CMakeLists.txt | 6 +++---
+ Telegram/SourceFiles/calls/calls_call.cpp | 6 ++++++
+ Telegram/cmake/lib_tgcalls.cmake | 4 ++++
+ Telegram/cmake/telegram_options.cmake | 8 ++++++++
+ 4 files changed, 21 insertions(+), 3 deletions(-)
+
+diff --git a/Telegram/CMakeLists.txt b/Telegram/CMakeLists.txt
+index fb2bf370f..5d9578f2d 100644
+--- a/Telegram/CMakeLists.txt
++++ b/Telegram/CMakeLists.txt
+@@ -28,7 +28,9 @@ get_filename_component(res_loc Resources REALPATH)
+ include(cmake/telegram_options.cmake)
+ include(cmake/lib_ffmpeg.cmake)
+ include(cmake/lib_stripe.cmake)
+-include(cmake/lib_tgvoip.cmake)
++if (NOT TDESKTOP_DISABLE_LEGACY_TGVOIP)
++ include(cmake/lib_tgvoip.cmake)
++endif()
+ include(cmake/lib_tgcalls.cmake)
+ include(cmake/td_export.cmake)
+ include(cmake/td_mtproto.cmake)
+@@ -52,9 +54,7 @@ target_prepare_qrc(Telegram)
+
+ target_link_libraries(Telegram
+ PRIVATE
+- tdesktop::lib_tgcalls_legacy
+ tdesktop::lib_tgcalls
+- tdesktop::lib_tgvoip
+
+ # Order in this list defines the order of include paths in command line.
+ # We need to place desktop-app::external_minizip this early to have its
+diff --git a/Telegram/SourceFiles/calls/calls_call.cpp b/Telegram/SourceFiles/calls/calls_call.cpp
+index 6894d5d90..cd03620e7 100644
+--- a/Telegram/SourceFiles/calls/calls_call.cpp
++++ b/Telegram/SourceFiles/calls/calls_call.cpp
+@@ -39,8 +39,10 @@ class InstanceImpl;
+ class InstanceV2Impl;
+ class InstanceV2ReferenceImpl;
+ class InstanceV2_4_0_0Impl;
++#ifndef TDESKTOP_DISABLE_LEGACY_TGVOIP
+ class InstanceImplLegacy;
+ void SetLegacyGlobalServerConfig(const std::string &serverConfig);
++#endif
+ } // namespace tgcalls
+
+ namespace Calls {
+@@ -56,7 +58,9 @@ const auto Register = tgcalls::Register<tgcalls::InstanceImpl>();
+ const auto RegisterV2 = tgcalls::Register<tgcalls::InstanceV2Impl>();
+ const auto RegV2Ref = tgcalls::Register<tgcalls::InstanceV2ReferenceImpl>();
+ const auto RegisterV240 = tgcalls::Register<tgcalls::InstanceV2_4_0_0Impl>();
++#ifndef TDESKTOP_DISABLE_LEGACY_TGVOIP
+ const auto RegisterLegacy = tgcalls::Register<tgcalls::InstanceImplLegacy>();
++#endif
+
+ [[nodiscard]] base::flat_set<int64> CollectEndpointIds(
+ const QVector<MTPPhoneConnection> &list) {
+@@ -1322,7 +1326,9 @@ Call::~Call() {
+ }
+
+ void UpdateConfig(const std::string &data) {
++#ifndef TDESKTOP_DISABLE_LEGACY_TGVOIP
+ tgcalls::SetLegacyGlobalServerConfig(data);
++#endif
+ }
+
+ } // namespace Calls
+diff --git a/Telegram/cmake/lib_tgcalls.cmake b/Telegram/cmake/lib_tgcalls.cmake
+index 34a5ba418..8a784be2c 100644
+--- a/Telegram/cmake/lib_tgcalls.cmake
++++ b/Telegram/cmake/lib_tgcalls.cmake
+@@ -267,6 +267,10 @@ PRIVATE
+ ${tgcalls_loc}
+ )
+
++if (TDESKTOP_DISABLE_LEGACY_TGVOIP)
++ return()
++endif()
++
+ add_library(lib_tgcalls_legacy STATIC)
+ init_target(lib_tgcalls_legacy)
+
+diff --git a/Telegram/cmake/telegram_options.cmake b/Telegram/cmake/telegram_options.cmake
+index 1c3c25431..033f2bc95 100644
+--- a/Telegram/cmake/telegram_options.cmake
++++ b/Telegram/cmake/telegram_options.cmake
+@@ -4,7 +4,9 @@
+ # For license and copyright information please follow this link:
+ # https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
+
++option(TDESKTOP_DISABLE_LEGACY_TGVOIP "Disable legacy tgvoip support." OFF)
+ option(TDESKTOP_API_TEST "Use test API credentials." OFF)
++
+ set(TDESKTOP_API_ID "0" CACHE STRING "Provide 'api_id' for the Telegram API access.")
+ set(TDESKTOP_API_HASH "" CACHE STRING "Provide 'api_hash' for the Telegram API access.")
+
+@@ -40,6 +42,12 @@ if (TDESKTOP_API_ID STREQUAL "0" OR TDESKTOP_API_HASH STREQUAL "")
+ " ")
+ endif()
+
++if (TDESKTOP_DISABLE_LEGACY_TGVOIP)
++ target_compile_definitions(Telegram PRIVATE TDESKTOP_DISABLE_LEGACY_TGVOIP)
++else()
++ target_link_libraries(Telegram PRIVATE tdesktop::lib_tgcalls_legacy tdesktop::lib_tgvoip)
++endif()
++
+ if (DESKTOP_APP_DISABLE_SPELLCHECK)
+ target_compile_definitions(Telegram PRIVATE TDESKTOP_DISABLE_SPELLCHECK)
+ else()
+--
+2.37.3
+
diff --git a/gnu/packages/patches/timescaledb-flaky-test.patch b/gnu/packages/patches/timescaledb-flaky-test.patch
deleted file mode 100644
index 6268bcecad..0000000000
--- a/gnu/packages/patches/timescaledb-flaky-test.patch
+++ /dev/null
@@ -1,107 +0,0 @@
-Use fixed dates in test for consistent results.
-
-Taken from upstream:
-
- https://github.com/timescale/timescaledb/commit/1d0670e703862b284c241ab797404f851b25b5df
-
-diff --git a/test/expected/copy-12.out b/test/expected/copy-12.out
-index 5cb28a45a2..37abf6f6ff 100644
---- a/test/expected/copy-12.out
-+++ b/test/expected/copy-12.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
-- INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
- count
- -------
-- 697
-+ 721
- (1 row)
-
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
- count
- -------
-- 697
-+ 721
- (1 row)
-
- ----------------------------------------------------------------
-diff --git a/test/expected/copy-13.out b/test/expected/copy-13.out
-index 02bf913eff..89e16fe8e2 100644
---- a/test/expected/copy-13.out
-+++ b/test/expected/copy-13.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
-- INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
- count
- -------
-- 697
-+ 721
- (1 row)
-
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
- count
- -------
-- 697
-+ 721
- (1 row)
-
- ----------------------------------------------------------------
-diff --git a/test/expected/copy-14.out b/test/expected/copy-14.out
-index 02bf913eff..89e16fe8e2 100644
---- a/test/expected/copy-14.out
-+++ b/test/expected/copy-14.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
-- INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
- count
- -------
-- 697
-+ 721
- (1 row)
-
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
- count
- -------
-- 697
-+ 721
- (1 row)
-
- ----------------------------------------------------------------
-diff --git a/test/sql/copy.sql.in b/test/sql/copy.sql.in
-index 91402c2ab8..bba4265064 100644
---- a/test/sql/copy.sql.in
-+++ b/test/sql/copy.sql.in
-@@ -276,8 +276,7 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
-- INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
-
- SELECT COUNT(*) FROM hyper_copy_large;
diff --git a/gnu/packages/patches/timewarrior-time-sensitive-tests.patch b/gnu/packages/patches/timewarrior-time-sensitive-tests.patch
new file mode 100644
index 0000000000..586d1aa261
--- /dev/null
+++ b/gnu/packages/patches/timewarrior-time-sensitive-tests.patch
@@ -0,0 +1,163 @@
+From: Gordon Ball <gordon@chronitis.net>
+Date: Sat, 23 Nov 2019 18:59:39 +0000
+Subject: skip tests which are sensitive to server time
+
+---
+ test/continue.t | 2 +-
+ test/export.t | 1 +
+ test/help.t | 1 +
+ test/lengthen.t | 1 +
+ test/move.t | 2 ++
+ test/run_all | 2 +-
+ test/shorten.t | 1 +
+ test/summary.t | 3 +++
+ test/tag.t | 1 +
+ test/tags.t | 1 +
+ 10 files changed, 13 insertions(+), 2 deletions(-)
+
+diff --git a/test/continue.t b/test/continue.t
+index 917699e..428f714 100755
+--- a/test/continue.t
++++ b/test/continue.t
+@@ -37,7 +37,7 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
+
+ from basetest import Timew, TestCase
+
+-
++@unittest.skip("Time-of-day sensitive")
+ class TestContinue(TestCase):
+ def setUp(self):
+ """Executed before each test in the class"""
+diff --git a/test/export.t b/test/export.t
+index c6726e6..8b511ff 100755
+--- a/test/export.t
++++ b/test/export.t
+@@ -62,6 +62,7 @@ class TestExport(TestCase):
+ expectedEnd=now_utc,
+ expectedTags=["foo"])
+
++ @unittest.skip("flaky")
+ def test_changing_exclusion_does_not_change_flattened_intervals(self):
+ """Changing exclusions does not change flattened intervals"""
+ now = datetime.now()
+diff --git a/test/help.t b/test/help.t
+index 786def0..9ecf5e6 100755
+--- a/test/help.t
++++ b/test/help.t
+@@ -58,6 +58,7 @@ class TestHelp(TestCase):
+ code, out2, err2 = self.t("-h")
+ self.assertEqual(out1, out2)
+
++ @unittest.skip("flaky")
+ def test_help_with_command_should_show_man_page(self):
+ """timew help with command should show man page"""
+ code, out, err = self.t("help start")
+diff --git a/test/lengthen.t b/test/lengthen.t
+index a6f1d77..f9ab54d 100755
+--- a/test/lengthen.t
++++ b/test/lengthen.t
+@@ -55,6 +55,7 @@ class TestLengthen(TestCase):
+ code, out, err = self.t.runError("lengthen @1 10mins")
+ self.assertIn('Cannot lengthen open interval @1', err)
+
++ @unittest.skip("time sensitive")
+ def test_lengthen_synthetic_interval(self):
+ """Lengthen a synthetic interval."""
+ now = datetime.now()
+diff --git a/test/move.t b/test/move.t
+index 2d7fdd9..b2336f5 100755
+--- a/test/move.t
++++ b/test/move.t
+@@ -145,6 +145,7 @@ class TestMove(TestCase):
+ expectedEnd="20170301T143000Z",
+ expectedTags=["bar"])
+
++ @unittest.skip("time sensitive")
+ def test_move_synthetic_interval_into_exclusion(self):
+ """Move a synthetic interval into exclusion"""
+ now = datetime.now()
+@@ -175,6 +176,7 @@ class TestMove(TestCase):
+ expectedTags=[],
+ description="unmodified interval")
+
++ @unittest.skip("time sensitive")
+ def test_move_synthetic_interval_away_from_exclusion(self):
+ """Move a synthetic interval away from exclusion"""
+ now = datetime.now()
+diff --git a/test/run_all b/test/run_all
+index ea7dd8f..afc6731 100755
+--- a/test/run_all
++++ b/test/run_all
+@@ -14,7 +14,7 @@ from queue import Queue, Empty
+ from subprocess import call, Popen, PIPE
+ from threading import Thread
+
+-TIMEOUT = .2
++TIMEOUT = 2.
+
+
+ def run_test(testqueue, outqueue, threadname):
+diff --git a/test/shorten.t b/test/shorten.t
+index 7058cc0..94e0067 100755
+--- a/test/shorten.t
++++ b/test/shorten.t
+@@ -69,6 +69,7 @@ class TestShorten(TestCase):
+ self.t("move @1 20170308T113000")
+ self.t("shorten @1 5min") # Does not work.
+
++ @unittest.skip("time sensitive")
+ def test_shorten_synthetic_interval(self):
+ """Shorten a synthetic interval."""
+ now = datetime.now()
+diff --git a/test/summary.t b/test/summary.t
+index 6fd3352..f91e992 100755
+--- a/test/summary.t
++++ b/test/summary.t
+@@ -192,6 +192,7 @@ W10 2017-03-09 Thu @4 Tag1 8:43:08 9:38:15 0:55:07
+ 1:09:03
+ """, out)
+
++ @unittest.skip("fails w1-9")
+ def test_with_all_hint(self):
+ """Summary should work with :all hint"""
+ now = datetime.now()
+@@ -236,6 +237,7 @@ W{5} {2:%Y-%m-%d} {2:%a} @1 BAZ 10:00:00 11:00:00 1:00:00 1:00:00
+ self.assertIn("@1", out)
+ self.assertRegex(out, r'\s{30}0:00:02')
+
++ @unittest.skip("fails w1-9")
+ def test_with_named_date_yesterday(self):
+ """Summary should work with 'yesterday'"""
+ now = datetime.now()
+@@ -260,6 +262,7 @@ W{1} {0:%Y-%m-%d} {0:%a} @3 FOO 10:00:00 11:00:00 1:00:00 1:00:00
+ {2} 1:00:00
+ """.format(yesterday, week_yesterday, " " if two_digit_week is True else "", "-" if two_digit_week is True else ""), out)
+
++ @unittest.skip("fails w1-9")
+ def test_with_named_date_today(self):
+ """Summary should work with 'today'"""
+ now = datetime.now()
+diff --git a/test/tag.t b/test/tag.t
+index 8b2f847..21b8c16 100755
+--- a/test/tag.t
++++ b/test/tag.t
+@@ -178,6 +178,7 @@ class TestTag(TestCase):
+ self.assertClosedInterval(j[0], expectedTags=["bar", "foo", "one"])
+ self.assertClosedInterval(j[1], expectedTags=["bar", "foo", "two"])
+
++ @unittest.skip("time sensitive")
+ def test_tag_synthetic_interval(self):
+ """Tag a synthetic interval."""
+ now = datetime.now()
+diff --git a/test/tags.t b/test/tags.t
+index 6cfe143..4f84d06 100755
+--- a/test/tags.t
++++ b/test/tags.t
+@@ -63,6 +63,7 @@ class TestTags(TestCase):
+ self.assertIn('foo', out)
+ self.assertIn('bar', out)
+
++ @unittest.skip("time sensitive")
+ def test_tags_filtered(self):
+ """Test that tags command filtering excludes tags that are outside the filter range"""
+ self.t("track 20160101T0100 - 20160101T1000 foo")
diff --git a/gnu/packages/patches/tinydir-fix-cbehave-test.patch b/gnu/packages/patches/tinydir-fix-cbehave-test.patch
new file mode 100644
index 0000000000..84ecee12c2
--- /dev/null
+++ b/gnu/packages/patches/tinydir-fix-cbehave-test.patch
@@ -0,0 +1,16 @@
+Make test work with upstream cbehave
+(tinydir bundles a modified version)
+
+diff --git a/tests/file_open_test.c b/tests/file_open_test.c
+index 3e659bc..9f6f88d 100644
+--- a/tests/file_open_test.c
++++ b/tests/file_open_test.c
+@@ -19,4 +19,7 @@ FEATURE(file_open, "File open")
+ SCENARIO_END
+ FEATURE_END
+
+-CBEHAVE_RUN("File open:", TEST_FEATURE(file_open))
++int main(void) {
++ cbehave_feature _cfeatures[] = {{feature_idx(file_open)}};
++ return cbehave_runner("File open:", _cfeatures);
++}
diff --git a/gnu/packages/patches/u-boot-allow-disabling-openssl.patch b/gnu/packages/patches/u-boot-allow-disabling-openssl.patch
index 73e5878546..5f2856dbb4 100644
--- a/gnu/packages/patches/u-boot-allow-disabling-openssl.patch
+++ b/gnu/packages/patches/u-boot-allow-disabling-openssl.patch
@@ -5,6 +5,9 @@ Subject: [PATCH] Revert "tools: kwbimage: Do not hide usage of secure header
This reverts commit b4f3cc2c42d97967a3a3c8796c340f6b07ecccac.
+Addendum 2022-12-08, Ricardo Wurmus: This patch has been updated to introduce
+CONFIG_FIT_PRELOAD to remove fit_pre_load_data, which depends on openssl.
+
diff --git a/tools/kwbimage.c b/tools/kwbimage.c
index 94b7685392..eec599b0ee 100644
--- a/tools/kwbimage.c
@@ -137,3 +140,66 @@ index 94b7685392..eec599b0ee 100644
*imagesz = headersz;
+--- a/tools/image-host.c
++++ b/tools/image-host.c
+@@ -14,10 +14,12 @@
+ #include <image.h>
+ #include <version.h>
+
++#ifdef CONFIG_FIT_PRELOAD
+ #include <openssl/pem.h>
+ #include <openssl/evp.h>
+
+ #define IMAGE_PRE_LOAD_PATH "/image/pre-load/sig"
++#endif
+
+ /**
+ * fit_set_hash_value - set hash value in requested has node
+@@ -1116,6 +1118,7 @@
+ return 0;
+ }
+
++#ifdef CONFIG_FIT_PRELOAD
+ /*
+ * 0) open file (open)
+ * 1) read certificate (PEM_read_X509)
+@@ -1224,6 +1227,7 @@
+ out:
+ return ret;
+ }
++#endif
+
+ int fit_cipher_data(const char *keydir, void *keydest, void *fit,
+ const char *comment, int require_keys,
+--- a/tools/fit_image.c
++++ b/tools/fit_image.c
+@@ -59,9 +59,10 @@
+ ret = fit_set_timestamp(ptr, 0, time);
+ }
+
++#ifdef CONFIG_FIT_PRELOAD
+ if (!ret)
+ ret = fit_pre_load_data(params->keydir, dest_blob, ptr);
+-
++#endif
+ if (!ret) {
+ ret = fit_cipher_data(params->keydir, dest_blob, ptr,
+ params->comment,
+--- a/include/image.h
++++ b/include/image.h
+@@ -1090,6 +1090,7 @@
+
+ int fit_set_timestamp(void *fit, int noffset, time_t timestamp);
+
++#ifdef CONFIG_FIT_PRELOAD
+ /**
+ * fit_pre_load_data() - add public key to fdt blob
+ *
+@@ -1104,6 +1105,7 @@
+ * < 0, on failure
+ */
+ int fit_pre_load_data(const char *keydir, void *keydest, void *fit);
++#endif
+
+ int fit_cipher_data(const char *keydir, void *keydest, void *fit,
+ const char *comment, int require_keys,
diff --git a/gnu/packages/patches/u-boot-infodocs-target.patch b/gnu/packages/patches/u-boot-infodocs-target.patch
new file mode 100644
index 0000000000..5b21a99de3
--- /dev/null
+++ b/gnu/packages/patches/u-boot-infodocs-target.patch
@@ -0,0 +1,84 @@
+Upstream status: https://patchwork.ozlabs.org/project/uboot/list/?series=333259
+
+diff --git a/Makefile b/Makefile
+index de5746399a..597a8886c3 100644
+--- a/Makefile
++++ b/Makefile
+@@ -2372,7 +2372,7 @@ tcheck:
+ # Documentation targets
+ # ---------------------------------------------------------------------------
+ DOC_TARGETS := xmldocs latexdocs pdfdocs htmldocs epubdocs cleandocs \
+- linkcheckdocs dochelp refcheckdocs
++ linkcheckdocs dochelp refcheckdocs texinfodocs infodocs
+ PHONY += $(DOC_TARGETS)
+ $(DOC_TARGETS): scripts_basic FORCE
+ $(Q)$(MAKE) $(build)=doc $@
+diff --git a/doc/Makefile b/doc/Makefile
+index f5de65e927..d0904a9f99 100644
+--- a/doc/Makefile
++++ b/doc/Makefile
+@@ -69,6 +69,14 @@ quiet_cmd_sphinx = SPHINX $@ --> file://$(abspath $(BUILDDIR)/$3/$4)
+ htmldocs:
+ @+$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,html,$(var),,$(var)))
+
++texinfodocs:
++ @+$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,texinfo,$(var),texinfo,$(var)))
++
++# Note: the 'info' Make target is generated by sphinx itself when
++# running the texinfodocs target defined above.
++infodocs: texinfodocs
++ $(MAKE) -C $(BUILDDIR)/texinfo info
++
+ linkcheckdocs:
+ @$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,linkcheck,$(var),,$(var)))
+
+@@ -109,6 +117,8 @@ cleandocs:
+ dochelp:
+ @echo ' U-Boot documentation in different formats from ReST:'
+ @echo ' htmldocs - HTML'
++ @echo ' texinfodocs - Texinfo'
++ @echo ' infodocs - Info'
+ @echo ' latexdocs - LaTeX'
+ @echo ' pdfdocs - PDF'
+ @echo ' epubdocs - EPUB'
+diff --git a/doc/conf.py b/doc/conf.py
+index 62c8d31270..3db70f80c1 100644
+--- a/doc/conf.py
++++ b/doc/conf.py
+@@ -449,7 +449,7 @@ for fn in os.listdir('.'):
+ # One entry per manual page. List of tuples
+ # (source start file, name, description, authors, manual section).
+ man_pages = [
+- (master_doc, 'dasuboot', 'The U-Boot Documentation',
++ (master_doc, 'u-boot', 'The U-Boot Documentation',
+ [author], 1)
+ ]
+
+@@ -463,8 +463,8 @@ man_pages = [
+ # (source start file, target name, title, author,
+ # dir menu entry, description, category)
+ texinfo_documents = [
+- (master_doc, 'DasUBoot', 'The U-Boot Documentation',
+- author, 'DasUBoot', 'One line description of project.',
++ (master_doc, 'u-boot', 'The U-Boot Documentation',
++ author, 'U-Boot', 'Boot loader for embedded systems',
+ 'Miscellaneous'),
+ ]
+
+diff --git a/doc/media/Makefile b/doc/media/Makefile
+index b9b43a34c3..9b32258696 100644
+--- a/doc/media/Makefile
++++ b/doc/media/Makefile
+@@ -22,10 +22,11 @@ $(BUILDDIR)/linker_lists.h.rst: ${API}/linker_lists.h ${PARSER} $(SRC_DIR)/linke
+
+ # Media build rules
+
+-.PHONY: all html epub xml latex
++.PHONY: all html texinfo epub xml latex
+
+ all: $(IMGDOT) $(BUILDDIR) ${TARGETS}
+ html: all
++texinfo: all
+ epub: all
+ xml: all
+ latex: $(IMGPDF) all
diff --git a/gnu/packages/patches/u-boot-patman-guix-integration.patch b/gnu/packages/patches/u-boot-patman-guix-integration.patch
new file mode 100644
index 0000000000..3472656c99
--- /dev/null
+++ b/gnu/packages/patches/u-boot-patman-guix-integration.patch
@@ -0,0 +1,1244 @@
+These changes correspond to commits 9ff7500ace..3154de3dd6 already merged to
+the u-boot-dm custodian repo (at
+https://source.denx.de/u-boot/custodians/u-boot-dm/-/commits/next), scheduled
+to be pulled after the next release.
+
+diff --git a/tools/patman/__init__.py b/tools/patman/__init__.py
+index c9d3e35052..1b98ec7fee 100644
+--- a/tools/patman/__init__.py
++++ b/tools/patman/__init__.py
+@@ -1,6 +1,6 @@
+ # SPDX-License-Identifier: GPL-2.0+
+
+ __all__ = ['checkpatch', 'command', 'commit', 'control', 'cros_subprocess',
+- 'func_test', 'get_maintainer', 'gitutil', 'main', 'patchstream',
++ 'func_test', 'get_maintainer', 'gitutil', '__main__', 'patchstream',
+ 'project', 'series', 'setup', 'settings', 'terminal',
+ 'test_checkpatch', 'test_util', 'tools', 'tout']
+diff --git a/tools/patman/main.py b/tools/patman/__main__.py
+similarity index 89%
+rename from tools/patman/main.py
+rename to tools/patman/__main__.py
+index 8067a288ab..749e6348b6 100755
+--- a/tools/patman/main.py
++++ b/tools/patman/__main__.py
+@@ -7,6 +7,7 @@
+ """See README for more information"""
+
+ from argparse import ArgumentParser
++import importlib.resources
+ import os
+ import re
+ import sys
+@@ -19,6 +20,7 @@ if __name__ == "__main__":
+
+ # Our modules
+ from patman import control
++from patman import func_test
+ from patman import gitutil
+ from patman import project
+ from patman import settings
+@@ -53,7 +55,8 @@ parser.add_argument('-H', '--full-help', action='store_true', dest='full_help',
+ default=False, help='Display the README file')
+
+ subparsers = parser.add_subparsers(dest='cmd')
+-send = subparsers.add_parser('send')
++send = subparsers.add_parser(
++ 'send', help='Format, check and email patches (default command)')
+ send.add_argument('-i', '--ignore-errors', action='store_true',
+ dest='ignore_errors', default=False,
+ help='Send patches email even if patch errors are found')
+@@ -62,6 +65,12 @@ send.add_argument('-l', '--limit-cc', dest='limit', type=int, default=None,
+ send.add_argument('-m', '--no-maintainers', action='store_false',
+ dest='add_maintainers', default=True,
+ help="Don't cc the file maintainers automatically")
++send.add_argument(
++ '--get-maintainer-script', dest='get_maintainer_script', type=str,
++ action='store',
++ default=os.path.join(gitutil.get_top_level(), 'scripts',
++ 'get_maintainer.pl') + ' --norolestats',
++ help='File name of the get_maintainer.pl (or compatible) script.')
+ send.add_argument('-n', '--dry-run', action='store_true', dest='dry_run',
+ default=False, help="Do a dry run (create but don't email patches)")
+ send.add_argument('-r', '--in-reply-to', type=str, action='store',
+@@ -94,9 +103,11 @@ send.add_argument('--smtp-server', type=str,
+
+ send.add_argument('patchfiles', nargs='*')
+
+-test_parser = subparsers.add_parser('test', help='Run tests')
+-test_parser.add_argument('testname', type=str, default=None, nargs='?',
+- help="Specify the test to run")
++# Only add the 'test' action if the test data files are available.
++if os.path.exists(func_test.TEST_DATA_DIR):
++ test_parser = subparsers.add_parser('test', help='Run tests')
++ test_parser.add_argument('testname', type=str, default=None, nargs='?',
++ help="Specify the test to run")
+
+ status = subparsers.add_parser('status',
+ help='Check status of patches in patchwork')
+@@ -113,7 +124,7 @@ status.add_argument('-f', '--force', action='store_true',
+ argv = sys.argv[1:]
+ args, rest = parser.parse_known_args(argv)
+ if hasattr(args, 'project'):
+- settings.Setup(gitutil, parser, args.project, '')
++ settings.Setup(parser, args.project)
+ args, rest = parser.parse_known_args(argv)
+
+ # If we have a command, it is safe to parse all arguments
+@@ -160,11 +171,8 @@ elif args.cmd == 'send':
+ fd.close()
+
+ elif args.full_help:
+- tools.print_full_help(
+- os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
+- 'README.rst')
+- )
+-
++ with importlib.resources.path('patman', 'README.rst') as readme:
++ tools.print_full_help(str(readme))
+ else:
+ # If we are not processing tags, no need to warning about bad ones
+ if not args.process_tags:
+diff --git a/tools/patman/checkpatch.py b/tools/patman/checkpatch.py
+index d1b902dd96..012c0d895c 100644
+--- a/tools/patman/checkpatch.py
++++ b/tools/patman/checkpatch.py
+@@ -211,7 +211,7 @@ def check_patch(fname, verbose=False, show_types=False, use_tree=False):
+ stdout: Full output of checkpatch
+ """
+ chk = find_check_patch()
+- args = [chk]
++ args = [chk, '--u-boot', '--strict']
+ if not use_tree:
+ args.append('--no-tree')
+ if show_types:
+diff --git a/tools/patman/control.py b/tools/patman/control.py
+index bf426cf7bc..38e98dab84 100644
+--- a/tools/patman/control.py
++++ b/tools/patman/control.py
+@@ -94,8 +94,8 @@ def check_patches(series, patch_files, run_checkpatch, verbose, use_tree):
+
+
+ def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go,
+- ignore_bad_tags, add_maintainers, limit, dry_run, in_reply_to,
+- thread, smtp_server):
++ ignore_bad_tags, add_maintainers, get_maintainer_script, limit,
++ dry_run, in_reply_to, thread, smtp_server):
+ """Email patches to the recipients
+
+ This emails out the patches and cover letter using 'git send-email'. Each
+@@ -123,6 +123,8 @@ def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go,
+ ignore_bad_tags (bool): True to just print a warning for unknown tags,
+ False to halt with an error
+ add_maintainers (bool): Run the get_maintainer.pl script for each patch
++ get_maintainer_script (str): The script used to retrieve which
++ maintainers to cc
+ limit (int): Limit on the number of people that can be cc'd on a single
+ patch or the cover letter (None if no limit)
+ dry_run (bool): Don't actually email the patches, just print out what
+@@ -134,7 +136,7 @@ def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go,
+ smtp_server (str): SMTP server to use to send patches (None for default)
+ """
+ cc_file = series.MakeCcFile(process_tags, cover_fname, not ignore_bad_tags,
+- add_maintainers, limit)
++ add_maintainers, limit, get_maintainer_script)
+
+ # Email the patches out (giving the user time to check / cancel)
+ cmd = ''
+@@ -174,8 +176,8 @@ def send(args):
+ email_patches(
+ col, series, cover_fname, patch_files, args.process_tags,
+ its_a_go, args.ignore_bad_tags, args.add_maintainers,
+- args.limit, args.dry_run, args.in_reply_to, args.thread,
+- args.smtp_server)
++ args.get_maintainer_script, args.limit, args.dry_run,
++ args.in_reply_to, args.thread, args.smtp_server)
+
+ def patchwork_status(branch, count, start, end, dest_branch, force,
+ show_comments, url):
+diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py
+index 7b92bc67be..c25a47bdeb 100644
+--- a/tools/patman/func_test.py
++++ b/tools/patman/func_test.py
+@@ -6,7 +6,9 @@
+
+ """Functional tests for checking that patman behaves correctly"""
+
++import contextlib
+ import os
++import pathlib
+ import re
+ import shutil
+ import sys
+@@ -28,6 +30,21 @@ from patman.test_util import capture_sys_output
+ import pygit2
+ from patman import status
+
++PATMAN_DIR = pathlib.Path(__file__).parent
++TEST_DATA_DIR = PATMAN_DIR / 'test/'
++
++
++@contextlib.contextmanager
++def directory_excursion(directory):
++ """Change directory to `directory` for a limited to the context block."""
++ current = os.getcwd()
++ try:
++ os.chdir(directory)
++ yield
++ finally:
++ os.chdir(current)
++
++
+ class TestFunctional(unittest.TestCase):
+ """Functional tests for checking that patman behaves correctly"""
+ leb = (b'Lord Edmund Blackadd\xc3\xabr <weasel@blackadder.org>'.
+@@ -57,8 +74,7 @@ class TestFunctional(unittest.TestCase):
+ Returns:
+ str: Full path to file in the test directory
+ """
+- return os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
+- 'test', fname)
++ return TEST_DATA_DIR / fname
+
+ @classmethod
+ def _get_text(cls, fname):
+@@ -200,6 +216,8 @@ class TestFunctional(unittest.TestCase):
+ text = self._get_text('test01.txt')
+ series = patchstream.get_metadata_for_test(text)
+ cover_fname, args = self._create_patches_for_test(series)
++ get_maintainer_script = str(pathlib.Path(__file__).parent.parent.parent
++ / 'get_maintainer.pl') + ' --norolestats'
+ with capture_sys_output() as out:
+ patchstream.fix_patches(series, args)
+ if cover_fname and series.get('cover'):
+@@ -207,7 +225,7 @@ class TestFunctional(unittest.TestCase):
+ series.DoChecks()
+ cc_file = series.MakeCcFile(process_tags, cover_fname,
+ not ignore_bad_tags, add_maintainers,
+- None)
++ None, get_maintainer_script)
+ cmd = gitutil.email_patches(
+ series, cover_fname, args, dry_run, not ignore_bad_tags,
+ cc_file, in_reply_to=in_reply_to, thread=None)
+@@ -502,6 +520,37 @@ complicated as possible''')
+ finally:
+ os.chdir(orig_dir)
+
++ def test_custom_get_maintainer_script(self):
++ """Validate that a custom get_maintainer script gets used."""
++ self.make_git_tree()
++ with directory_excursion(self.gitdir):
++ # Setup git.
++ os.environ['GIT_CONFIG_GLOBAL'] = '/dev/null'
++ os.environ['GIT_CONFIG_SYSTEM'] = '/dev/null'
++ tools.run('git', 'config', 'user.name', 'Dummy')
++ tools.run('git', 'config', 'user.email', 'dumdum@dummy.com')
++ tools.run('git', 'branch', 'upstream')
++ tools.run('git', 'branch', '--set-upstream-to=upstream')
++ tools.run('git', 'add', '.')
++ tools.run('git', 'commit', '-m', 'new commit')
++
++ # Setup patman configuration.
++ with open('.patman', 'w', buffering=1) as f:
++ f.write('[settings]\n'
++ 'get_maintainer_script: dummy-script.sh\n'
++ 'check_patch: False\n')
++ with open('dummy-script.sh', 'w', buffering=1) as f:
++ f.write('#!/usr/bin/env python\n'
++ 'print("hello@there.com")\n')
++ os.chmod('dummy-script.sh', 0x555)
++
++ # Finally, do the test
++ with capture_sys_output():
++ output = tools.run(PATMAN_DIR / 'patman', '--dry-run')
++ # Assert the email address is part of the dry-run
++ # output.
++ self.assertIn('hello@there.com', output)
++
+ def test_tags(self):
+ """Test collection of tags in a patchstream"""
+ text = '''This is a patch
+diff --git a/tools/patman/get_maintainer.py b/tools/patman/get_maintainer.py
+index e1d15ff6ab..f7011be1e4 100644
+--- a/tools/patman/get_maintainer.py
++++ b/tools/patman/get_maintainer.py
+@@ -1,48 +1,61 @@
+ # SPDX-License-Identifier: GPL-2.0+
+ # Copyright (c) 2012 The Chromium OS Authors.
++# Copyright (c) 2022 Maxim Cournoyer <maxim.cournoyer@savoirfairelinux.com>
+ #
+
+ import os
++import shlex
++import shutil
+
+ from patman import command
++from patman import gitutil
+
+-def find_get_maintainer(try_list):
+- """Look for the get_maintainer.pl script.
+
+- Args:
+- try_list: List of directories to try for the get_maintainer.pl script
++def find_get_maintainer(script_file_name):
++ """Try to find where `script_file_name` is.
+
+- Returns:
+- If the script is found we'll return a path to it; else None.
++ It searches in PATH and falls back to a path relative to the top
++ of the current git repository.
+ """
+- # Look in the list
+- for path in try_list:
+- fname = os.path.join(path, 'get_maintainer.pl')
+- if os.path.isfile(fname):
+- return fname
++ get_maintainer = shutil.which(script_file_name)
++ if get_maintainer:
++ return get_maintainer
++
++ git_relative_script = os.path.join(gitutil.get_top_level(),
++ script_file_name)
++ if os.path.exists(git_relative_script):
++ return git_relative_script
+
+- return None
+
+-def get_maintainer(dir_list, fname, verbose=False):
+- """Run get_maintainer.pl on a file if we find it.
++def get_maintainer(script_file_name, fname, verbose=False):
++ """Run `script_file_name` on a file.
+
+- We look for get_maintainer.pl in the 'scripts' directory at the top of
+- git. If we find it we'll run it. If we don't find get_maintainer.pl
+- then we fail silently.
++ `script_file_name` should be a get_maintainer.pl-like script that
++ takes a patch file name as an input and return the email addresses
++ of the associated maintainers to standard output, one per line.
++
++ If `script_file_name` does not exist we fail silently.
+
+ Args:
+- dir_list: List of directories to try for the get_maintainer.pl script
+- fname: Path to the patch file to run get_maintainer.pl on.
++ script_file_name: The file name of the get_maintainer.pl script
++ (or compatible).
++ fname: File name of the patch to process with get_maintainer.pl.
+
+ Returns:
+ A list of email addresses to CC to.
+ """
+- get_maintainer = find_get_maintainer(dir_list)
++ # Expand `script_file_name` into a file name and its arguments, if
++ # any.
++ cmd_args = shlex.split(script_file_name)
++ file_name = cmd_args[0]
++ arguments = cmd_args[1:]
++
++ get_maintainer = find_get_maintainer(file_name)
+ if not get_maintainer:
+ if verbose:
+ print("WARNING: Couldn't find get_maintainer.pl")
+ return []
+
+- stdout = command.output(get_maintainer, '--norolestats', fname)
++ stdout = command.output(get_maintainer, *arguments, fname)
+ lines = stdout.splitlines()
+- return [ x.replace('"', '') for x in lines ]
++ return [x.replace('"', '') for x in lines]
+diff --git a/tools/patman/gitutil.py b/tools/patman/gitutil.py
+index ceaf2ce150..5e742102c2 100644
+--- a/tools/patman/gitutil.py
++++ b/tools/patman/gitutil.py
+@@ -2,21 +2,19 @@
+ # Copyright (c) 2011 The Chromium OS Authors.
+ #
+
+-import re
+ import os
+-import subprocess
+ import sys
+
+ from patman import command
+ from patman import settings
+ from patman import terminal
+-from patman import tools
+
+ # True to use --no-decorate - we check this in setup()
+ use_no_decorate = True
+
++
+ def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False,
+- count=None):
++ count=None):
+ """Create a command to perform a 'git log'
+
+ Args:
+@@ -49,6 +47,7 @@ def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False,
+ cmd.append('--')
+ return cmd
+
++
+ def count_commits_to_branch(branch):
+ """Returns number of commits between HEAD and the tracking branch.
+
+@@ -68,13 +67,14 @@ def count_commits_to_branch(branch):
+ rev_range = '@{upstream}..'
+ pipe = [log_cmd(rev_range, oneline=True)]
+ result = command.run_pipe(pipe, capture=True, capture_stderr=True,
+- oneline=True, raise_on_error=False)
++ oneline=True, raise_on_error=False)
+ if result.return_code:
+ raise ValueError('Failed to determine upstream: %s' %
+ result.stderr.strip())
+ patch_count = len(result.stdout.splitlines())
+ return patch_count
+
++
+ def name_revision(commit_hash):
+ """Gets the revision name for a commit
+
+@@ -91,6 +91,7 @@ def name_revision(commit_hash):
+ name = stdout.split(' ')[1].strip()
+ return name
+
++
+ def guess_upstream(git_dir, branch):
+ """Tries to guess the upstream for a branch
+
+@@ -109,7 +110,7 @@ def guess_upstream(git_dir, branch):
+ """
+ pipe = [log_cmd(branch, git_dir=git_dir, oneline=True, count=100)]
+ result = command.run_pipe(pipe, capture=True, capture_stderr=True,
+- raise_on_error=False)
++ raise_on_error=False)
+ if result.return_code:
+ return None, "Branch '%s' not found" % branch
+ for line in result.stdout.splitlines()[1:]:
+@@ -121,6 +122,7 @@ def guess_upstream(git_dir, branch):
+ return name, "Guessing upstream as '%s'" % name
+ return None, "Cannot find a suitable upstream for branch '%s'" % branch
+
++
+ def get_upstream(git_dir, branch):
+ """Returns the name of the upstream for a branch
+
+@@ -135,10 +137,10 @@ def get_upstream(git_dir, branch):
+ """
+ try:
+ remote = command.output_one_line('git', '--git-dir', git_dir, 'config',
+- 'branch.%s.remote' % branch)
++ 'branch.%s.remote' % branch)
+ merge = command.output_one_line('git', '--git-dir', git_dir, 'config',
+- 'branch.%s.merge' % branch)
+- except:
++ 'branch.%s.merge' % branch)
++ except Exception:
+ upstream, msg = guess_upstream(git_dir, branch)
+ return upstream, msg
+
+@@ -149,7 +151,8 @@ def get_upstream(git_dir, branch):
+ return '%s/%s' % (remote, leaf), None
+ else:
+ raise ValueError("Cannot determine upstream branch for branch "
+- "'%s' remote='%s', merge='%s'" % (branch, remote, merge))
++ "'%s' remote='%s', merge='%s'"
++ % (branch, remote, merge))
+
+
+ def get_range_in_branch(git_dir, branch, include_upstream=False):
+@@ -168,6 +171,7 @@ def get_range_in_branch(git_dir, branch, include_upstream=False):
+ rstr = '%s%s..%s' % (upstream, '~' if include_upstream else '', branch)
+ return rstr, msg
+
++
+ def count_commits_in_range(git_dir, range_expr):
+ """Returns the number of commits in the given range.
+
+@@ -180,12 +184,13 @@ def count_commits_in_range(git_dir, range_expr):
+ """
+ pipe = [log_cmd(range_expr, git_dir=git_dir, oneline=True)]
+ result = command.run_pipe(pipe, capture=True, capture_stderr=True,
+- raise_on_error=False)
++ raise_on_error=False)
+ if result.return_code:
+ return None, "Range '%s' not found or is invalid" % range_expr
+ patch_count = len(result.stdout.splitlines())
+ return patch_count, None
+
++
+ def count_commits_in_branch(git_dir, branch, include_upstream=False):
+ """Returns the number of commits in the given branch.
+
+@@ -201,6 +206,7 @@ def count_commits_in_branch(git_dir, branch, include_upstream=False):
+ return None, msg
+ return count_commits_in_range(git_dir, range_expr)
+
++
+ def count_commits(commit_range):
+ """Returns the number of commits in the given range.
+
+@@ -215,6 +221,7 @@ def count_commits(commit_range):
+ patch_count = int(stdout)
+ return patch_count
+
++
+ def checkout(commit_hash, git_dir=None, work_tree=None, force=False):
+ """Checkout the selected commit for this build
+
+@@ -231,10 +238,11 @@ def checkout(commit_hash, git_dir=None, work_tree=None, force=False):
+ pipe.append('-f')
+ pipe.append(commit_hash)
+ result = command.run_pipe([pipe], capture=True, raise_on_error=False,
+- capture_stderr=True)
++ capture_stderr=True)
+ if result.return_code != 0:
+ raise OSError('git checkout (%s): %s' % (pipe, result.stderr))
+
++
+ def clone(git_dir, output_dir):
+ """Checkout the selected commit for this build
+
+@@ -243,10 +251,11 @@ def clone(git_dir, output_dir):
+ """
+ pipe = ['git', 'clone', git_dir, '.']
+ result = command.run_pipe([pipe], capture=True, cwd=output_dir,
+- capture_stderr=True)
++ capture_stderr=True)
+ if result.return_code != 0:
+ raise OSError('git clone: %s' % result.stderr)
+
++
+ def fetch(git_dir=None, work_tree=None):
+ """Fetch from the origin repo
+
+@@ -263,6 +272,7 @@ def fetch(git_dir=None, work_tree=None):
+ if result.return_code != 0:
+ raise OSError('git fetch: %s' % result.stderr)
+
++
+ def check_worktree_is_available(git_dir):
+ """Check if git-worktree functionality is available
+
+@@ -274,9 +284,10 @@ def check_worktree_is_available(git_dir):
+ """
+ pipe = ['git', '--git-dir', git_dir, 'worktree', 'list']
+ result = command.run_pipe([pipe], capture=True, capture_stderr=True,
+- raise_on_error=False)
++ raise_on_error=False)
+ return result.return_code == 0
+
++
+ def add_worktree(git_dir, output_dir, commit_hash=None):
+ """Create and checkout a new git worktree for this build
+
+@@ -290,10 +301,11 @@ def add_worktree(git_dir, output_dir, commit_hash=None):
+ if commit_hash:
+ pipe.append(commit_hash)
+ result = command.run_pipe([pipe], capture=True, cwd=output_dir,
+- capture_stderr=True)
++ capture_stderr=True)
+ if result.return_code != 0:
+ raise OSError('git worktree add: %s' % result.stderr)
+
++
+ def prune_worktrees(git_dir):
+ """Remove administrative files for deleted worktrees
+
+@@ -305,7 +317,8 @@ def prune_worktrees(git_dir):
+ if result.return_code != 0:
+ raise OSError('git worktree prune: %s' % result.stderr)
+
+-def create_patches(branch, start, count, ignore_binary, series, signoff = True):
++
++def create_patches(branch, start, count, ignore_binary, series, signoff=True):
+ """Create a series of patches from the top of the current branch.
+
+ The patch files are written to the current directory using
+@@ -321,9 +334,7 @@ def create_patches(branch, start, count, ignore_binary, series, signoff = True):
+ Filename of cover letter (None if none)
+ List of filenames of patch files
+ """
+- if series.get('version'):
+- version = '%s ' % series['version']
+- cmd = ['git', 'format-patch', '-M' ]
++ cmd = ['git', 'format-patch', '-M']
+ if signoff:
+ cmd.append('--signoff')
+ if ignore_binary:
+@@ -341,9 +352,10 @@ def create_patches(branch, start, count, ignore_binary, series, signoff = True):
+
+ # We have an extra file if there is a cover letter
+ if series.get('cover'):
+- return files[0], files[1:]
++ return files[0], files[1:]
+ else:
+- return None, files
++ return None, files
++
+
+ def build_email_list(in_list, tag=None, alias=None, warn_on_error=True):
+ """Build a list of email addresses based on an input list.
+@@ -385,40 +397,43 @@ def build_email_list(in_list, tag=None, alias=None, warn_on_error=True):
+ raw += lookup_email(item, alias, warn_on_error=warn_on_error)
+ result = []
+ for item in raw:
+- if not item in result:
++ if item not in result:
+ result.append(item)
+ if tag:
+ return ['%s %s%s%s' % (tag, quote, email, quote) for email in result]
+ return result
+
++
+ def check_suppress_cc_config():
+ """Check if sendemail.suppresscc is configured correctly.
+
+ Returns:
+ True if the option is configured correctly, False otherwise.
+ """
+- suppresscc = command.output_one_line('git', 'config', 'sendemail.suppresscc',
+- raise_on_error=False)
++ suppresscc = command.output_one_line(
++ 'git', 'config', 'sendemail.suppresscc', raise_on_error=False)
+
+ # Other settings should be fine.
+ if suppresscc == 'all' or suppresscc == 'cccmd':
+ col = terminal.Color()
+
+ print((col.build(col.RED, "error") +
+- ": git config sendemail.suppresscc set to %s\n" % (suppresscc)) +
+- " patman needs --cc-cmd to be run to set the cc list.\n" +
+- " Please run:\n" +
+- " git config --unset sendemail.suppresscc\n" +
+- " Or read the man page:\n" +
+- " git send-email --help\n" +
+- " and set an option that runs --cc-cmd\n")
++ ": git config sendemail.suppresscc set to %s\n"
++ % (suppresscc)) +
++ " patman needs --cc-cmd to be run to set the cc list.\n" +
++ " Please run:\n" +
++ " git config --unset sendemail.suppresscc\n" +
++ " Or read the man page:\n" +
++ " git send-email --help\n" +
++ " and set an option that runs --cc-cmd\n")
+ return False
+
+ return True
+
++
+ def email_patches(series, cover_fname, args, dry_run, warn_on_error, cc_fname,
+- self_only=False, alias=None, in_reply_to=None, thread=False,
+- smtp_server=None):
++ self_only=False, alias=None, in_reply_to=None, thread=False,
++ smtp_server=None, get_maintainer_script=None):
+ """Email a patch series.
+
+ Args:
+@@ -435,6 +450,7 @@ def email_patches(series, cover_fname, args, dry_run, warn_on_error, cc_fname,
+ thread: True to add --thread to git send-email (make
+ all patches reply to cover-letter or first patch in series)
+ smtp_server: SMTP server to use to send patches
++ get_maintainer_script: File name of script to get maintainers emails
+
+ Returns:
+ Git command that was/would be run
+@@ -487,9 +503,10 @@ send --cc-cmd cc-fname" cover p1 p2'
+ "git config sendemail.to u-boot@lists.denx.de")
+ return
+ cc = build_email_list(list(set(series.get('cc')) - set(series.get('to'))),
+- '--cc', alias, warn_on_error)
++ '--cc', alias, warn_on_error)
+ if self_only:
+- to = build_email_list([os.getenv('USER')], '--to', alias, warn_on_error)
++ to = build_email_list([os.getenv('USER')], '--to',
++ alias, warn_on_error)
+ cc = []
+ cmd = ['git', 'send-email', '--annotate']
+ if smtp_server:
+@@ -565,7 +582,7 @@ def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0):
+ if not alias:
+ alias = settings.alias
+ lookup_name = lookup_name.strip()
+- if '@' in lookup_name: # Perhaps a real email address
++ if '@' in lookup_name: # Perhaps a real email address
+ return [lookup_name]
+
+ lookup_name = lookup_name.lower()
+@@ -581,7 +598,7 @@ def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0):
+ return out_list
+
+ if lookup_name:
+- if not lookup_name in alias:
++ if lookup_name not in alias:
+ msg = "Alias '%s' not found" % lookup_name
+ if warn_on_error:
+ print(col.build(col.RED, msg))
+@@ -589,11 +606,12 @@ def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0):
+ for item in alias[lookup_name]:
+ todo = lookup_email(item, alias, warn_on_error, level + 1)
+ for new_item in todo:
+- if not new_item in out_list:
++ if new_item not in out_list:
+ out_list.append(new_item)
+
+ return out_list
+
++
+ def get_top_level():
+ """Return name of top-level directory for this git repo.
+
+@@ -608,6 +626,7 @@ def get_top_level():
+ """
+ return command.output_one_line('git', 'rev-parse', '--show-toplevel')
+
++
+ def get_alias_file():
+ """Gets the name of the git alias file.
+
+@@ -615,7 +634,7 @@ def get_alias_file():
+ Filename of git alias file, or None if none
+ """
+ fname = command.output_one_line('git', 'config', 'sendemail.aliasesfile',
+- raise_on_error=False)
++ raise_on_error=False)
+ if not fname:
+ return None
+
+@@ -625,6 +644,7 @@ def get_alias_file():
+
+ return os.path.join(get_top_level(), fname)
+
++
+ def get_default_user_name():
+ """Gets the user.name from .gitconfig file.
+
+@@ -634,6 +654,7 @@ def get_default_user_name():
+ uname = command.output_one_line('git', 'config', '--global', 'user.name')
+ return uname
+
++
+ def get_default_user_email():
+ """Gets the user.email from the global .gitconfig file.
+
+@@ -643,17 +664,19 @@ def get_default_user_email():
+ uemail = command.output_one_line('git', 'config', '--global', 'user.email')
+ return uemail
+
++
+ def get_default_subject_prefix():
+ """Gets the format.subjectprefix from local .git/config file.
+
+ Returns:
+ Subject prefix found in local .git/config file, or None if none
+ """
+- sub_prefix = command.output_one_line('git', 'config', 'format.subjectprefix',
+- raise_on_error=False)
++ sub_prefix = command.output_one_line(
++ 'git', 'config', 'format.subjectprefix', raise_on_error=False)
+
+ return sub_prefix
+
++
+ def setup():
+ """Set up git utils, by reading the alias files."""
+ # Check for a git alias file also
+@@ -666,6 +689,7 @@ def setup():
+ use_no_decorate = (command.run_pipe([cmd], raise_on_error=False)
+ .return_code == 0)
+
++
+ def get_head():
+ """Get the hash of the current HEAD
+
+@@ -674,6 +698,7 @@ def get_head():
+ """
+ return command.output_one_line('git', 'show', '-s', '--pretty=format:%H')
+
++
+ if __name__ == "__main__":
+ import doctest
+
+diff --git a/tools/patman/patman b/tools/patman/patman
+index 11a5d8e18a..5a427d1942 120000
+--- a/tools/patman/patman
++++ b/tools/patman/patman
+@@ -1 +1 @@
+-main.py
+\ No newline at end of file
++__main__.py
+\ No newline at end of file
+diff --git a/tools/patman/patman.rst b/tools/patman/patman.rst
+index 8c5c9cc2cc..6113962fb4 100644
+--- a/tools/patman/patman.rst
++++ b/tools/patman/patman.rst
+@@ -1,6 +1,7 @@
+ .. SPDX-License-Identifier: GPL-2.0+
+ .. Copyright (c) 2011 The Chromium OS Authors
+ .. Simon Glass <sjg@chromium.org>
++.. Maxim Cournoyer <maxim.cournoyer@savoirfairelinux.com>
+ .. v1, v2, 19-Oct-11
+ .. revised v3 24-Nov-11
+ .. revised v4 Independence Day 2020, with Patchwork integration
+@@ -68,13 +69,28 @@ this once::
+
+ git config sendemail.aliasesfile doc/git-mailrc
+
+-For both Linux and U-Boot the 'scripts/get_maintainer.pl' handles figuring
+-out where to send patches pretty well.
++For both Linux and U-Boot the 'scripts/get_maintainer.pl' handles
++figuring out where to send patches pretty well. For other projects,
++you may want to specify a different script to be run, for example via
++a project-specific `.patman` file::
++
++ # .patman configuration file at the root of some project
++
++ [settings]
++ get_maintainer_script: etc/teams.scm get-maintainer
++
++The `get_maintainer_script` option corresponds to the
++`--get-maintainer-script` argument of the `send` command. It is
++looked relatively to the root of the current git repository, as well
++as on PATH. It can also be provided arguments, as shown above. The
++contract is that the script should accept a patch file name and return
++a list of email addresses, one per line, like `get_maintainer.pl`
++does.
+
+ During the first run patman creates a config file for you by taking the default
+ user name and email address from the global .gitconfig file.
+
+-To add your own, create a file ~/.patman like this::
++To add your own, create a file `~/.patman` like this::
+
+ # patman alias file
+
+@@ -85,6 +101,12 @@ To add your own, create a file ~/.patman like this::
+ wolfgang: Wolfgang Denk <wd@denx.de>
+ others: Mike Frysinger <vapier@gentoo.org>, Fred Bloggs <f.bloggs@napier.net>
+
++As hinted above, Patman will also look for a `.patman` configuration
++file at the root of the current project git repository, which makes it
++possible to override the `project` settings variable or anything else
++in a project-specific way. The values of this "local" configuration
++file take precedence over those of the "global" one.
++
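++For example, a repository-local `.patman` selecting a different
++project and skipping the checkpatch step could contain::
++
++ [settings]
++ project: guix-patches
++ check_patch: False
++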
+ Aliases are recursive.
+
+ The checkpatch.pl in the U-Boot tools/ subdirectory will be located and
+@@ -680,6 +702,16 @@ them:
+
+ $ tools/patman/patman test
+
++Note that since the test suite depends on data files only available in
++the git checkout, the `test` command is hidden unless `patman` is
++invoked from the U-Boot git repository.
++
++Alternatively, you can run the test suite via pytest:
++
++.. code-block:: bash
++
++ $ cd tools/patman && pytest
++
+ Error handling doesn't always produce friendly error messages - e.g.
+ putting an incorrect tag in a commit may provide a confusing message.
+
+diff --git a/tools/patman/pytest.ini b/tools/patman/pytest.ini
+new file mode 100644
+index 0000000000..df3eb518d0
+--- /dev/null
++++ b/tools/patman/pytest.ini
+@@ -0,0 +1,2 @@
++[pytest]
++addopts = --doctest-modules
+diff --git a/tools/patman/series.py b/tools/patman/series.py
+index 3075378ac1..2eeeef71dc 100644
+--- a/tools/patman/series.py
++++ b/tools/patman/series.py
+@@ -235,7 +235,7 @@ class Series(dict):
+ print(col.build(col.RED, str))
+
+ def MakeCcFile(self, process_tags, cover_fname, warn_on_error,
+- add_maintainers, limit):
++ add_maintainers, limit, get_maintainer_script):
+ """Make a cc file for us to use for per-commit Cc automation
+
+ Also stores in self._generated_cc to make ShowActions() faster.
+@@ -249,6 +249,8 @@ class Series(dict):
+ True/False to call the get_maintainers to CC maintainers
+ List of maintainers to include (for testing)
+ limit: Limit the length of the Cc list (None if no limit)
++ get_maintainer_script: The file name of the get_maintainer.pl
++ script (or compatible).
+ Return:
+ Filename of temp file created
+ """
+@@ -267,8 +269,9 @@ class Series(dict):
+ if type(add_maintainers) == type(cc):
+ cc += add_maintainers
+ elif add_maintainers:
+- dir_list = [os.path.join(gitutil.get_top_level(), 'scripts')]
+- cc += get_maintainer.get_maintainer(dir_list, commit.patch)
++
++ cc += get_maintainer.get_maintainer(get_maintainer_script,
++ commit.patch)
+ for x in set(cc) & set(settings.bounces):
+ print(col.build(col.YELLOW, 'Skipping "%s"' % x))
+ cc = list(set(cc) - set(settings.bounces))
+diff --git a/tools/patman/settings.py b/tools/patman/settings.py
+index 903d6fcb0b..636983e32d 100644
+--- a/tools/patman/settings.py
++++ b/tools/patman/settings.py
+@@ -1,18 +1,18 @@
+ # SPDX-License-Identifier: GPL-2.0+
+ # Copyright (c) 2011 The Chromium OS Authors.
++# Copyright (c) 2022 Maxim Cournoyer <maxim.cournoyer@savoirfairelinux.com>
+ #
+
+ try:
+ import configparser as ConfigParser
+-except:
++except Exception:
+ import ConfigParser
+
+ import argparse
+ import os
+ import re
+
+-from patman import command
+-from patman import tools
++from patman import gitutil
+
+ """Default settings per-project.
+
+@@ -32,7 +32,8 @@ _default_settings = {
+ },
+ }
+
+-class _ProjectConfigParser(ConfigParser.SafeConfigParser):
++
++class _ProjectConfigParser(ConfigParser.ConfigParser):
+ """ConfigParser that handles projects.
+
+ There are two main goals of this class:
+@@ -83,14 +84,14 @@ class _ProjectConfigParser(ConfigParser.SafeConfigParser):
+ def __init__(self, project_name):
+ """Construct _ProjectConfigParser.
+
+- In addition to standard SafeConfigParser initialization, this also loads
+- project defaults.
++ In addition to standard ConfigParser initialization, this also
++ loads project defaults.
+
+ Args:
+ project_name: The name of the project.
+ """
+ self._project_name = project_name
+- ConfigParser.SafeConfigParser.__init__(self)
++ ConfigParser.ConfigParser.__init__(self)
+
+ # Update the project settings in the config based on
+ # the _default_settings global.
+@@ -102,31 +103,31 @@ class _ProjectConfigParser(ConfigParser.SafeConfigParser):
+ self.set(project_settings, setting_name, setting_value)
+
+ def get(self, section, option, *args, **kwargs):
+- """Extend SafeConfigParser to try project_section before section.
++ """Extend ConfigParser to try project_section before section.
+
+ Args:
+- See SafeConfigParser.
++ See ConfigParser.
+ Returns:
+- See SafeConfigParser.
++ See ConfigParser.
+ """
+ try:
+- val = ConfigParser.SafeConfigParser.get(
++ val = ConfigParser.ConfigParser.get(
+ self, "%s_%s" % (self._project_name, section), option,
+ *args, **kwargs
+ )
+ except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+- val = ConfigParser.SafeConfigParser.get(
++ val = ConfigParser.ConfigParser.get(
+ self, section, option, *args, **kwargs
+ )
+ return val
+
+ def items(self, section, *args, **kwargs):
+- """Extend SafeConfigParser to add project_section to section.
++ """Extend ConfigParser to add project_section to section.
+
+ Args:
+- See SafeConfigParser.
++ See ConfigParser.
+ Returns:
+- See SafeConfigParser.
++ See ConfigParser.
+ """
+ project_items = []
+ has_project_section = False
+@@ -134,7 +135,7 @@ class _ProjectConfigParser(ConfigParser.SafeConfigParser):
+
+ # Get items from the project section
+ try:
+- project_items = ConfigParser.SafeConfigParser.items(
++ project_items = ConfigParser.ConfigParser.items(
+ self, "%s_%s" % (self._project_name, section), *args, **kwargs
+ )
+ has_project_section = True
+@@ -143,7 +144,7 @@ class _ProjectConfigParser(ConfigParser.SafeConfigParser):
+
+ # Get top-level items
+ try:
+- top_items = ConfigParser.SafeConfigParser.items(
++ top_items = ConfigParser.ConfigParser.items(
+ self, section, *args, **kwargs
+ )
+ except ConfigParser.NoSectionError:
+@@ -155,6 +156,7 @@ class _ProjectConfigParser(ConfigParser.SafeConfigParser):
+ item_dict.update(project_items)
+ return {(item, val) for item, val in item_dict.items()}
+
++
+ def ReadGitAliases(fname):
+ """Read a git alias file. This is in the form used by git:
+
+@@ -170,7 +172,7 @@ def ReadGitAliases(fname):
+ print("Warning: Cannot find alias file '%s'" % fname)
+ return
+
+- re_line = re.compile('alias\s+(\S+)\s+(.*)')
++ re_line = re.compile(r'alias\s+(\S+)\s+(.*)')
+ for line in fd.readlines():
+ line = line.strip()
+ if not line or line[0] == '#':
+@@ -190,7 +192,8 @@ def ReadGitAliases(fname):
+
+ fd.close()
+
+-def CreatePatmanConfigFile(gitutil, config_fname):
++
++def CreatePatmanConfigFile(config_fname):
+ """Creates a config file under $(HOME)/.patman if it can't find one.
+
+ Args:
+@@ -200,12 +203,12 @@ def CreatePatmanConfigFile(gitutil, config_fname):
+ None
+ """
+ name = gitutil.get_default_user_name()
+- if name == None:
++ if name is None:
+ name = input("Enter name: ")
+
+ email = gitutil.get_default_user_email()
+
+- if email == None:
++ if email is None:
+ email = input("Enter email: ")
+
+ try:
+@@ -220,7 +223,8 @@ me: %s <%s>
+ [bounces]
+ nxp = Zhikang Zhang <zhikang.zhang@nxp.com>
+ ''' % (name, email), file=f)
+- f.close();
++ f.close()
++
+
+ def _UpdateDefaults(main_parser, config):
+ """Update the given OptionParser defaults based on config.
+@@ -242,8 +246,8 @@ def _UpdateDefaults(main_parser, config):
+ # Find all the parsers and subparsers
+ parsers = [main_parser]
+ parsers += [subparser for action in main_parser._actions
+- if isinstance(action, argparse._SubParsersAction)
+- for _, subparser in action.choices.items()]
++ if isinstance(action, argparse._SubParsersAction)
++ for _, subparser in action.choices.items()]
+
+ # Collect the defaults from each parser
+ defaults = {}
+@@ -270,8 +274,9 @@ def _UpdateDefaults(main_parser, config):
+ # Set all the defaults and manually propagate them to subparsers
+ main_parser.set_defaults(**defaults)
+ for parser, pdefs in zip(parsers, parser_defaults):
+- parser.set_defaults(**{ k: v for k, v in defaults.items()
+- if k in pdefs })
++ parser.set_defaults(**{k: v for k, v in defaults.items()
++ if k in pdefs})
++
+
+ def _ReadAliasFile(fname):
+ """Read in the U-Boot git alias file if it exists.
+@@ -298,6 +303,7 @@ def _ReadAliasFile(fname):
+ if bad_line:
+ print(bad_line)
+
++
+ def _ReadBouncesFile(fname):
+ """Read in the bounces file if it exists
+
+@@ -311,6 +317,7 @@ def _ReadBouncesFile(fname):
+ continue
+ bounces.add(line.strip())
+
++
+ def GetItems(config, section):
+ """Get the items from a section of the config.
+
+@@ -323,31 +330,50 @@ def GetItems(config, section):
+ """
+ try:
+ return config.items(section)
+- except ConfigParser.NoSectionError as e:
++ except ConfigParser.NoSectionError:
+ return []
+- except:
+- raise
+
+-def Setup(gitutil, parser, project_name, config_fname=''):
++
++def Setup(parser, project_name, config_fname=None):
+ """Set up the settings module by reading config files.
+
++ Unless `config_fname` is specified, a `.patman` config file local
++ to the git repository is consulted, followed by the global
++ `$HOME/.patman`. If none exists, the latter is created. Values
++ defined in the local config file take precedence over those
++ defined in the global one.
++
+ Args:
+- parser: The parser to update
++ parser: The parser to update.
+ project_name: Name of project that we're working on; we'll look
+ for sections named "project_section" as well.
+- config_fname: Config filename to read ('' for default)
++ config_fname: Config filename to read. An error is raised if it
++ does not exist.
+ """
+ # First read the git alias file if available
+ _ReadAliasFile('doc/git-mailrc')
+ config = _ProjectConfigParser(project_name)
+- if config_fname == '':
++
++ if config_fname and not os.path.exists(config_fname):
++ raise Exception(f'provided {config_fname} does not exist')
++
++ if not config_fname:
+ config_fname = '%s/.patman' % os.getenv('HOME')
++ has_config = os.path.exists(config_fname)
++
++ git_local_config_fname = os.path.join(gitutil.get_top_level(), '.patman')
++ has_git_local_config = os.path.exists(git_local_config_fname)
+
+- if not os.path.exists(config_fname):
+- print("No config file found ~/.patman\nCreating one...\n")
+- CreatePatmanConfigFile(gitutil, config_fname)
++ # Read the git local config last, so that its values override
++ # those of the global config, if any.
++ if has_config:
++ config.read(config_fname)
++ if has_git_local_config:
++ config.read(git_local_config_fname)
+
+- config.read(config_fname)
++ if not (has_config or has_git_local_config):
++ print("No config file found.\nCreating ~/.patman...\n")
++ CreatePatmanConfigFile(config_fname)
+
+ for name, value in GetItems(config, 'alias'):
+ alias[name] = value.split(',')
+@@ -358,6 +384,7 @@ def Setup(gitutil, parser, project_name, config_fname=''):
+
+ _UpdateDefaults(parser, config)
+
++
+ # These are the aliases we understand, indexed by alias. Each member is a list.
+ alias = {}
+ bounces = set()
+diff --git a/tools/patman/setup.py b/tools/patman/setup.py
+index 5643bf1503..2ff791da0f 100644
+--- a/tools/patman/setup.py
++++ b/tools/patman/setup.py
+@@ -7,6 +7,6 @@ setup(name='patman',
+ scripts=['patman'],
+ packages=['patman'],
+ package_dir={'patman': ''},
+- package_data={'patman': ['README']},
++ package_data={'patman': ['README.rst']},
+ classifiers=['Environment :: Console',
+ 'Topic :: Software Development'])
+diff --git a/tools/patman/test_settings.py b/tools/patman/test_settings.py
+new file mode 100644
+index 0000000000..c768a2fc64
+--- /dev/null
++++ b/tools/patman/test_settings.py
+@@ -0,0 +1,67 @@
++# SPDX-License-Identifier: GPL-2.0+
++#
++# Copyright (c) 2022 Maxim Cournoyer <maxim.cournoyer@savoirfairelinux.com>
++#
++
++import argparse
++import contextlib
++import os
++import sys
++import tempfile
++
++from patman import settings
++from patman import tools
++
++
++@contextlib.contextmanager
++def empty_git_repository():
++ with tempfile.TemporaryDirectory() as tmpdir:
++ os.chdir(tmpdir)
++ tools.run('git', 'init', raise_on_error=True)
++ yield tmpdir
++
++
++@contextlib.contextmanager
++def cleared_command_line_args():
++ old_value = sys.argv[:]
++ sys.argv = [sys.argv[0]]
++ try:
++ yield
++ finally:
++ sys.argv = old_value
++
++
++def test_git_local_config():
++ # Clearing the command line arguments is required, otherwise
++ # arguments passed to the test running such as in 'pytest -k
++ # filter' would be processed by _UpdateDefaults and fail.
++ with cleared_command_line_args():
++ with empty_git_repository():
++ with tempfile.NamedTemporaryFile() as global_config:
++ global_config.write(b'[settings]\n'
++ b'project=u-boot\n')
++ global_config.flush()
++ parser = argparse.ArgumentParser()
++ parser.add_argument('-p', '--project', default='unknown')
++ subparsers = parser.add_subparsers(dest='cmd')
++ send = subparsers.add_parser('send')
++ send.add_argument('--no-check', action='store_false',
++ dest='check_patch', default=True)
++
++ # Test "global" config is used.
++ settings.Setup(parser, 'unknown', global_config.name)
++ args, _ = parser.parse_known_args([])
++ assert args.project == 'u-boot'
++ send_args, _ = send.parse_known_args([])
++ assert send_args.check_patch
++
++ # Test local config can shadow it.
++ with open('.patman', 'w', buffering=1) as f:
++ f.write('[settings]\n'
++ 'project: guix-patches\n'
++ 'check_patch: False\n')
++ settings.Setup(parser, 'unknown', global_config.name)
++ args, _ = parser.parse_known_args([])
++ assert args.project == 'guix-patches'
++ send_args, _ = send.parse_known_args([])
++ assert not send_args.check_patch
diff --git a/gnu/packages/patches/ultrastar-deluxe-no-freesans.patch b/gnu/packages/patches/ultrastar-deluxe-no-freesans.patch
new file mode 100644
index 0000000000..7beba80774
--- /dev/null
+++ b/gnu/packages/patches/ultrastar-deluxe-no-freesans.patch
@@ -0,0 +1,31 @@
+Remove references to FreeSans font, which is not packaged for Guix.
+
+--- a/game/fonts/fonts.ini 1970-01-01 01:00:01.000000000 +0100
++++ b/game/fonts/fonts.ini 2022-09-16 14:31:51.483096847 +0200
+@@ -116,26 +116,6 @@
+ BoldHighResPreCache=0
+ BoldHighResOutline=0.02
+
+-[Font_FreeSans]
+-Name=Free Sans
+-RegularFile=FreeSans/FreeSans.ttf
+-RegularFallbackFile1=wqy-microhei/wqy-microhei.ttc
+-;RegularGlyphSpacing=1.4
+-;RegularStretch=1.2
+-BoldFile=FreeSans/FreeSansBold.ttf
+-BoldFallbackFile1=wqy-microhei/wqy-microhei.ttc
+-;BoldEmbolden=0.06
+-OutlineFile=FreeSans/FreeSansBold.ttf
+-OutlineOutline=0.06
+-;OutlineColorR=0.3
+-;OutlineColorG=0.3
+-;OutlineColorB=0.3
+-;OutlineColorA=0.3
+-BoldHighResFile=FreeSans/FreeSansBold.ttf
+-BoldHighResMaxResolution=256
+-BoldHighResPreCache=0
+-BoldHighResOutline=0.02
+-
+ [Font_DejaVuSans]
+ Name=DejaVu Sans
+ RegularFile=DejaVu/DejaVuSans.ttf
diff --git a/gnu/packages/patches/upx-CVE-2021-20285.patch b/gnu/packages/patches/upx-CVE-2021-20285.patch
deleted file mode 100644
index 1d47b2a8bb..0000000000
--- a/gnu/packages/patches/upx-CVE-2021-20285.patch
+++ /dev/null
@@ -1,76 +0,0 @@
-From 3781df9da23840e596d5e9e8493f22666802fe6c Mon Sep 17 00:00:00 2001
-From: John Reiser <jreiser@BitWagon.com>
-Date: Fri, 11 Dec 2020 13:38:18 -0800
-Subject: [PATCH] Check DT_REL/DT_RELA, DT_RELSZ/DT_RELASZ
-
-https://github.com/upx/upx/issues/421
- modified: p_lx_elf.cpp
----
- src/p_lx_elf.cpp | 34 +++++++++++++++++++++++++++++-----
- 1 file changed, 29 insertions(+), 5 deletions(-)
-
-diff --git a/src/p_lx_elf.cpp b/src/p_lx_elf.cpp
-index 182db192..3a4101cf 100644
---- a/src/p_lx_elf.cpp
-+++ b/src/p_lx_elf.cpp
-@@ -2222,8 +2222,20 @@ bool PackLinuxElf32::canPack()
- int z_rsz = dt_table[Elf32_Dyn::DT_RELSZ];
- if (z_rel && z_rsz) {
- unsigned rel_off = get_te32(&dynseg[-1+ z_rel].d_val);
-+ if ((unsigned)file_size <= rel_off) {
-+ char msg[70]; snprintf(msg, sizeof(msg),
-+ "bad Elf32_Dynamic[DT_REL] %#x\n",
-+ rel_off);
-+ throwCantPack(msg);
-+ }
- Elf32_Rel *rp = (Elf32_Rel *)&file_image[rel_off];
- unsigned relsz = get_te32(&dynseg[-1+ z_rsz].d_val);
-+ if ((unsigned)file_size <= relsz) {
-+ char msg[70]; snprintf(msg, sizeof(msg),
-+ "bad Elf32_Dynamic[DT_RELSZ] %#x\n",
-+ relsz);
-+ throwCantPack(msg);
-+ }
- Elf32_Rel *last = (Elf32_Rel *)(relsz + (char *)rp);
- for (; rp < last; ++rp) {
- unsigned r_va = get_te32(&rp->r_offset);
-@@ -2562,14 +2574,26 @@ PackLinuxElf64::canPack()
- int z_rel = dt_table[Elf64_Dyn::DT_RELA];
- int z_rsz = dt_table[Elf64_Dyn::DT_RELASZ];
- if (z_rel && z_rsz) {
-- unsigned rel_off = get_te64(&dynseg[-1+ z_rel].d_val);
-+ upx_uint64_t rel_off = get_te64(&dynseg[-1+ z_rel].d_val);
-+ if ((u64_t)file_size <= rel_off) {
-+ char msg[70]; snprintf(msg, sizeof(msg),
-+ "bad Elf64_Dynamic[DT_RELA] %#llx\n",
-+ rel_off);
-+ throwCantPack(msg);
-+ }
- Elf64_Rela *rp = (Elf64_Rela *)&file_image[rel_off];
-- unsigned relsz = get_te64(&dynseg[-1+ z_rsz].d_val);
-+ upx_uint64_t relsz = get_te64(&dynseg[-1+ z_rsz].d_val);
-+ if ((u64_t)file_size <= relsz) {
-+ char msg[70]; snprintf(msg, sizeof(msg),
-+ "bad Elf64_Dynamic[DT_RELASZ] %#llx\n",
-+ relsz);
-+ throwCantPack(msg);
-+ }
- Elf64_Rela *last = (Elf64_Rela *)(relsz + (char *)rp);
- for (; rp < last; ++rp) {
-- unsigned r_va = get_te64(&rp->r_offset);
-+ upx_uint64_t r_va = get_te64(&rp->r_offset);
- if (r_va == user_init_ava) { // found the Elf64_Rela
-- unsigned r_info = get_te64(&rp->r_info);
-+ upx_uint64_t r_info = get_te64(&rp->r_info);
- unsigned r_type = ELF64_R_TYPE(r_info);
- if (Elf64_Ehdr::EM_AARCH64 == e_machine
- && R_AARCH64_RELATIVE == r_type) {
-@@ -2581,7 +2605,7 @@ PackLinuxElf64::canPack()
- }
- else {
- char msg[50]; snprintf(msg, sizeof(msg),
-- "bad relocation %#x DT_INIT_ARRAY[0]",
-+ "bad relocation %#llx DT_INIT_ARRAY[0]",
- r_info);
- throwCantPack(msg);
- }
diff --git a/gnu/packages/patches/vtk-fix-freetypetools-build-failure.patch b/gnu/packages/patches/vtk-fix-freetypetools-build-failure.patch
deleted file mode 100644
index 23f651b5eb..0000000000
--- a/gnu/packages/patches/vtk-fix-freetypetools-build-failure.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-This fixes a build failure in VTK when building against recent versions
-of freetype.
-
- https://gitlab.kitware.com/vtk/vtk/-/merge_requests/7432
-
-Patch by Ben Boeckel <ben.boeckel@kitware.com>
-
-Subject: [PATCH] vtkFreeTypeTools: avoid using an internal macro
-
-This macro has been removed upstream as it was always intended to be
-private.
----
- Rendering/FreeType/vtkFreeTypeTools.cxx | 7 ++-----
- 1 file changed, 2 insertions(+), 5 deletions(-)
-
-diff --git a/Rendering/FreeType/vtkFreeTypeTools.cxx b/Rendering/FreeType/vtkFreeTypeTools.cxx
-index c54289dc60..03b899c4da 100644
---- a/Rendering/FreeType/vtkFreeTypeTools.cxx
-+++ b/Rendering/FreeType/vtkFreeTypeTools.cxx
-@@ -378,8 +378,7 @@ FTC_CMapCache* vtkFreeTypeTools::GetCMapCache()
- }
-
- //----------------------------------------------------------------------------
--FT_CALLBACK_DEF(FT_Error)
--vtkFreeTypeToolsFaceRequester(
-+static FT_Error vtkFreeTypeToolsFaceRequester(
- FTC_FaceID face_id, FT_Library lib, FT_Pointer request_data, FT_Face* face)
- {
- #if VTK_FTFC_DEBUG_CD
---
-2.30.1
-
diff --git a/gnu/packages/patches/wacomtablet-add-missing-includes.patch b/gnu/packages/patches/wacomtablet-add-missing-includes.patch
new file mode 100644
index 0000000000..cd93938075
--- /dev/null
+++ b/gnu/packages/patches/wacomtablet-add-missing-includes.patch
@@ -0,0 +1,11 @@
+diff -rup wacomtablet-3.1.0/src/kcmodule/styluspagewidget.cpp wacomtablet-3.1.0.new/src/kcmodule/styluspagewidget.cpp
+--- wacomtablet-3.1.0/src/kcmodule/styluspagewidget.cpp 2018-06-22 15:07:58.000000000 +0200
++++ wacomtablet-3.1.0.new/src/kcmodule/styluspagewidget.cpp 2018-07-10 17:38:09.239242847 +0200
+@@ -25,6 +25,7 @@
+ #include "profilemanagement.h"
+
+ // common includes
++#include "logging.h"
+ #include "property.h"
+ #include "deviceprofile.h"
+ #include "dbustabletinterface.h"
diff --git a/gnu/packages/patches/wacomtablet-qt5.15.patch b/gnu/packages/patches/wacomtablet-qt5.15.patch
new file mode 100644
index 0000000000..598ac939e8
--- /dev/null
+++ b/gnu/packages/patches/wacomtablet-qt5.15.patch
@@ -0,0 +1,23 @@
+From 4f73ff02b3efd5e8728b18fcf1067eca166704ee Mon Sep 17 00:00:00 2001
+From: Christophe Giboudeaux <christophe@krop.fr>
+Date: Thu, 4 Jun 2020 16:00:38 +0200
+Subject: [PATCH] Fix build with Qt 5.15
+
+---
+ src/kcmodule/pressurecurvewidget.cpp | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/kcmodule/pressurecurvewidget.cpp b/src/kcmodule/pressurecurvewidget.cpp
+index 0c943b3..f047a6c 100644
+--- a/src/kcmodule/pressurecurvewidget.cpp
++++ b/src/kcmodule/pressurecurvewidget.cpp
+@@ -22,6 +22,7 @@
+ //Qt includes
+ #include <QDebug>
+ #include <QPainter>
++#include <QPainterPath>
+ #include <QMouseEvent>
+ #include <QResizeEvent>
+ #include <QTabletEvent>
+--
+GitLab
diff --git a/gnu/packages/patches/wdl-link-libs-and-fix-jnetlib.patch b/gnu/packages/patches/wdl-link-libs-and-fix-jnetlib.patch
new file mode 100644
index 0000000000..29ec0f058c
--- /dev/null
+++ b/gnu/packages/patches/wdl-link-libs-and-fix-jnetlib.patch
@@ -0,0 +1,53 @@
+From 2d129502354da4fe39dac13463ea742f8026ab91 Mon Sep 17 00:00:00 2001
+From: Sughosha <sughosha@proton.me>
+Date: Tue, 3 Jan 2023 22:15:09 +0100
+Subject: [PATCH] Link libs and fix jnetlib.
+
+---
+ WDL/eel2/Makefile | 2 +-
+ WDL/jnetlib/Makefile | 2 +-
+ WDL/swell/Makefile | 2 +-
+ 3 files changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/WDL/eel2/Makefile b/WDL/eel2/Makefile
+index ac2e41f1..0ffca97a 100644
+--- a/WDL/eel2/Makefile
++++ b/WDL/eel2/Makefile
+@@ -153,7 +153,7 @@ gen-lex: # the output of this, lex.nseel.c, is unused because we have a handwri
+ $(CXX) $(CXXFLAGS) -c -o $@ $^
+
+ loose_eel: loose_eel.o $(OBJS) $(OBJS2)
+- g++ -o $@ $^ $(CXXFLAGS) $(LFLAGS)
++ g++ -o $@ $^ $(CXXFLAGS) $(LFLAGS) -lGL
+
+ clean:
+ -rm -f -- loose_eel loose_eel.o $(OBJS)
+diff --git a/WDL/jnetlib/Makefile b/WDL/jnetlib/Makefile
+index 10d9fe8a..85570c5a 100644
+--- a/WDL/jnetlib/Makefile
++++ b/WDL/jnetlib/Makefile
+@@ -7,7 +7,7 @@ CC = gcc
+ CPP = g++
+ CXX = g++
+
+-OBJS = asyncdns.o connection.o httpget.o httpserv.o listen.o util.o sercon.o
++OBJS = asyncdns.o connection.o httpget.o httpserv.o listen.o util.o
+
+ jnl.a: ${OBJS}
+ -rm -f jnl.a
+diff --git a/WDL/swell/Makefile b/WDL/swell/Makefile
+index 9e7e2d87..8e98a543 100644
+--- a/WDL/swell/Makefile
++++ b/WDL/swell/Makefile
+@@ -167,7 +167,7 @@ libSwell$(DLL_EXT): $(OBJS)
+ $(CXX) -shared -o $@ $(CFLAGS) $(LFLAGS) $^ $(LINKEXTRA)
+
+ test: $(OBJS) test.o
+- $(CXX) -o test $(CFLAGS) $(LFLAGS) $^ $(LINKEXTRA)
++ $(CXX) -o test $(CFLAGS) $(LFLAGS) $^ $(LINKEXTRA) -lpthread -ldl $(shell $(PKG_CONFIG) --libs gtk+-3.0 freetype2) -lfontconfig -lGL -lXi -lX11
+
+ libSwell.colortheme: swell-gdi-generic.cpp $(SWELL_HEADERS)
+ $(CXX) $(CFLAGS) -o make-theme -DSWELL__MAKE_THEME swell-gdi-generic.cpp -lpthread
+--
+2.38.1
+
diff --git a/gnu/packages/patches/webrtc-for-telegram-desktop-fix-gcc12-cstdint.patch b/gnu/packages/patches/webrtc-for-telegram-desktop-fix-gcc12-cstdint.patch
new file mode 100644
index 0000000000..f1fd29d0d3
--- /dev/null
+++ b/gnu/packages/patches/webrtc-for-telegram-desktop-fix-gcc12-cstdint.patch
@@ -0,0 +1,21 @@
+From 86d2bcd7afb8706663d29e30f65863de5a626142 Mon Sep 17 00:00:00 2001
+From: Xiretza <xiretza@xiretza.xyz>
+Date: Sun, 15 May 2022 12:47:41 +0200
+Subject: [PATCH] fix(h265_pps_parser): fix missing cstdint include
+
+---
+ src/common_video/h265/h265_pps_parser.h | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/common_video/h265/h265_pps_parser.h b/src/common_video/h265/h265_pps_parser.h
+index 28c95ea9..c180b1b9 100644
+--- a/src/common_video/h265/h265_pps_parser.h
++++ b/src/common_video/h265/h265_pps_parser.h
+@@ -12,6 +12,7 @@
+ #define COMMON_VIDEO_H265_PPS_PARSER_H_
+
+ #include "absl/types/optional.h"
++#include <cstdint>
+
+ namespace rtc {
+ class BitBuffer;