diff --git a/.cirrus.yml b/.cirrus.yml
index e9c39edca51805..0fe0a8b365d53b 100644
--- a/.cirrus.yml
+++ b/.cirrus.yml
@@ -47,10 +47,10 @@ compute_credits_template: &CREDITS_TEMPLATE
 
 task:
-  name: 'lint [jammy]'
+  name: 'lint [bookworm]'
   << : *BASE_TEMPLATE
   container:
-    image: ubuntu:jammy
+    image: debian:bookworm
     cpu: 1
     memory: 1G
   # For faster CI feedback, immediately schedule the linters
diff --git a/ci/test/00_setup_env.sh b/ci/test/00_setup_env.sh
index a9780e21392405..b706481b15aedf 100755
--- a/ci/test/00_setup_env.sh
+++ b/ci/test/00_setup_env.sh
@@ -45,7 +45,6 @@ export RUN_SECURITY_TESTS=${RUN_SECURITY_TESTS:-false}
 # might be slow or a reindex might be waiting on disk IO.
 export TEST_RUNNER_TIMEOUT_FACTOR=${TEST_RUNNER_TIMEOUT_FACTOR:-4}
 export RUN_FUZZ_TESTS=${RUN_FUZZ_TESTS:-false}
-export EXPECTED_TESTS_DURATION_IN_SECONDS=${EXPECTED_TESTS_DURATION_IN_SECONDS:-1000}
 export CONTAINER_NAME=${CONTAINER_NAME:-ci_unnamed}
 export DOCKER_NAME_TAG=${DOCKER_NAME_TAG:-ubuntu:20.04}
diff --git a/contrib/guix/libexec/build.sh b/contrib/guix/libexec/build.sh
index ea1d5079ee3d7e..b44eff598e22ff 100755
--- a/contrib/guix/libexec/build.sh
+++ b/contrib/guix/libexec/build.sh
@@ -73,11 +73,9 @@ unset OBJCPLUS_INCLUDE_PATH
 
 export C_INCLUDE_PATH="${NATIVE_GCC}/include"
 export CPLUS_INCLUDE_PATH="${NATIVE_GCC}/include/c++:${NATIVE_GCC}/include"
-export OBJC_INCLUDE_PATH="${NATIVE_GCC}/include"
-export OBJCPLUS_INCLUDE_PATH="${NATIVE_GCC}/include/c++:${NATIVE_GCC}/include"
 
 case "$HOST" in
-    *darwin*) export LIBRARY_PATH="${NATIVE_GCC}/lib" ;;
+    *darwin*) export LIBRARY_PATH="${NATIVE_GCC}/lib" ;; # Required for qt/qmake
     *mingw*) export LIBRARY_PATH="${NATIVE_GCC}/lib" ;;
     *)
         NATIVE_GCC_STATIC="$(store_path gcc-toolchain static)"
@@ -182,6 +180,14 @@ make -C depends --jobs="$JOBS" HOST="$HOST" \
                                    x86_64_linux_NM=x86_64-linux-gnu-gcc-nm \
                                    x86_64_linux_STRIP=x86_64-linux-gnu-strip
 
+case "$HOST" in
+    *darwin*)
+        # Unset now that Qt is built
+        unset C_INCLUDE_PATH
+        unset CPLUS_INCLUDE_PATH
+        unset LIBRARY_PATH
+        ;;
+esac
 
 ###########################
 # Source Tarball Building #
diff --git a/contrib/guix/manifest.scm b/contrib/guix/manifest.scm
index 789792f554fab5..6a6339e8e8db0e 100644
--- a/contrib/guix/manifest.scm
+++ b/contrib/guix/manifest.scm
@@ -7,7 +7,6 @@
              (gnu packages commencement)
              (gnu packages compression)
              (gnu packages cross-base)
-             (gnu packages curl)
             (gnu packages file)
             (gnu packages gawk)
             (gnu packages gcc)
@@ -20,7 +19,6 @@
             ((gnu packages python) #:select (python-minimal))
             ((gnu packages python-build) #:select (python-tomli))
             ((gnu packages python-crypto) #:select (python-asn1crypto))
-            ((gnu packages python-xyz) #:select (python-altgraph))
             ((gnu packages tls) #:select (openssl))
             ((gnu packages version-control) #:select (git-minimal))
             (guix build-system cmake)
@@ -83,11 +81,11 @@ FILE-NAME found in ./patches relative to the current file."
    (build-system trivial-build-system)
    (arguments '(#:builder (begin (mkdir %output) #t)))
    (propagated-inputs
-     `(("binutils" ,xbinutils)
-       ("libc" ,xlibc)
-       ("libc:static" ,xlibc "static")
-       ("gcc" ,xgcc)
-       ("gcc-lib" ,xgcc "lib")))
+     (list xbinutils
+           xlibc
+           xgcc
+           `(,xlibc "static")
+           `(,xgcc "lib")))
    (synopsis (string-append "Complete GCC tool chain for " target))
    (description (string-append "This package provides a complete GCC tool
chain for " target " development."))
@@ -150,10 +148,10 @@ desirable for building Dash Core release binaries."
    (build-system trivial-build-system)
    (arguments '(#:builder (begin (mkdir %output) #t)))
    (propagated-inputs
-     `(("binutils" ,xbinutils)
-       ("libc" ,pthreads-xlibc)
-       ("gcc" ,pthreads-xgcc)
-       ("gcc-lib" ,pthreads-xgcc "lib")))
+     (list xbinutils
+           pthreads-xlibc
+           pthreads-xgcc
+           `(,pthreads-xgcc "lib")))
    (synopsis (string-append "Complete GCC tool chain for " target))
    (description (string-append "This package provides a complete GCC tool
chain for " target " development."))
@@ -218,11 +216,7 @@ and abstract ELF, PE and MachO formats.")
       (base32 "1j47vwq4caxfv0xw68kw5yh00qcpbd56d7rq6c483ma3y7s96yyz"))))
    (build-system cmake-build-system)
-    (inputs
-     `(("openssl", openssl)))
-    (arguments
-     '(#:configure-flags
-       (list "-DCMAKE_DISABLE_FIND_PACKAGE_CURL=TRUE")))
+    (inputs (list openssl))
    (home-page "https://github.com/mtrojnar/osslsigncode")
    (synopsis "Authenticode signing and timestamping tool")
    (description "osslsigncode is a small tool that implements part of the
@@ -279,8 +273,7 @@ thus should be able to compile on most platforms where these exist.")
                 (files '("etc/ssl/certs/ca-certificates.crt")))))
    (propagated-inputs
-     `(("python-asn1crypto" ,python-asn1crypto)
-       ("openssl" ,openssl)))
+     (list python-asn1crypto openssl))
    (arguments
     `(#:phases
       (modify-phases %standard-phases
@@ -318,7 +311,7 @@ thus should be able to compile on most platforms where these exist.")
   (package
     (inherit python-oscrypto)
     (name "python-oscryptotests")
     (propagated-inputs
-      `(("python-oscrypto" ,python-oscrypto)))
+      (list python-oscrypto))
     (arguments
      `(#:tests? #f
        #:phases
@@ -345,9 +338,9 @@ thus should be able to compile on most platforms where these exist.")
        "1qw2k7xis53179lpqdqyylbcmp76lj7sagp883wmxg5i7chhc96k"))))
    (build-system python-build-system)
    (propagated-inputs
-     `(("python-asn1crypto" ,python-asn1crypto)
-       ("python-oscrypto" ,python-oscrypto)
-       ("python-oscryptotests", python-oscryptotests))) ;; certvalidator tests import oscryptotests
+     (list python-asn1crypto
+           python-oscrypto
+           python-oscryptotests)) ;; certvalidator tests import oscryptotests
    (arguments
     `(#:phases
       (modify-phases %standard-phases
@@ -395,56 +388,8 @@ certificates or paths. Supports various options, including: validation at a
 specific moment in time, whitelisting and revocation checks.")
    (license license:expat))))
 
-(define-public python-macholib
-  (package
-    (name "python-macholib")
-    (version "1.14")
-    (source
-      (origin
-        (method git-fetch)
-        (uri (git-reference
-               (url "https://github.com/ronaldoussoren/macholib")
-               (commit (string-append "v" version))))
-        (file-name (git-file-name name version))
-        (sha256
-         (base32
-          "0aislnnfsza9wl4f0vp45ivzlc0pzhp9d4r08700slrypn5flg42"))))
-    (build-system python-build-system)
-    (propagated-inputs
-     `(("python-altgraph" ,python-altgraph)))
-    (arguments
-     '(#:phases
-       (modify-phases %standard-phases
-         (add-after 'unpack 'disable-broken-tests
-           (lambda _
-             ;; This test is broken as there is no keyboard interrupt.
- (substitute* "macholib_tests/test_command_line.py" - (("^(.*)class TestCmdLine" line indent) - (string-append indent - "@unittest.skip(\"Disabled by Guix\")\n" - line))) - (substitute* "macholib_tests/test_dyld.py" - (("^(.*)def test_\\S+_find" line indent) - (string-append indent - "@unittest.skip(\"Disabled by Guix\")\n" - line)) - (("^(.*)def testBasic" line indent) - (string-append indent - "@unittest.skip(\"Disabled by Guix\")\n" - line)) - ) - #t))))) - (home-page "https://github.com/ronaldoussoren/macholib") - (synopsis "Python library for analyzing and editing Mach-O headers") - (description "macholib is a Macho-O header analyzer and editor. It's -typically used as a dependency analysis tool, and also to rewrite dylib -references in Mach-O headers to be @executable_path relative. Though this tool -targets a platform specific file format, it is pure python code that is platform -and endian independent.") - (license license:expat))) - (define-public python-signapple - (let ((commit "7a96b4171a360abf0f0f56e499f8f9ed2116280d")) + (let ((commit "62155712e7417aba07565c9780a80e452823ae6a")) (package (name "python-signapple") (version (git-version "0.1" "1" commit)) @@ -457,14 +402,13 @@ and endian independent.") (file-name (git-file-name name commit)) (sha256 (base32 - "0aa4k180jnpal15yhncnm3g3z9gzmi7qb25q5l0kaj444a1p2pm4")))) + "1nm6rm4h4m7kbq729si4cm8rzild62mk4ni8xr5zja7l33fhv3gb")))) (build-system python-build-system) (propagated-inputs - `(("python-asn1crypto" ,python-asn1crypto) - ("python-oscrypto" ,python-oscrypto) - ("python-certvalidator" ,python-certvalidator) - ("python-elfesteem" ,python-elfesteem) - ("python-macholib" ,python-macholib))) + (list python-asn1crypto + python-oscrypto + python-certvalidator + python-elfesteem)) ;; There are no tests, but attempting to run python setup.py test leads to ;; problems, just disable the test (arguments '(#:tests? 
#f)) @@ -584,7 +528,6 @@ inspecting signatures in Mach-O binaries.") autoconf-2.71 automake pkg-config - bison ;; Scripting python-minimal ;; (3.10) ;; Git @@ -599,7 +542,8 @@ inspecting signatures in Mach-O binaries.") nss-certs osslsigncode)) ((string-contains target "-linux-") - (list (list gcc-toolchain-12 "static") + (list bison + (list gcc-toolchain-12 "static") (make-bitcoin-cross-toolchain target))) ((string-contains target "darwin") (list clang-toolchain-18 diff --git a/contrib/macdeploy/gen-sdk b/contrib/macdeploy/gen-sdk index b73f5cba14c484..86a6262b5ce488 100755 --- a/contrib/macdeploy/gen-sdk +++ b/contrib/macdeploy/gen-sdk @@ -8,21 +8,6 @@ import gzip import os import contextlib -# monkey-patch Python 3.8 and older to fix wrong TAR header handling -# see https://github.com/bitcoin/bitcoin/pull/24534 -# and https://github.com/python/cpython/pull/18080 for more info -if sys.version_info < (3, 9): - _old_create_header = tarfile.TarInfo._create_header - def _create_header(info, format, encoding, errors): - buf = _old_create_header(info, format, encoding, errors) - # replace devmajor/devminor with binary zeroes - buf = buf[:329] + bytes(16) + buf[345:] - # recompute checksum - chksum = tarfile.calc_chksums(buf)[0] - buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:] - return buf - tarfile.TarInfo._create_header = staticmethod(_create_header) - @contextlib.contextmanager def cd(path): """Context manager that restores PWD even if an exception was raised.""" diff --git a/depends/hosts/darwin.mk b/depends/hosts/darwin.mk index 564381d1e9308e..a50e36110dc026 100644 --- a/depends/hosts/darwin.mk +++ b/depends/hosts/darwin.mk @@ -50,17 +50,11 @@ darwin_STRIP=$(shell $(SHELL) $(.SHELLFLAGS) "command -v llvm-strip") # Disable adhoc codesigning (for now) when using LLVM tooling, to avoid # non-determinism issues with the Identifier field. 
-darwin_CC=env -u C_INCLUDE_PATH -u CPLUS_INCLUDE_PATH \
-          -u OBJC_INCLUDE_PATH -u OBJCPLUS_INCLUDE_PATH -u CPATH \
-          -u LIBRARY_PATH \
-          $(clang_prog) --target=$(host) \
+darwin_CC=$(clang_prog) --target=$(host) \
           -isysroot$(OSX_SDK) -nostdlibinc \
           -iwithsysroot/usr/include -iframeworkwithsysroot/System/Library/Frameworks
 
-darwin_CXX=env -u C_INCLUDE_PATH -u CPLUS_INCLUDE_PATH \
-           -u OBJC_INCLUDE_PATH -u OBJCPLUS_INCLUDE_PATH -u CPATH \
-           -u LIBRARY_PATH \
-           $(clangxx_prog) --target=$(host) \
+darwin_CXX=$(clangxx_prog) --target=$(host) \
            -isysroot$(OSX_SDK) -nostdlibinc \
            -iwithsysroot/usr/include/c++/v1 \
            -iwithsysroot/usr/include -iframeworkwithsysroot/System/Library/Frameworks
diff --git a/depends/packages/qt.mk b/depends/packages/qt.mk
index 18c01e7cf0631a..a67feae59d41c5 100644
--- a/depends/packages/qt.mk
+++ b/depends/packages/qt.mk
@@ -25,6 +25,7 @@ $(package)_patches += memory_resource.patch
 $(package)_patches += clang_18_libpng.patch
 $(package)_patches += utc_from_string_no_optimize.patch
 $(package)_patches += windows_lto.patch
+$(package)_patches += darwin_no_libm.patch
 $(package)_patches += zlib-timebits64.patch
 
 $(package)_qttranslations_file_name=qttranslations-$($(package)_suffix)
@@ -255,9 +256,9 @@ define $(package)_preprocess_cmds
   patch -p1 -i $($(package)_patch_dir)/rcc_hardcode_timestamp.patch && \
   patch -p1 -i $($(package)_patch_dir)/duplicate_lcqpafonts.patch && \
   patch -p1 -i $($(package)_patch_dir)/utc_from_string_no_optimize.patch && \
-  patch -p1 -i $($(package)_patch_dir)/fast_fixed_dtoa_no_optimize.patch && \
   patch -p1 -i $($(package)_patch_dir)/guix_cross_lib_path.patch && \
   patch -p1 -i $($(package)_patch_dir)/windows_lto.patch && \
+  patch -p1 -i $($(package)_patch_dir)/darwin_no_libm.patch && \
   patch -p1 -i $($(package)_patch_dir)/zlib-timebits64.patch && \
   mkdir -p qtbase/mkspecs/macx-clang-linux &&\
   cp -f qtbase/mkspecs/macx-clang/qplatformdefs.h qtbase/mkspecs/macx-clang-linux/ &&\
diff --git a/depends/patches/qt/darwin_no_libm.patch b/depends/patches/qt/darwin_no_libm.patch
new file mode 100644
index 00000000000000..38a94beeb7a695
--- /dev/null
+++ b/depends/patches/qt/darwin_no_libm.patch
@@ -0,0 +1,17 @@
+build: remove explicit -lm link from qttools
+
+This causes issues with at least the macOS cross build, and shouldn't
+actually be required anywhere else. GCC with libstdc++ will already get libm.
+
+--- a/qtbase/src/corelib/tools/tools.pri
++++ b/qtbase/src/corelib/tools/tools.pri
+@@ -111,9 +111,6 @@ qtConfig(easingcurve) {
+         tools/qtimeline.cpp
+ }
+ 
+-# Note: libm should be present by default becaue this is C++
+-unix:!macx-icc:!vxworks:!haiku:!integrity:!wasm: LIBS_PRIVATE += -lm
+-
+ TR_EXCLUDE += ../3rdparty/*
+ 
+ # MIPS DSP
diff --git a/depends/patches/qt/fast_fixed_dtoa_no_optimize.patch b/depends/patches/qt/fast_fixed_dtoa_no_optimize.patch
deleted file mode 100644
index d4d6539f56dc4e..00000000000000
--- a/depends/patches/qt/fast_fixed_dtoa_no_optimize.patch
+++ /dev/null
@@ -1,20 +0,0 @@
-Modify the optimisation flags for FastFixedDtoa.
-This fixes a non-determinism issue in the asm produced for
-this function when cross-compiling on x86_64 and aarch64 for
-the arm-linux-gnueabihf HOST.
-
---- a/qtbase/src/3rdparty/double-conversion/fixed-dtoa.h
-+++ b/qtbase/src/3rdparty/double-conversion/fixed-dtoa.h
-@@ -48,9 +48,12 @@ namespace double_conversion {
- //
- // This method only works for some parameters. If it can't handle the input it
- // returns false. The output is null-terminated when the function succeeds.
-+#pragma GCC push_options
-+#pragma GCC optimize ("-O1")
- bool FastFixedDtoa(double v, int fractional_count,
-                    Vector<char> buffer, int* length, int* decimal_point);
-
-+#pragma GCC pop_options
- }  // namespace double_conversion
-
- #endif  // DOUBLE_CONVERSION_FIXED_DTOA_H_
diff --git a/src/addrman.h b/src/addrman.h
index d927232b2b9bd0..bd8b0f5d6d0dc8 100644
--- a/src/addrman.h
+++ b/src/addrman.h
@@ -121,13 +121,16 @@ class AddrMan
 
     /**
     * Attempt to add one or more addresses to addrman's new table.
+     * If an address already exists in addrman, the existing entry may be updated
+     * (e.g. adding additional service flags). If the existing entry is in the new table,
+     * it may be added to more buckets, improving the probability of selection.
     *
     * @param[in] vAddr           Address records to attempt to add.
     * @param[in] source          The address of the node that sent us these addr records.
     * @param[in] time_penalty    A "time penalty" to apply to the address record's nTime. If a peer
     *                            sends us an address record with nTime=n, then we'll add it to our
     *                            addrman with nTime=(n - time_penalty).
-     * @return    true if at least one address is successfully added. */
+     * @return    true if at least one address is successfully added, or added to an additional bucket. Unaffected by updates. */
    bool Add(const std::vector<CAddress>& vAddr, const CNetAddr& source, std::chrono::seconds time_penalty = 0s);
 
    /**
diff --git a/src/init/common.cpp b/src/init/common.cpp
index 2c074b50dd138c..edec9345f733bd 100644
--- a/src/init/common.cpp
+++ b/src/init/common.cpp
@@ -67,7 +67,7 @@ void AddLoggingArgs(ArgsManager& argsman)
                    ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
    argsman.AddArg("-debugexclude=<category>", "Exclude debug and trace logging for a category. Can be used in conjunction with -debug=1 to output debug and trace logging for all categories except the specified category. This option can be specified multiple times to exclude multiple categories.", ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
    argsman.AddArg("-logips", strprintf("Include IP addresses in debug output (default: %u)", DEFAULT_LOGIPS), ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
-    argsman.AddArg("-loglevel=<level>|<category>:<level>", strprintf("Set the global or per-category severity level for logging categories enabled with the -debug configuration option or the logging RPC: %s (default=%s); warning and error levels are always logged. If <category>:<level> is supplied, the setting will override the global one and may be specified multiple times to set multiple category-specific levels. <category> can be: %s.", LogInstance().LogLevelsString(), LogInstance().LogLevelToStr(BCLog::DEFAULT_LOG_LEVEL), LogInstance().LogCategoriesString()), ArgsManager::DISALLOW_NEGATION | ArgsManager::DISALLOW_ELISION | ArgsManager::DEBUG_ONLY, OptionsCategory::DEBUG_TEST);
+    argsman.AddArg("-loglevel=<level>|<category>:<level>", strprintf("Set the global or per-category severity level for logging categories enabled with the -debug configuration option or the logging RPC. Possible values are %s (default=%s). The following levels are always logged: error, warning, info. If <category>:<level> is supplied, the setting will override the global one and may be specified multiple times to set multiple category-specific levels. <category> can be: %s.", LogInstance().LogLevelsString(), LogInstance().LogLevelToStr(BCLog::DEFAULT_LOG_LEVEL), LogInstance().LogCategoriesString()), ArgsManager::DISALLOW_NEGATION | ArgsManager::DISALLOW_ELISION | ArgsManager::DEBUG_ONLY, OptionsCategory::DEBUG_TEST);
    argsman.AddArg("-logtimestamps", strprintf("Prepend debug output with timestamp (default: %u)", DEFAULT_LOGTIMESTAMPS), ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
 #ifdef HAVE_THREAD_LOCAL
    argsman.AddArg("-logthreadnames", strprintf("Prepend debug output with name of the originating thread (only available on platforms supporting thread_local) (default: %u)", DEFAULT_LOGTHREADNAMES), ArgsManager::ALLOW_ANY | ArgsManager::DEBUG_ONLY, OptionsCategory::DEBUG_TEST);
diff --git a/src/leveldb/include/leveldb/status.h b/src/leveldb/include/leveldb/status.h
index e3273144e48117..68efe3001a9080 100644
--- a/src/leveldb/include/leveldb/status.h
+++ b/src/leveldb/include/leveldb/status.h
@@ -103,6 +103,8 @@ class LEVELDB_EXPORT Status {
 inline Status::Status(const Status& rhs) {
   state_ = (rhs.state_ == nullptr) ? nullptr : CopyState(rhs.state_);
 }
+
+// NOLINTBEGIN(bugprone-unhandled-self-assignment)
 inline Status& Status::operator=(const Status& rhs) {
   // The following condition catches both aliasing (when this == &rhs),
   // and the common case where both rhs and *this are ok.
@@ -112,6 +114,8 @@ inline Status& Status::operator=(const Status& rhs) {
   }
   return *this;
 }
+// NOLINTEND(bugprone-unhandled-self-assignment)
+
 inline Status& Status::operator=(Status&& rhs) noexcept {
   std::swap(state_, rhs.state_);
   return *this;
diff --git a/src/net_processing.cpp b/src/net_processing.cpp
index e4540de44e0b3f..a9097e6ff1839b 100644
--- a/src/net_processing.cpp
+++ b/src/net_processing.cpp
@@ -3546,6 +3546,9 @@ void PeerManagerImpl::ProcessMessage(
        vRecv >> addrMe;
 
        if (!pfrom.IsInboundConn()) {
+            // Overwrites potentially existing services. In contrast to this,
+            // unvalidated services received via gossip relay in ADDR/ADDRV2
+            // messages are only ever added but cannot replace existing ones.
            m_addrman.SetServices(pfrom.addr, nServices);
        }
        if (pfrom.ExpectServicesFromConn() && !HasAllDesirableServiceFlags(nServices))
diff --git a/src/psbt.h b/src/psbt.h
index eb38cf74583752..e326c3bbb82cdd 100644
--- a/src/psbt.h
+++ b/src/psbt.h
@@ -192,7 +192,7 @@ struct PSBTInput
 
        if (final_script_sig.empty()) {
            // Write any partial signatures
-            for (auto sig_pair : partial_sigs) {
+            for (const auto& sig_pair : partial_sigs) {
                SerializeToVector(s, CompactSizeWriter(PSBT_IN_PARTIAL_SIG), Span{sig_pair.second.first});
                s << sig_pair.second.second;
            }
diff --git a/src/qt/test/util.cpp b/src/qt/test/util.cpp
index 635dbcd1c584eb..7bbd353237b31a 100644
--- a/src/qt/test/util.cpp
+++ b/src/qt/test/util.cpp
@@ -2,6 +2,8 @@
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
+#include 
+
 #include 
 #include 
diff --git a/src/qt/test/util.h b/src/qt/test/util.h
index f50a6b6c6178af..336be3091579f1 100644
--- a/src/qt/test/util.h
+++ b/src/qt/test/util.h
@@ -7,6 +7,8 @@
 
 #include 
 
+#include 
+
 QT_BEGIN_NAMESPACE
 class QString;
 QT_END_NAMESPACE
diff --git a/src/test/addrman_tests.cpp b/src/test/addrman_tests.cpp
index 33fefd9107943a..370334e38266dc 100644
--- a/src/test/addrman_tests.cpp
+++ b/src/test/addrman_tests.cpp
@@ -1048,7 +1048,7 @@ BOOST_AUTO_TEST_CASE(load_addrman_corrupted)
 
 BOOST_AUTO_TEST_CASE(addrman_update_address)
 {
-    // Tests updating nTime via Connected() and nServices via SetServices()
+    // Tests updating nTime via Connected() and nServices via SetServices() and Add()
    auto addrman = std::make_unique<AddrMan>(EMPTY_NETGROUPMAN, DETERMINISTIC, GetCheckRatio(m_node));
    CNetAddr source{ResolveIP("252.2.2.2")};
    CAddress addr{CAddress(ResolveService("250.1.1.1", 8333), NODE_NONE)};
@@ -1075,6 +1075,32 @@ BOOST_AUTO_TEST_CASE(addrman_update_address)
    BOOST_CHECK_EQUAL(vAddr2.size(), 1U);
    BOOST_CHECK(vAddr2.at(0).nTime >= start_time + 10000s);
    BOOST_CHECK_EQUAL(vAddr2.at(0).nServices, NODE_NETWORK_LIMITED);
+
+    // Updating an existing addr through Add() (used in gossip relay) can add additional services but can't remove existing ones.
+    CAddress addr_v2{CAddress(ResolveService("250.1.1.1", 8333), NODE_P2P_V2)};
+    addr_v2.nTime = start_time;
+    BOOST_CHECK(!addrman->Add({addr_v2}, source));
+    std::vector<CAddress> vAddr3{addrman->GetAddr(/*max_addresses=*/0, /*max_pct=*/0, /*network=*/std::nullopt)};
+    BOOST_CHECK_EQUAL(vAddr3.size(), 1U);
+    BOOST_CHECK_EQUAL(vAddr3.at(0).nServices, NODE_P2P_V2 | NODE_NETWORK_LIMITED);
+
+    // SetServices() (used when we connected to them) overwrites existing service flags
+    addrman->SetServices(addr, NODE_NETWORK);
+    std::vector<CAddress> vAddr4{addrman->GetAddr(/*max_addresses=*/0, /*max_pct=*/0, /*network=*/std::nullopt)};
+    BOOST_CHECK_EQUAL(vAddr4.size(), 1U);
+    BOOST_CHECK_EQUAL(vAddr4.at(0).nServices, NODE_NETWORK);
+
+    // Promoting to Tried does not affect the service flags
+    BOOST_CHECK(addrman->Good(addr)); // addr has NODE_NONE
+    std::vector<CAddress> vAddr5{addrman->GetAddr(/*max_addresses=*/0, /*max_pct=*/0, /*network=*/std::nullopt)};
+    BOOST_CHECK_EQUAL(vAddr5.size(), 1U);
+    BOOST_CHECK_EQUAL(vAddr5.at(0).nServices, NODE_NETWORK);
+
+    // Adding service flags even works when the addr is in Tried
+    BOOST_CHECK(!addrman->Add({addr_v2}, source));
+    std::vector<CAddress> vAddr6{addrman->GetAddr(/*max_addresses=*/0, /*max_pct=*/0, /*network=*/std::nullopt)};
+    BOOST_CHECK_EQUAL(vAddr6.size(), 1U);
+    BOOST_CHECK_EQUAL(vAddr6.at(0).nServices, NODE_NETWORK | NODE_P2P_V2);
 }
 
 BOOST_AUTO_TEST_CASE(addrman_size)
diff --git a/src/validation.cpp b/src/validation.cpp
index a7f9cc66e8db56..79b1b81dce3953 100644
--- a/src/validation.cpp
+++ b/src/validation.cpp
@@ -4456,7 +4456,8 @@ bool CVerifyDB::VerifyDB(
    int nGoodTransactions = 0;
    BlockValidationState state;
    int reportDone = 0;
-    LogPrintf("[0%%]..."); /* Continued */
+    bool skipped_l3_checks{false};
+    LogPrintf("Verification progress: 0%%\n");
 
    const bool is_snapshot_cs{chainstate.m_from_snapshot_blockhash};
 
@@ -4464,7 +4465,7 @@ bool CVerifyDB::VerifyDB(
        const int percentageDone = std::max(1, std::min(99, (int)(((double)(chainstate.m_chain.Height() - pindex->nHeight)) / (double)nCheckDepth * (nCheckLevel >= 4 ? 50 : 100))));
        if (reportDone < percentageDone / 10) {
            // report every 10% step
-            LogPrintf("[%d%%]...", percentageDone); /* Continued */
+            LogPrintf("Verification progress: %d%%\n", percentageDone);
            reportDone = percentageDone / 10;
        }
        uiInterface.ShowProgress(_("Verifying blocks…").translated, percentageDone, false);
@@ -4499,17 +4500,21 @@ bool CVerifyDB::VerifyDB(
        // check level 3: check for inconsistencies during memory-only disconnect of tip blocks
        size_t curr_coins_usage = coins.DynamicMemoryUsage() + chainstate.CoinsTip().DynamicMemoryUsage();
 
-        if (nCheckLevel >= 3 && curr_coins_usage <= chainstate.m_coinstip_cache_size_bytes) {
-            assert(coins.GetBestBlock() == pindex->GetBlockHash());
-            DisconnectResult res = chainstate.DisconnectBlock(block, pindex, coins);
-            if (res == DISCONNECT_FAILED) {
-                return error("VerifyDB(): *** irrecoverable inconsistency in block data at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString());
-            }
-            if (res == DISCONNECT_UNCLEAN) {
-                nGoodTransactions = 0;
-                pindexFailure = pindex;
+        if (nCheckLevel >= 3) {
+            if (curr_coins_usage <= chainstate.m_coinstip_cache_size_bytes) {
+                assert(coins.GetBestBlock() == pindex->GetBlockHash());
+                DisconnectResult res = chainstate.DisconnectBlock(block, pindex, coins);
+                if (res == DISCONNECT_FAILED) {
+                    return error("VerifyDB(): *** irrecoverable inconsistency in block data at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString());
+                }
+                if (res == DISCONNECT_UNCLEAN) {
+                    nGoodTransactions = 0;
+                    pindexFailure = pindex;
+                } else {
+                    nGoodTransactions += block.vtx.size();
+                }
            } else {
-                nGoodTransactions += block.vtx.size();
+                skipped_l3_checks = true;
            }
        }
        if (ShutdownRequested()) return true;
@@ -4517,17 +4522,19 @@ bool CVerifyDB::VerifyDB(
    if (pindexFailure) {
        return error("VerifyDB(): *** coin database inconsistencies found (last %i blocks, %i good transactions before that)\n", chainstate.m_chain.Height() - pindexFailure->nHeight + 1, nGoodTransactions);
    }
-
+    if (skipped_l3_checks) {
+        LogPrintf("Skipped verification of level >=3 (insufficient database cache size). Consider increasing -dbcache.\n");
+    }
    // store block count as we move pindex at check level >= 4
    int block_count = chainstate.m_chain.Height() - pindex->nHeight;
 
    // check level 4: try reconnecting blocks
-    if (nCheckLevel >= 4) {
+    if (nCheckLevel >= 4 && !skipped_l3_checks) {
        while (pindex != chainstate.m_chain.Tip()) {
            const int percentageDone = std::max(1, std::min(99, 100 - (int)(((double)(chainstate.m_chain.Height() - pindex->nHeight)) / (double)nCheckDepth * 50)));
            if (reportDone < percentageDone / 10) {
                // report every 10% step
-                LogPrintf("[%d%%]...", percentageDone); /* Continued */
+                LogPrintf("Verification progress: %d%%\n", percentageDone);
                reportDone = percentageDone / 10;
            }
            uiInterface.ShowProgress(_("Verifying blocks…").translated, percentageDone, false);
@@ -4541,8 +4548,7 @@ bool CVerifyDB::VerifyDB(
        }
    }
 
-    LogPrintf("[DONE].\n");
-    LogPrintf("No coin database inconsistencies in last %i blocks (%i transactions)\n", block_count, nGoodTransactions);
+    LogPrintf("Verification: No coin database inconsistencies in last %i blocks (%i transactions)\n", block_count, nGoodTransactions);
 
    return true;
 }
diff --git a/src/wallet/scriptpubkeyman.cpp b/src/wallet/scriptpubkeyman.cpp
index 8b03952ac0c181..61f60d25865c1f 100644
--- a/src/wallet/scriptpubkeyman.cpp
+++ b/src/wallet/scriptpubkeyman.cpp
@@ -760,8 +760,6 @@ TransactionError LegacyScriptPubKeyMan::FillPSBT(PartiallySignedTransaction& psb
            // There's no UTXO so we can just skip this now
            continue;
        }
-        SignatureData sigdata;
-        input.FillSignatureData(sigdata);
        SignPSBTInput(HidingSigningProvider(this, !sign, !bip32derivs), psbtx, i, &txdata, sighash_type, nullptr, finalize);
 
        bool signed_one = PSBTInputSigned(input);
@@ -2258,8 +2256,6 @@ TransactionError DescriptorScriptPubKeyMan::FillPSBT(PartiallySignedTransaction&
            // There's no UTXO so we can just skip this now
            continue;
        }
-        SignatureData sigdata;
-        input.FillSignatureData(sigdata);
 
        std::unique_ptr<FlatSigningProvider> keys = std::make_unique<FlatSigningProvider>();
        std::unique_ptr<FlatSigningProvider> script_keys = GetSigningProvider(script, sign);
diff --git a/src/wallet/sqlite.cpp b/src/wallet/sqlite.cpp
index f41b27d28f71b7..04989cc572fd25 100644
--- a/src/wallet/sqlite.cpp
+++ b/src/wallet/sqlite.cpp
@@ -36,6 +36,21 @@ static void ErrorLogCallback(void* arg, int code, const char* msg)
    LogPrintf("SQLite Error. Code: %d. Message: %s\n", code, msg);
 }
 
+static int TraceSqlCallback(unsigned code, void* context, void* param1, void* param2)
+{
+    auto* db = static_cast<SQLiteDatabase*>(context);
+    if (code == SQLITE_TRACE_STMT) {
+        auto* stmt = static_cast<sqlite3_stmt*>(param1);
+        // To be conservative and avoid leaking potentially secret information
+        // in the log file, only expand statements that query the database, not
+        // statements that update the database.
+        char* expanded{sqlite3_stmt_readonly(stmt) ? sqlite3_expanded_sql(stmt) : nullptr};
+        LogPrintf("[%s] SQLite Statement: %s\n", db->Filename(), expanded ? expanded : sqlite3_sql(stmt));
+        if (expanded) sqlite3_free(expanded);
+    }
+    return SQLITE_OK;
+}
+
 static bool BindBlobToStatement(sqlite3_stmt* stmt,
                                int index,
                                Span<const std::byte> blob,
@@ -232,6 +247,13 @@ void SQLiteDatabase::Open()
        if (ret != SQLITE_OK) {
            throw std::runtime_error(strprintf("SQLiteDatabase: Failed to enable extended result codes: %s\n", sqlite3_errstr(ret)));
        }
+        // Trace SQL statements if tracing is enabled with -debug=walletdb -loglevel=walletdb:trace
+        if (LogAcceptCategory(BCLog::WALLETDB, BCLog::Level::Trace)) {
+            ret = sqlite3_trace_v2(m_db, SQLITE_TRACE_STMT, TraceSqlCallback, this);
+            if (ret != SQLITE_OK) {
+                LogPrintf("Failed to enable SQL tracing for %s\n", Filename());
+            }
+        }
    }
 
    if (sqlite3_db_readonly(m_db, "main") != 0) {
diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py
index 5e188dce4ca5af..93c461d7125219 100755
--- a/test/functional/wallet_backup.py
+++ b/test/functional/wallet_backup.py
@@ -134,11 +134,6 @@ def restore_wallet_existent_name(self):
        assert_raises_rpc_error(-36, error_message, node.restorewallet, wallet_name, backup_file)
        assert os.path.exists(wallet_file)
 
-    def init_three(self):
-        self.init_wallet(node=0)
-        self.init_wallet(node=1)
-        self.init_wallet(node=2)
-
    def run_test(self):
        self.log.info("Generating initial blockchain")
        self.generate(self.nodes[0], 1)
@@ -230,7 +225,10 @@ def run_test(self):
        shutil.rmtree(os.path.join(self.nodes[2].datadir, self.chain, 'llmq'))
 
        self.start_three(["-nowallet"])
-        self.init_three()
+        # Create new wallets for the three nodes.
+        # We will use these empty wallets to test the 'importwallet()' RPC command below.
+        for node_num in range(3):
+            self.nodes[node_num].createwallet(wallet_name=self.default_wallet_name, descriptors=self.options.descriptors, load_on_startup=True)
 
        assert_equal(self.nodes[0].getbalance(), 0)
        assert_equal(self.nodes[1].getbalance(), 0)