diff --git a/build.zig b/build.zig index 6e6450359..6cef07484 100644 --- a/build.zig +++ b/build.zig @@ -38,14 +38,14 @@ pub fn build(b: *Build) void { // Dependencies const dep_opts = .{ .target = target, .optimize = optimize }; - const base58_dep = b.dependency("base58-zig", dep_opts); - const base58_module = base58_dep.module("base58-zig"); + const base58_dep = b.dependency("base58", dep_opts); + const base58_mod = base58_dep.module("base58"); const zig_network_dep = b.dependency("zig-network", dep_opts); - const zig_network_module = zig_network_dep.module("network"); + const zig_network_mod = zig_network_dep.module("network"); const zig_cli_dep = b.dependency("zig-cli", dep_opts); - const zig_cli_module = zig_cli_dep.module("zig-cli"); + const zig_cli_mod = zig_cli_dep.module("zig-cli"); const zstd_dep = b.dependency("zstd", dep_opts); const zstd_mod = zstd_dep.module("zstd"); @@ -72,9 +72,9 @@ pub fn build(b: *Build) void { sig_mod.addOptions("build-options", build_options); - sig_mod.addImport("zig-network", zig_network_module); - sig_mod.addImport("base58-zig", base58_module); - sig_mod.addImport("zig-cli", zig_cli_module); + sig_mod.addImport("zig-network", zig_network_mod); + sig_mod.addImport("base58", base58_mod); + sig_mod.addImport("zig-cli", zig_cli_mod); sig_mod.addImport("zstd", zstd_mod); switch (blockstore_db) { .rocksdb => sig_mod.addImport("rocksdb", rocksdb_mod), @@ -100,9 +100,9 @@ pub fn build(b: *Build) void { sig_exe.linkLibC(); sig_exe.root_module.addOptions("build-options", build_options); - sig_exe.root_module.addImport("base58-zig", base58_module); - sig_exe.root_module.addImport("zig-cli", zig_cli_module); - sig_exe.root_module.addImport("zig-network", zig_network_module); + sig_exe.root_module.addImport("base58", base58_mod); + sig_exe.root_module.addImport("zig-cli", zig_cli_mod); + sig_exe.root_module.addImport("zig-network", zig_network_mod); sig_exe.root_module.addImport("zstd", zstd_mod); sig_exe.root_module.addImport("lsquic", lsquic_mod); sig_exe.root_module.addImport("ssl", ssl_mod); @@ -138,8 +138,8 @@ pub fn build(b: *Build) void { unit_tests_exe.linkLibC(); unit_tests_exe.root_module.addOptions("build-options", build_options); - unit_tests_exe.root_module.addImport("base58-zig", base58_module); - unit_tests_exe.root_module.addImport("zig-network", zig_network_module); + unit_tests_exe.root_module.addImport("base58", base58_mod); + unit_tests_exe.root_module.addImport("zig-network", zig_network_mod); unit_tests_exe.root_module.addImport("zstd", zstd_mod); switch (blockstore_db) { .rocksdb => unit_tests_exe.root_module.addImport("rocksdb", rocksdb_mod), @@ -171,8 +171,8 @@ pub fn build(b: *Build) void { fuzz_exe.linkLibC(); fuzz_exe.root_module.addOptions("build-options", build_options); - fuzz_exe.root_module.addImport("base58-zig", base58_module); - fuzz_exe.root_module.addImport("zig-network", zig_network_module); + fuzz_exe.root_module.addImport("base58", base58_mod); + fuzz_exe.root_module.addImport("zig-network", zig_network_mod); fuzz_exe.root_module.addImport("zstd", zstd_mod); switch (blockstore_db) { .rocksdb => fuzz_exe.root_module.addImport("rocksdb", rocksdb_mod), @@ -205,8 +205,8 @@ pub fn build(b: *Build) void { benchmark_exe.linkLibC(); benchmark_exe.root_module.addOptions("build-options", build_options); - benchmark_exe.root_module.addImport("base58-zig", base58_module); - benchmark_exe.root_module.addImport("zig-network", zig_network_module); + benchmark_exe.root_module.addImport("base58", base58_mod); + 
benchmark_exe.root_module.addImport("zig-network", zig_network_mod); benchmark_exe.root_module.addImport("zstd", zstd_mod); benchmark_exe.root_module.addImport("prettytable", pretty_table_mod); switch (blockstore_db) { @@ -238,7 +238,7 @@ pub fn build(b: *Build) void { install_step.dependOn(&geyser_reader_exe.step); geyser_reader_exe.root_module.addImport("sig", sig_mod); - geyser_reader_exe.root_module.addImport("zig-cli", zig_cli_module); + geyser_reader_exe.root_module.addImport("zig-cli", zig_cli_mod); if (!no_bin) { const geyser_reader_install = b.addInstallArtifact(geyser_reader_exe, .{}); diff --git a/build.zig.zon b/build.zig.zon index 2c5177b2f..c3690c761 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -7,10 +7,6 @@ "src", }, .dependencies = .{ - .@"base58-zig" = .{ - .url = "https://github.com/Syndica/base58-zig/archive/e1001fbe8b41eed36d81e37931ada66b784e51dc.tar.gz", - .hash = "12206e5050a03cd9dcb896781de0cf541081488006532675371653f61d00c1f27433", - }, .@"zig-network" = .{ .url = "https://github.com/ikskuh/zig-network/archive/d4506cd1950de8a6e6d3f28aed68a0b4f64e0b44.tar.gz", .hash = "122080b2675fa7fdd7410af5f62942dec99e0ea766827c8c6506e6672b575fb0a50e", @@ -39,5 +35,9 @@ .url = "https://github.com/dying-will-bullet/prettytable-zig/archive/46b6ad9b5970def35fa43c9613cd244f28862fa9.tar.gz", .hash = "122098d444c9c7112c66481e7655bb5389829c67e04b280a029200545e1971187443", }, + .base58 = .{ + .url = "git+https://github.com/Syndica/base58-zig#e6337e7eb3dc7ca77cad4c3b2fe4a34357bd50ea", + .hash = "122092353c0b494c2a4968c10a527dc90d6e89eaac48637fda1a0af3efd948bf4ee3", + }, }, } diff --git a/src/accountsdb/snapshots.zig b/src/accountsdb/snapshots.zig index 515eacb35..1c8657903 100644 --- a/src/accountsdb/snapshots.zig +++ b/src/accountsdb/snapshots.zig @@ -3,7 +3,7 @@ const std = @import("std"); const zstd = @import("zstd"); const sig = @import("../sig.zig"); -const base58 = @import("base58-zig"); +const base58 = @import("base58"); const bincode = sig.bincode; @@ -349,7 +349,7 @@ test "deserialize VoteState.node_pubkey" { 90, 174, 158, 6, 199, 179, 134, 194, 112, 248, 166, 232, 144, 253, 128, 249, 67, 118, } ++ .{0} ** 1586 ++ .{ 31, 0, 0, 0, 0, 0, 0, 0, 1 } ++ .{0} ** 24; const vote_state = try bincode.readFromSlice(undefined, VoteState, &bytes, .{}); - const expected_pubkey = try Pubkey.fromString("55abJrqFnjm7ZRB1noVdh7BzBe3bBSMFT3pt16mw6Vad"); + const expected_pubkey = try Pubkey.parseBase58String("55abJrqFnjm7ZRB1noVdh7BzBe3bBSMFT3pt16mw6Vad"); try std.testing.expect(expected_pubkey.equals(&vote_state.node_pubkey)); } @@ -2219,7 +2219,7 @@ pub const FullSnapshotFileInfo = struct { pub const SnapshotArchiveNameStr = SnapshotArchiveNameFmtSpec.BoundedArrayValue(.{ .slot = std.math.maxInt(Slot), - .hash = sig.utils.fmt.boundedString(&(Hash{ .data = .{255} ** 32 }).base58String()), + .hash = "1" ** Hash.BASE58_MAX_SIZE, }); pub fn snapshotArchiveName(self: FullSnapshotFileInfo) SnapshotArchiveNameStr { @@ -2301,11 +2301,10 @@ pub const FullSnapshotFileInfo = struct { return error.MissingHash; } - const str_max_len = Hash.base58_max_encoded_size; - const end_max = @max(filename.len, start + str_max_len + 1); + const str_max_len = Hash.BASE58_MAX_SIZE; + const end_max = @min(filename.len, start + str_max_len + 1); const filename_truncated = filename[0..end_max]; - // TODO: accessing it this way is dirty, the base58 API should be improved - const alphabet = &base58.Alphabet.DEFAULT.encode; + const alphabet = std.mem.asBytes(&base58.Table.BITCOIN.alphabet); const end = 
std.mem.indexOfNonePos(u8, filename_truncated, start + 1, alphabet) orelse filename_truncated.len; @@ -2347,7 +2346,7 @@ pub const IncrementalSnapshotFileInfo = struct { pub const SnapshotArchiveNameStr = SnapshotArchiveNameFmtSpec.BoundedArrayValue(.{ .base_slot = std.math.maxInt(Slot), .slot = std.math.maxInt(Slot), - .hash = sig.utils.fmt.boundedString(&(Hash{ .data = .{255} ** 32 }).base58String()), + .hash = "1" ** Hash.BASE58_MAX_SIZE, }); pub fn snapshotArchiveName(self: IncrementalSnapshotFileInfo) SnapshotArchiveNameStr { @@ -2458,11 +2457,10 @@ pub const IncrementalSnapshotFileInfo = struct { return error.MissingHash; } - const str_max_len = Hash.base58_max_encoded_size; - const end_max = @max(filename.len, start + str_max_len + 1); + const str_max_len = Hash.BASE58_MAX_SIZE; + const end_max = @min(filename.len, start + str_max_len + 1); const filename_truncated = filename[0..end_max]; - // TODO: accessing it this way is dirty, the base58 API should be improved - const alphabet = &base58.Alphabet.DEFAULT.encode; + const alphabet = std.mem.asBytes(&base58.Table.BITCOIN.alphabet); const end = std.mem.indexOfNonePos(u8, filename_truncated, start + 1, alphabet) orelse filename_truncated.len; diff --git a/src/accountsdb/sysvars.zig b/src/accountsdb/sysvars.zig index 538e1346f..2cdb3a527 100644 --- a/src/accountsdb/sysvars.zig +++ b/src/accountsdb/sysvars.zig @@ -24,14 +24,14 @@ pub const SlotCheckResult = enum { Future, TooOld, Found, NotFound }; /// /// Analogous to [SysvarCache](https://github.com/anza-xyz/agave/blob/ebd063eb79c6e2f14da660ccfc90f1d4c0b7db1f/program-runtime/src/sysvar_cache.rs#L28) pub const IDS = struct { - pub const clock = Pubkey.fromString("SysvarC1ock11111111111111111111111111111111") catch unreachable; - pub const epoch_schedule = Pubkey.fromString("SysvarEpochSchedu1e111111111111111111111111") catch unreachable; - pub const epoch_rewards = Pubkey.fromString("SysvarEpochRewards1111111111111111111111111") catch unreachable; - pub const rent = Pubkey.fromString("SysvarRent111111111111111111111111111111111") catch unreachable; - pub const slot_hashes = Pubkey.fromString("SysvarS1otHashes111111111111111111111111111") catch unreachable; - pub const slot_history = Pubkey.fromString("SysvarS1otHistory11111111111111111111111111") catch unreachable; - pub const stake_history = Pubkey.fromString("SysvarStakeHistory1111111111111111111111111") catch unreachable; - pub const last_restart_slot = Pubkey.fromString("SysvarLastRestartS1ot1111111111111111111111") catch unreachable; + pub const clock = Pubkey.parseBase58String("SysvarC1ock11111111111111111111111111111111") catch unreachable; + pub const epoch_schedule = Pubkey.parseBase58String("SysvarEpochSchedu1e111111111111111111111111") catch unreachable; + pub const epoch_rewards = Pubkey.parseBase58String("SysvarEpochRewards1111111111111111111111111") catch unreachable; + pub const rent = Pubkey.parseBase58String("SysvarRent111111111111111111111111111111111") catch unreachable; + pub const slot_hashes = Pubkey.parseBase58String("SysvarS1otHashes111111111111111111111111111") catch unreachable; + pub const slot_history = Pubkey.parseBase58String("SysvarS1otHistory11111111111111111111111111") catch unreachable; + pub const stake_history = Pubkey.parseBase58String("SysvarStakeHistory1111111111111111111111111") catch unreachable; + pub const last_restart_slot = Pubkey.parseBase58String("SysvarLastRestartS1ot1111111111111111111111") catch unreachable; }; /// Analogous to 
[Clock](https://github.com/anza-xyz/agave/blob/fc2a8794be2526e9fd6cdbc9b304c055b2d9cc57/sdk/program/src/clock.rs#L180) diff --git a/src/cmd.zig b/src/cmd.zig index 437cdca6d..00e199225 100644 --- a/src/cmd.zig +++ b/src/cmd.zig @@ -1641,7 +1641,7 @@ fn getTrustedValidators(allocator: std.mem.Allocator) !?std.ArrayList(Pubkey) { ); for (current_config.gossip.trusted_validators) |trusted_validator_str| { trusted_validators.?.appendAssumeCapacity( - try Pubkey.fromString(trusted_validator_str), + try Pubkey.parseBase58String(trusted_validator_str), ); } } diff --git a/src/core/hash.zig b/src/core/hash.zig index ce6372268..2da251650 100644 --- a/src/core/hash.zig +++ b/src/core/hash.zig @@ -1,9 +1,10 @@ const std = @import("std"); const sig = @import("../sig.zig"); +const base58 = @import("base58"); +const BASE58_ENDEC = base58.Table.BITCOIN; const Sha256 = std.crypto.hash.sha2.Sha256; const Slot = sig.core.time.Slot; -const Allocator = std.mem.Allocator; pub const SlotAndHash = struct { slot: Slot, @@ -27,55 +28,56 @@ pub const SlotAndHash = struct { }; pub const Hash = extern struct { - data: [size]u8, + data: [SIZE]u8, - pub const size = 32; + pub const SIZE = 32; - pub const ZEROES: Hash = .{ .data = .{0} ** size }; - - const base58 = sig.crypto.base58.Base58Sized(size); - - pub fn fromSizedSlice(data: *const [size]u8) Hash { - var hash: Hash = undefined; - @memcpy(&hash.data, data); - return hash; - } + pub const ZEROES: Hash = .{ .data = .{0} ** SIZE }; pub fn generateSha256Hash(bytes: []const u8) Hash { - var data: [size]u8 = undefined; + var data: [SIZE]u8 = undefined; Sha256.hash(bytes, &data, .{}); return .{ .data = data }; } - pub fn extendAndHash(id: Hash, val: []const u8) Hash { + pub fn extendAndHash(self: Hash, val: []const u8) Hash { var hasher = Sha256.init(.{}); - hasher.update(&id.data); + hasher.update(&self.data); hasher.update(val); return .{ .data = hasher.finalResult() }; } pub fn eql(self: Hash, other: Hash) bool { - const xx: @Vector(size, u8) = self.data; - const yy: @Vector(size, u8) = other.data; + const xx: @Vector(SIZE, u8) = self.data; + const yy: @Vector(SIZE, u8) = other.data; return @reduce(.And, xx == yy); } - pub fn order(a: *const Hash, b: *const Hash) std.math.Order { - for (a.data, b.data) |a_byte, b_byte| { - if (a_byte > b_byte) return .gt; - if (a_byte < b_byte) return .lt; - } - return .eq; + pub fn order(self: *const Hash, other: *const Hash) std.math.Order { + return for (self.data, other.data) |a_byte, b_byte| { + if (a_byte > b_byte) break .gt; + if (a_byte < b_byte) break .lt; + } else .eq; } pub fn parseBase58String(str: []const u8) error{InvalidHash}!Hash { - return .{ .data = base58.decode(str) catch return error.InvalidHash }; + if (str.len > BASE58_MAX_SIZE) return error.InvalidHash; + var encoded: std.BoundedArray(u8, BASE58_MAX_SIZE) = .{}; + encoded.appendSliceAssumeCapacity(str); + + if (@inComptime()) @setEvalBranchQuota(str.len * str.len * str.len); + const decoded = BASE58_ENDEC.decodeBounded(BASE58_MAX_SIZE, encoded) catch { + return error.InvalidHash; + }; + + if (decoded.len != SIZE) return error.InvalidHash; + return .{ .data = decoded.constSlice()[0..SIZE].* }; } - pub const base58_max_encoded_size = base58.max_encoded_size; - pub const Base58String = std.BoundedArray(u8, base58_max_encoded_size); + pub const BASE58_MAX_SIZE = base58.encodedMaxSize(SIZE); + pub const Base58String = std.BoundedArray(u8, BASE58_MAX_SIZE); pub fn base58String(self: Hash) Base58String { - return base58.encode(self.data); + return 
BASE58_ENDEC.encodeArray(SIZE, self.data); } pub fn format( @@ -83,17 +85,14 @@ pub const Hash = extern struct { comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype, - ) !void { - return base58.format(self.data, writer); - } - - pub fn base58EncodeAlloc(self: Hash, allocator: Allocator) Allocator.Error![]const u8 { - return base58.encodeAlloc(self.data, allocator); + ) @TypeOf(writer).Error!void { + const str = self.base58String(); + return writer.writeAll(str.constSlice()); } /// Intended to be used in tests. pub fn initRandom(random: std.Random) Hash { - var data: [size]u8 = undefined; + var data: [SIZE]u8 = undefined; random.bytes(&data); return .{ .data = data }; } diff --git a/src/core/leader_schedule.zig b/src/core/leader_schedule.zig index c21815932..c1360d37d 100644 --- a/src/core/leader_schedule.zig +++ b/src/core/leader_schedule.zig @@ -136,7 +136,7 @@ pub const LeaderSchedule = struct { var rpc_leader_iter = leader_to_slots.iterator(); while (rpc_leader_iter.next()) |entry| { - const key = try Pubkey.fromString(entry.key_ptr.*); + const key = try Pubkey.parseBase58String(entry.key_ptr.*); for (entry.value_ptr.*) |slot| { leaders[leaders_index] = .{ .slot = slot, .key = key }; leaders_index += 1; @@ -251,7 +251,7 @@ pub const LeaderSchedule = struct { start_slot = slot; } const node_str = nextNonEmpty(&word_iter) orelse return error.MissingPubkey; - try slot_leaders.append(try Pubkey.fromString(node_str)); + try slot_leaders.append(try Pubkey.parseBase58String(node_str)); } return .{ @@ -287,7 +287,10 @@ test "leaderSchedule calculation matches agave" { const slot_leaders = try LeaderSchedule.fromStakedNodes(std.testing.allocator, 123, 321, &staked_nodes.unmanaged); defer std.testing.allocator.free(slot_leaders); for (slot_leaders, 0..) 
|slot_leader, i| { - try std.testing.expect((try Pubkey.fromString(generated_leader_schedule[i])).equals(&slot_leader)); + try std.testing.expectEqual( + slot_leader, + try Pubkey.parseBase58String(generated_leader_schedule[i]), + ); } } @@ -308,17 +311,17 @@ test "parseLeaderSchedule writeLeaderSchedule happy path roundtrip" { \\ ; const expected_nodes = [_]Pubkey{ - try Pubkey.fromString("Fd7btgySsrjuo25CJCj7oE7VPMyezDhnx7pZkj2v69Nk"), - try Pubkey.fromString("Fd7btgySsrjuo25CJCj7oE7VPMyezDhnx7pZkj2v69Nk"), - try Pubkey.fromString("Fd7btgySsrjuo25CJCj7oE7VPMyezDhnx7pZkj2v69Nk"), - try Pubkey.fromString("Fd7btgySsrjuo25CJCj7oE7VPMyezDhnx7pZkj2v69Nk"), - try Pubkey.fromString("GBuP6xK2zcUHbQuUWM4gbBjom46AomsG8JzSp1bzJyn8"), - try Pubkey.fromString("GBuP6xK2zcUHbQuUWM4gbBjom46AomsG8JzSp1bzJyn8"), - try Pubkey.fromString("GBuP6xK2zcUHbQuUWM4gbBjom46AomsG8JzSp1bzJyn8"), - try Pubkey.fromString("GBuP6xK2zcUHbQuUWM4gbBjom46AomsG8JzSp1bzJyn8"), - try Pubkey.fromString("DWvDTSh3qfn88UoQTEKRV2JnLt5jtJAVoiCo3ivtMwXP"), - try Pubkey.fromString("DWvDTSh3qfn88UoQTEKRV2JnLt5jtJAVoiCo3ivtMwXP"), - try Pubkey.fromString("DWvDTSh3qfn88UoQTEKRV2JnLt5jtJAVoiCo3ivtMwXP"), + try Pubkey.parseBase58String("Fd7btgySsrjuo25CJCj7oE7VPMyezDhnx7pZkj2v69Nk"), + try Pubkey.parseBase58String("Fd7btgySsrjuo25CJCj7oE7VPMyezDhnx7pZkj2v69Nk"), + try Pubkey.parseBase58String("Fd7btgySsrjuo25CJCj7oE7VPMyezDhnx7pZkj2v69Nk"), + try Pubkey.parseBase58String("Fd7btgySsrjuo25CJCj7oE7VPMyezDhnx7pZkj2v69Nk"), + try Pubkey.parseBase58String("GBuP6xK2zcUHbQuUWM4gbBjom46AomsG8JzSp1bzJyn8"), + try Pubkey.parseBase58String("GBuP6xK2zcUHbQuUWM4gbBjom46AomsG8JzSp1bzJyn8"), + try Pubkey.parseBase58String("GBuP6xK2zcUHbQuUWM4gbBjom46AomsG8JzSp1bzJyn8"), + try Pubkey.parseBase58String("GBuP6xK2zcUHbQuUWM4gbBjom46AomsG8JzSp1bzJyn8"), + try Pubkey.parseBase58String("DWvDTSh3qfn88UoQTEKRV2JnLt5jtJAVoiCo3ivtMwXP"), + try Pubkey.parseBase58String("DWvDTSh3qfn88UoQTEKRV2JnLt5jtJAVoiCo3ivtMwXP"), + try Pubkey.parseBase58String("DWvDTSh3qfn88UoQTEKRV2JnLt5jtJAVoiCo3ivtMwXP"), }; // const expected_start = 270864000; diff --git a/src/core/pubkey.zig b/src/core/pubkey.zig index 820a57ee6..21dda2618 100644 --- a/src/core/pubkey.zig +++ b/src/core/pubkey.zig @@ -1,52 +1,53 @@ const std = @import("std"); const sig = @import("../sig.zig"); - -const Allocator = std.mem.Allocator; -const ParseOptions = std.json.ParseOptions; +const base58 = @import("base58"); +const BASE58_ENDEC = base58.Table.BITCOIN; pub const Pubkey = extern struct { - data: [size]u8, - const Self = @This(); - - pub const size = 32; - - pub const ZEROES: Pubkey = .{ .data = .{0} ** size }; + data: [SIZE]u8, - const base58 = sig.crypto.base58.Base58Sized(size); + pub const SIZE = 32; - pub fn fromString(str: []const u8) !Self { - return .{ .data = try base58.decode(str) }; - } - - pub fn fromBytes(bytes: []const u8) !Self { - if (bytes.len != size) { - return Error.InvalidBytesLength; - } - return .{ .data = bytes[0..size].* }; - } + pub const ZEROES: Pubkey = .{ .data = .{0} ** SIZE }; - pub fn fromPublicKey(public_key: *const std.crypto.sign.Ed25519.PublicKey) Self { + pub fn fromPublicKey(public_key: *const std.crypto.sign.Ed25519.PublicKey) Pubkey { return .{ .data = public_key.bytes }; } - pub fn initRandom(random: std.Random) Self { - var bytes: [size]u8 = undefined; + pub fn initRandom(random: std.Random) Pubkey { + var bytes: [SIZE]u8 = undefined; random.bytes(&bytes); return .{ .data = bytes }; } - pub fn equals(self: *const Self, other: *const Pubkey) bool { - const xx: 
@Vector(size, u8) = self.data; - const yy: @Vector(size, u8) = other.data; + pub fn equals(self: *const Pubkey, other: *const Pubkey) bool { + const xx: @Vector(SIZE, u8) = self.data; + const yy: @Vector(SIZE, u8) = other.data; return @reduce(.And, xx == yy); } - pub fn isZeroed(self: *const Self) bool { + pub fn isZeroed(self: *const Pubkey) bool { return self.equals(&ZEROES); } - pub fn string(self: Self) base58.String { - return base58.encode(self.data); + pub fn parseBase58String(str: []const u8) error{InvalidPubkey}!Pubkey { + if (str.len > BASE58_MAX_SIZE) return error.InvalidPubkey; + var encoded: std.BoundedArray(u8, BASE58_MAX_SIZE) = .{}; + encoded.appendSliceAssumeCapacity(str); + + if (@inComptime()) @setEvalBranchQuota(str.len * str.len * str.len); + const decoded = BASE58_ENDEC.decodeBounded(BASE58_MAX_SIZE, encoded) catch { + return error.InvalidPubkey; + }; + + if (decoded.len != SIZE) return error.InvalidPubkey; + return .{ .data = decoded.constSlice()[0..SIZE].* }; + } + + pub const BASE58_MAX_SIZE = base58.encodedMaxSize(SIZE); + pub const Base58String = std.BoundedArray(u8, BASE58_MAX_SIZE); + pub fn base58String(self: Pubkey) Base58String { + return BASE58_ENDEC.encodeArray(SIZE, self.data); } pub fn format( @@ -55,12 +56,17 @@ pub const Pubkey = extern struct { _: std.fmt.FormatOptions, writer: anytype, ) !void { - return base58.format(self.data, writer); + const str = self.base58String(); + return writer.writeAll(str.constSlice()); } - pub fn jsonParse(_: Allocator, source: anytype, _: ParseOptions) !Pubkey { + pub fn jsonParse( + _: std.mem.Allocator, + source: anytype, + _: std.json.ParseOptions, + ) std.json.ParseError(@TypeOf(source.*))!Pubkey { return switch (try source.next()) { - .string => |s| .{ .data = base58.decode(s) catch return error.UnexpectedToken }, + .string => |str| parseBase58String(str) catch error.UnexpectedToken, else => error.UnexpectedToken, }; } diff --git a/src/core/signature.zig b/src/core/signature.zig index de40a10d5..3b481da2a 100644 --- a/src/core/signature.zig +++ b/src/core/signature.zig @@ -1,6 +1,8 @@ const std = @import("std"); const sig = @import("../sig.zig"); const core = @import("lib.zig"); +const base58 = @import("base58"); +const BASE58_ENDEC = base58.Table.BITCOIN; const Ed25519 = std.crypto.sign.Ed25519; const Verifier = std.crypto.sign.Ed25519.Verifier; @@ -9,59 +11,58 @@ const e = std.crypto.errors; const Pubkey = core.Pubkey; pub const Signature = struct { - data: [size]u8 = [_]u8{0} ** size, + data: [SIZE]u8, - pub const size: usize = 64; + pub const SIZE: usize = 64; - const base58 = sig.crypto.base58.Base58Sized(size); - const Self = @This(); - - pub fn default() Self { - return .{ .data = [_]u8{0} ** size }; - } - - pub fn init(bytes: [size]u8) Self { - return .{ .data = bytes }; - } - - pub fn fromString(str: []const u8) !Self { - return .{ .data = try base58.decode(str) }; - } + pub const ZEROES: Signature = .{ .data = .{0} ** SIZE }; + pub const VerifyError = e.NonCanonicalError; pub fn verify( - self: Self, + self: Signature, pubkey: Pubkey, msg: []const u8, - ) e.NonCanonicalError!bool { + ) VerifyError!bool { const signature = Ed25519.Signature.fromBytes(self.data); const byte_pubkey = try Ed25519.PublicKey.fromBytes(pubkey.data); signature.verify(msg, byte_pubkey) catch return false; return true; } + pub const VerifierError = + e.NonCanonicalError || + e.EncodingError || + e.IdentityElementError; pub fn verifier( - self: Self, + self: Signature, pubkey: Pubkey, - ) (e.NonCanonicalError || - e.EncodingError || 
- e.IdentityElementError)!Verifier { + ) VerifierError!Verifier { const signature = Ed25519.Signature.fromBytes(self.data); return signature.verifier(try Ed25519.PublicKey.fromBytes(pubkey.data)); } - pub fn eql(self: *const Self, other: *const Self) bool { + pub fn eql(self: *const Signature, other: *const Signature) bool { return std.mem.eql(u8, self.data[0..], other.data[0..]); } - pub fn base58String(self: Signature) std.BoundedArray(u8, 88) { - return base58.encode(self.data); + pub fn parseBase58String(str: []const u8) error{InvalidSignature}!Signature { + if (str.len > BASE58_MAX_SIZE) return error.InvalidSignature; + var encoded: std.BoundedArray(u8, BASE58_MAX_SIZE) = .{}; + encoded.appendSliceAssumeCapacity(str); + + if (@inComptime()) @setEvalBranchQuota(str.len * str.len * str.len); + const decoded = BASE58_ENDEC.decodeBounded(BASE58_MAX_SIZE, encoded) catch { + return error.InvalidSignature; + }; + + if (decoded.len != SIZE) return error.InvalidSignature; + return .{ .data = decoded.constSlice()[0..SIZE].* }; } - pub fn base58StringAlloc( - self: Signature, - allocator: std.mem.Allocator, - ) std.mem.Allocator.Error![]const u8 { - return base58.encodeAlloc(self.data, allocator); + pub const BASE58_MAX_SIZE = base58.encodedMaxSize(SIZE); + pub const Base58String = std.BoundedArray(u8, BASE58_MAX_SIZE); + pub fn base58String(self: Signature) Base58String { + return BASE58_ENDEC.encodeArray(SIZE, self.data); } pub fn format( @@ -70,10 +71,11 @@ pub const Signature = struct { _: std.fmt.FormatOptions, writer: anytype, ) !void { - return base58.format(self.data, writer); + const str = self.base58String(); + return writer.writeAll(str.constSlice()); } - pub fn jsonStringify(self: Signature, writer: anytype) !void { + pub fn jsonStringify(self: Signature, writer: anytype) @TypeOf(writer.*).Error!void { try writer.print("\"{s}\"", .{self.base58String().slice()}); } }; diff --git a/src/core/transaction.zig b/src/core/transaction.zig index 425450e15..be3e204e6 100644 --- a/src/core/transaction.zig +++ b/src/core/transaction.zig @@ -516,7 +516,7 @@ pub const CompileError = error{ UnknownInstructionKey, }; -const SYSTEM_PROGRAM_ID = Pubkey{ .data = [_]u8{0} ** Pubkey.size }; +const SYSTEM_PROGRAM_ID = Pubkey.ZEROES; const SystemInstruction = union(enum(u8)) { CreateAccount, @@ -551,7 +551,7 @@ pub fn buildTransferTansaction( var signatures = try allocator.alloc(Signature, 1); var noise: [KeyPair.seed_length]u8 = undefined; random.bytes(noise[0..]); - signatures[0] = Signature.init((try from_keypair.sign(message_bytes, noise)).toBytes()); + signatures[0] = .{ .data = (try from_keypair.sign(message_bytes, noise)).toBytes() }; return .{ .signatures = signatures, @@ -611,8 +611,8 @@ test "create transfer transaction" { const random = prng.random(); const from_keypair = try KeyPair.create([_]u8{0} ** KeyPair.seed_length); - const to_pubkey = Pubkey{ .data = [_]u8{1} ** Pubkey.size }; - const recent_blockhash = Hash.generateSha256Hash(&[_]u8{0}); + const to_pubkey: Pubkey = .{ .data = .{1} ** Pubkey.SIZE }; + const recent_blockhash = Hash.generateSha256Hash(&.{0}); const tx = try buildTransferTansaction( allocator, random, @@ -765,12 +765,12 @@ test "VersionedMessage v0 serialization and deserialization" { } pub const test_v0_transaction = struct { - pub const as_struct = VersionedTransaction{ + pub const as_struct: VersionedTransaction = .{ .signatures = &.{ - Signature.fromString( + Signature.parseBase58String( 
"2cxn1LdtB7GcpeLEnHe5eA7LymTXKkqGF6UvmBM2EtttZEeqBREDaAD7LCagDFHyuc3xXxyDkMPiy3CpK5m6Uskw", ) catch unreachable, - Signature.fromString( + Signature.parseBase58String( "4gr9L7K3bALKjPRiRSk4JDB3jYmNaauf6rewNV3XFubX5EHxBn98gqBGhbwmZAB9DJ2pv8GWE1sLoYqhhLbTZcLj", ) catch unreachable, }, @@ -824,8 +824,8 @@ pub const test_v0_message = struct { .num_readonly_unsigned_accounts = 102, }, .account_keys = &.{ - Pubkey.fromString("GubTBrbgk9JwkwX1FkXvsrF1UC2AP7iTgg8SGtgH14QE") catch unreachable, - Pubkey.fromString("5yCD7QeAk5uAduhLZGxePv21RLsVEktPqJG5pbmZx4J4") catch unreachable, + Pubkey.parseBase58String("GubTBrbgk9JwkwX1FkXvsrF1UC2AP7iTgg8SGtgH14QE") catch unreachable, + Pubkey.parseBase58String("5yCD7QeAk5uAduhLZGxePv21RLsVEktPqJG5pbmZx4J4") catch unreachable, }, .recent_blockhash = Hash.parseBase58String("4xzjBNLkRqhBVmZ7JKcX2UEP8wzYKYWpXk7CPXzgrEZW") catch unreachable, .instructions = &.{.{ @@ -837,7 +837,7 @@ pub const test_v0_message = struct { }, }}, .address_table_lookups = &.{.{ - .account_key = Pubkey.fromString("ZETAxsqBRek56DhiGXrn75yj2NHU3aYUnxvHXpkf3aD") catch unreachable, + .account_key = Pubkey.parseBase58String("ZETAxsqBRek56DhiGXrn75yj2NHU3aYUnxvHXpkf3aD") catch unreachable, .writable_indexes = &.{ 1, 3, 5, 7, 90 }, .readonly_indexes = &.{}, }}, diff --git a/src/crypto/base58.zig b/src/crypto/base58.zig deleted file mode 100644 index 17fa303ca..000000000 --- a/src/crypto/base58.zig +++ /dev/null @@ -1,56 +0,0 @@ -const std = @import("std"); -const base58 = @import("base58-zig"); - -const Allocator = std.mem.Allocator; - -pub fn Base58Sized(decoded_size: usize) type { - const encoder = base58.Encoder.init(.{}); - const decoder = base58.Decoder.init(.{}); - - const decoded_size_float = @as(f64, @floatFromInt(decoded_size)); - const max_encoded_size_float = decoded_size_float * (8.0 / std.math.log2(58.0)); - - return struct { - pub const max_encoded_size: usize = @ceil(max_encoded_size_float); - pub const String = std.BoundedArray(u8, max_encoded_size); - - pub fn decode(str: []const u8) ![decoded_size]u8 { - var result_data: [decoded_size]u8 = undefined; - @setEvalBranchQuota(6100); - const decoded_len = try decoder.decode(str, &result_data); - if (decoded_len != decoded_size) return error.InvalidDecodedSize; - return result_data; - } - - pub fn encode(data: [decoded_size]u8) String { - var result: std.BoundedArray(u8, max_encoded_size) = .{}; - // unreachable because `max_encoded_size` is the - // maximum encoded size for `decoded_size` bytes - const encoded_len = encoder.encode(&data, &result.buffer) catch unreachable; - result.len = @intCast(encoded_len); - return result; - } - - pub fn encodeAlloc( - data: [decoded_size]u8, - allocator: Allocator, - ) Allocator.Error![]const u8 { - const buf = try allocator.alloc(u8, max_encoded_size); - const actual_size = encodeToSlice(data, buf[0..max_encoded_size]); - return try allocator.realloc(buf, actual_size); - } - - pub fn encodeToSlice(data: [decoded_size]u8, buf: *[max_encoded_size]u8) usize { - // unreachable because `max_encoded_size` is the - // maximum encoded size for `decoded_size` bytes - const actual_size = encoder.encode(&data, buf[0..]) catch unreachable; - std.debug.assert(actual_size <= max_encoded_size); - return actual_size; - } - - pub fn format(data: [decoded_size]u8, writer: anytype) !void { - const b58_str_bounded = encode(data); - return writer.writeAll(b58_str_bounded.constSlice()); - } - }; -} diff --git a/src/crypto/lib.zig b/src/crypto/lib.zig index 2e16423d7..68a2547af 100644 --- a/src/crypto/lib.zig +++ 
b/src/crypto/lib.zig @@ -1,4 +1 @@ -pub const base58 = @import("base58.zig"); -pub const fnv = @import("fnv.zig"); - -pub const FnvHasher = fnv.FnvHasher; +pub const FnvHasher = @import("fnv.zig").FnvHasher; diff --git a/src/geyser/main.zig b/src/geyser/main.zig index 19a1f2822..9e59f190b 100644 --- a/src/geyser/main.zig +++ b/src/geyser/main.zig @@ -120,9 +120,9 @@ pub fn getOwnerFilters( var owner_pubkeys = std.AutoArrayHashMap(sig.core.Pubkey, void).init(allocator); errdefer owner_pubkeys.deinit(); - try owner_pubkeys.ensureTotalCapacity(@intCast(owner_accounts_str.len)); + try owner_pubkeys.ensureTotalCapacity(owner_accounts_str.len); for (owner_accounts_str) |owner_str| { - const owner_pubkey = try sig.core.Pubkey.fromString(owner_str); + const owner_pubkey = try sig.core.Pubkey.parseBase58String(owner_str); owner_pubkeys.putAssumeCapacity(owner_pubkey, {}); } @@ -142,7 +142,7 @@ pub fn getAccountFilters( try account_pubkeys.ensureTotalCapacity(@intCast(accounts_str.len)); for (accounts_str) |account_str| { - const account_pubkey = try sig.core.Pubkey.fromString(account_str); + const account_pubkey = try sig.core.Pubkey.parseBase58String(account_str); account_pubkeys.putAssumeCapacity(account_pubkey, {}); } diff --git a/src/gossip/data.zig b/src/gossip/data.zig index 1f4fb7ad5..e8a3c87a5 100644 --- a/src/gossip/data.zig +++ b/src/gossip/data.zig @@ -100,7 +100,7 @@ pub const SignedGossipData = struct { error.NonCanonical => unreachable, }; return .{ - .signature = Signature.init(signature.toBytes()), + .signature = .{ .data = signature.toBytes() }, .data = data, }; } @@ -1739,7 +1739,9 @@ test "contact info bincode serialize matches rust bincode" { // Build identical Sig contact info var sig_contact_info = ContactInfo{ - .pubkey = Pubkey.fromString("4NftWecdfGcYZMJahnAAX5Cw1PLGLZhYFB19wL6AkXqW") catch unreachable, + .pubkey = Pubkey.parseBase58String( + "4NftWecdfGcYZMJahnAAX5Cw1PLGLZhYFB19wL6AkXqW", + ) catch unreachable, .wallclock = 1721060646885, .outset = 1721060141617172, .shred_version = 0, @@ -1919,7 +1921,7 @@ test "RestartHeaviestFork serialization matches rust" { var rust_bytes = [_]u8{ 82, 182, 93, 119, 193, 123, 4, 235, 68, 64, 82, 233, 51, 34, 232, 123, 245, 237, 236, 142, 251, 1, 123, 124, 26, 40, 219, 84, 165, 116, 208, 63, 19, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 20, 0 }; const x = RestartHeaviestFork{ - .from = try Pubkey.fromString("6ZsiX6YcwEa93yWtVwGRiK8Ceoxq2VieVh2pvEiUtpCW"), + .from = try Pubkey.parseBase58String("6ZsiX6YcwEa93yWtVwGRiK8Ceoxq2VieVh2pvEiUtpCW"), .wallclock = 19, .last_slot = 12, .observed_stake = 11, @@ -1945,7 +1947,7 @@ test "RestartLastVotedForkSlots serialization matches rust" { }; const data = RestartLastVotedForkSlots{ - .from = try Pubkey.fromString("6ZsiX6YcwEa93yWtVwGRiK8Ceoxq2VieVh2pvEiUtpCW"), + .from = try Pubkey.parseBase58String("6ZsiX6YcwEa93yWtVwGRiK8Ceoxq2VieVh2pvEiUtpCW"), .wallclock = 0, .last_voted_slot = 0, .last_voted_hash = Hash.ZEROES, diff --git a/src/gossip/dump_service.zig b/src/gossip/dump_service.zig index 8e202c3e3..09742ee97 100644 --- a/src/gossip/dump_service.zig +++ b/src/gossip/dump_service.zig @@ -1,5 +1,6 @@ const std = @import("std"); const sig = @import("../sig.zig"); +const base58 = @import("base58"); const Allocator = std.mem.Allocator; const SignedGossipData = sig.gossip.data.SignedGossipData; @@ -52,20 +53,21 @@ pub const GossipDumpService = struct { const writer = 
stream.writer(); // write records to string - var encoder_buf: [50]u8 = undefined; - const base58Encoder = @import("base58-zig").Encoder.init(.{}); + const endec = base58.Table.BITCOIN; for (gossip_table.store.values()) |gossip_versioned_data| { const val: SignedGossipData = gossip_versioned_data.value; - const size = try base58Encoder.encode( + + var encoded_buf: [50]u8 = undefined; + const encoded_len = endec.encode( + &encoded_buf, &gossip_versioned_data.value_hash.data, - &encoder_buf, ); - try writer.print("{s},{s},{s},{},", .{ - @tagName(val.data), - val.id(), - encoder_buf[0..size], - val.wallclock(), - }); + const encoded = encoded_buf[0..encoded_len]; + + try writer.print( + "{s},{s},{s},{},", + .{ @tagName(val.data), val.id(), encoded, val.wallclock() }, + ); if (val.data.gossipAddr()) |addr| { try addr.toAddress().format("", .{}, writer); } diff --git a/src/gossip/ping_pong.zig b/src/gossip/ping_pong.zig index 6d45678fd..21e33de9f 100644 --- a/src/gossip/ping_pong.zig +++ b/src/gossip/ping_pong.zig @@ -25,31 +25,29 @@ pub const Ping = struct { token: [PING_TOKEN_SIZE]u8, signature: Signature, - const Self = @This(); - - pub fn init(token: [PING_TOKEN_SIZE]u8, keypair: *const KeyPair) !Self { + pub fn init(token: [PING_TOKEN_SIZE]u8, keypair: *const KeyPair) !Ping { const signature = try keypair.sign(&token, null); - const self = Self{ + const self = .{ .from = Pubkey.fromPublicKey(&keypair.public_key), .token = token, - .signature = Signature.init(signature.toBytes()), + .signature = .{ .data = signature.toBytes() }, }; return self; } - pub fn initRandom(random: std.rand.Random, keypair: *const KeyPair) !Self { + pub fn initRandom(random: std.rand.Random, keypair: *const KeyPair) !Ping { var token: [PING_TOKEN_SIZE]u8 = undefined; random.bytes(&token); - var signature = keypair.sign(&token, null) catch unreachable; // TODO: do we need noise? + const signature = keypair.sign(&token, null) catch unreachable; // TODO: do we need noise? - return Self{ + return Ping{ .from = Pubkey.fromPublicKey(&keypair.public_key), .token = token, - .signature = Signature.init(signature.toBytes()), + .signature = .{ .data = signature.toBytes() }, }; } - pub fn verify(self: *const Self) !void { + pub fn verify(self: *const Ping) !void { if (!try self.signature.verify(self.from, &self.token)) { return error.InvalidSignature; } @@ -61,32 +59,30 @@ pub const Pong = struct { hash: Hash, // Hash of received ping token. 
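// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): the dump-service hunk above moves
// from the old `base58-zig` Encoder to the table-based API. Assuming only the
// calls this diff itself uses — `base58.encodedMaxSize` and
// `base58.Table.BITCOIN.encode(dest, src)`, which returns the written length —
// a 32-byte hash can be encoded into a stack buffer like so; the function name
// and surrounding scaffolding are illustrative only.
const std = @import("std");
const base58 = @import("base58");

fn encodeValueHash(hash_data: [32]u8) std.BoundedArray(u8, base58.encodedMaxSize(32)) {
    // Equivalent to what Hash.base58String() now does via encodeArray.
    var out: std.BoundedArray(u8, base58.encodedMaxSize(32)) = .{};
    const len = base58.Table.BITCOIN.encode(&out.buffer, &hash_data);
    out.len = @intCast(len);
    return out;
}
// ---------------------------------------------------------------------------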
signature: Signature, - const Self = @This(); - - pub fn init(ping: *const Ping, keypair: *const KeyPair) !Self { + pub fn init(ping: *const Ping, keypair: *const KeyPair) !Pong { var token_with_prefix = PING_PONG_HASH_PREFIX ++ ping.token; var hash = Hash.generateSha256Hash(token_with_prefix[0..]); const signature = keypair.sign(&hash.data, null) catch return error.SignatureError; - return Self{ + return .{ .from = Pubkey.fromPublicKey(&keypair.public_key), .hash = hash, - .signature = Signature.init(signature.toBytes()), + .signature = .{ .data = signature.toBytes() }, }; } - pub fn verify(self: *const Self) !void { + pub fn verify(self: *const Pong) !void { if (!try self.signature.verify(self.from, &self.hash.data)) { return error.InvalidSignature; } } - pub fn initRandom(random: std.rand.Random, keypair: *const KeyPair) !Self { + pub fn initRandom(random: std.rand.Random, keypair: *const KeyPair) !Pong { const ping = try Ping.initRandom(random, keypair); return try Pong.init(&ping, keypair); } - pub fn eql(self: *const @This(), other: *const @This()) bool { + pub fn eql(self: *const Pong, other: *const @This()) bool { return std.mem.eql(u8, &self.from.data, &other.from.data) and std.mem.eql(u8, &self.hash.data, &other.hash.data) and std.mem.eql(u8, &self.signature.data, &other.signature.data); diff --git a/src/gossip/prune.zig b/src/gossip/prune.zig index 316a786f8..3767b7f7c 100644 --- a/src/gossip/prune.zig +++ b/src/gossip/prune.zig @@ -26,19 +26,17 @@ pub const PruneData = struct { /// Wallclock of the node that generated this message wallclock: u64, - const Self = @This(); - - pub fn init(pubkey: Pubkey, prunes: []Pubkey, destination: Pubkey, now: u64) Self { - return Self{ + pub fn init(pubkey: Pubkey, prunes: []const Pubkey, destination: Pubkey, now: u64) PruneData { + return .{ .pubkey = pubkey, .prunes = prunes, .destination = destination, - .signature = Signature.init(.{0} ** 64), + .signature = Signature.ZEROES, .wallclock = now, }; } - pub fn deinit(self: Self, allocator: std.mem.Allocator) void { + pub fn deinit(self: PruneData, allocator: std.mem.Allocator) void { allocator.free(self.prunes); } @@ -61,7 +59,7 @@ pub const PruneData = struct { var self = PruneData{ .pubkey = Pubkey.fromPublicKey(&keypair.public_key), .prunes = &[0]Pubkey{}, - .signature = Signature.init(.{0} ** 64), + .signature = Signature.ZEROES, .destination = Pubkey.initRandom(random), .wallclock = getWallclockMs(), }; @@ -156,15 +154,17 @@ test "sign/verify PruneData with prefix" { 230, 102, 29, 182, 139, 6, 61, 35, 28, 233, 6, 63, 229, })); const pubkey = Pubkey.fromPublicKey(&keypair.public_key); - const expected_pubkey = try Pubkey.fromString("5zYQ7PqYa81fw3rXAYUtmUcoL9TFwG67wcE9LW8hwtfE"); + const expected_pubkey = try Pubkey.parseBase58String( + "5zYQ7PqYa81fw3rXAYUtmUcoL9TFwG67wcE9LW8hwtfE", + ); try std.testing.expectEqual(expected_pubkey.data, pubkey.data); - const prune1 = try Pubkey.fromString("1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM"); - const prune2 = try Pubkey.fromString("1111111ogCyDbaRMvkdsHB3qfdyFYaG1WtRUAfdh"); - const prune3 = try Pubkey.fromString("11111112D1oxKts8YPdTJRG5FzxTNpMtWmq8hkVx3"); - const destination = try Pubkey.fromString("11111112cMQwSC9qirWGjZM6gLGwW69X22mqwLLGP"); + const prune1 = try Pubkey.parseBase58String("1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM"); + const prune2 = try Pubkey.parseBase58String("1111111ogCyDbaRMvkdsHB3qfdyFYaG1WtRUAfdh"); + const prune3 = try Pubkey.parseBase58String("11111112D1oxKts8YPdTJRG5FzxTNpMtWmq8hkVx3"); + const destination = try 
Pubkey.parseBase58String("11111112cMQwSC9qirWGjZM6gLGwW69X22mqwLLGP"); - const expected_signature = try Signature.fromString( + const expected_signature = try Signature.parseBase58String( "XjXQxG6vhrfPPQtddCgkfmKsH69YoUvG6GTrQfvmB73GUTjXCL5VDBE3Na94e4uT2MWPTBP3cinVdpHdBb9zAxY", ); @@ -207,7 +207,7 @@ test "PruneData sig verify" { var prune_v2 = PruneData{ .pubkey = Pubkey.fromPublicKey(&keypair.public_key), .prunes = &[0]Pubkey{}, - .signature = Signature.init(.{0} ** 64), + .signature = Signature.ZEROES, .destination = Pubkey.fromPublicKey(&keypair.public_key), .wallclock = 0, }; diff --git a/src/ledger/benchmarks.zig b/src/ledger/benchmarks.zig index fc81f8b51..1e4edf8f4 100644 --- a/src/ledger/benchmarks.zig +++ b/src/ledger/benchmarks.zig @@ -306,7 +306,7 @@ pub const BenchmarkLedger = struct { for (random_bytes[0..]) |*byte| { byte.* = rng.random().int(u8); } - signatures.appendAssumeCapacity(Signature.init(random_bytes)); + signatures.appendAssumeCapacity(.{ .data = random_bytes }); } const slot = 5; diff --git a/src/ledger/cleanup_service.zig b/src/ledger/cleanup_service.zig index b4d54197f..f7e9563f2 100644 --- a/src/ledger/cleanup_service.zig +++ b/src/ledger/cleanup_service.zig @@ -255,8 +255,8 @@ fn writePurgeRange(write_batch: *BlockstoreDB.WriteBatch, from_slot: Slot, to_sl try purgeRangeWithCount( write_batch, schema.transaction_status, - .{ Signature.default(), from_slot }, - .{ Signature.default(), to_slot }, + .{ Signature.ZEROES, from_slot }, + .{ Signature.ZEROES, to_slot }, &delete_count, ); // NOTE: for `address_signatures`, agave doesnt key based on slot for some reason @@ -265,15 +265,15 @@ fn writePurgeRange(write_batch: *BlockstoreDB.WriteBatch, from_slot: Slot, to_sl try purgeRangeWithCount( write_batch, schema.address_signatures, - .{ .slot = from_slot, .address = Pubkey.ZEROES, .transaction_index = 0, .signature = Signature.default() }, - .{ .slot = to_slot, .address = Pubkey.ZEROES, .transaction_index = 0, .signature = Signature.default() }, + .{ .slot = from_slot, .address = Pubkey.ZEROES, .transaction_index = 0, .signature = Signature.ZEROES }, + .{ .slot = to_slot, .address = Pubkey.ZEROES, .transaction_index = 0, .signature = Signature.ZEROES }, &delete_count, ); try purgeRangeWithCount( write_batch, schema.transaction_memos, - .{ Signature.default(), from_slot }, - .{ Signature.default(), to_slot }, + .{ Signature.ZEROES, from_slot }, + .{ Signature.ZEROES, to_slot }, &delete_count, ); try purgeRangeWithCount(write_batch, schema.transaction_status_index, from_slot, to_slot, &delete_count); @@ -337,8 +337,8 @@ fn purgeFilesInRange(db: *BlockstoreDB, from_slot: Slot, to_slot: Slot) !void { try purgeFileRangeWithCount( db, schema.transaction_status, - .{ Signature.default(), from_slot }, - .{ Signature.default(), to_slot }, + .{ Signature.ZEROES, from_slot }, + .{ Signature.ZEROES, to_slot }, &delete_count, ); // NOTE: for `address_signatures`, agave doesnt key based on slot for some reason @@ -347,15 +347,15 @@ fn purgeFilesInRange(db: *BlockstoreDB, from_slot: Slot, to_slot: Slot) !void { try purgeFileRangeWithCount( db, schema.address_signatures, - .{ .slot = from_slot, .address = Pubkey.ZEROES, .transaction_index = 0, .signature = Signature.default() }, - .{ .slot = to_slot, .address = Pubkey.ZEROES, .transaction_index = 0, .signature = Signature.default() }, + .{ .slot = from_slot, .address = Pubkey.ZEROES, .transaction_index = 0, .signature = Signature.ZEROES }, + .{ .slot = to_slot, .address = Pubkey.ZEROES, .transaction_index = 0, .signature = 
Signature.ZEROES }, &delete_count, ); try purgeFileRangeWithCount( db, schema.transaction_memos, - .{ Signature.default(), from_slot }, - .{ Signature.default(), to_slot }, + .{ Signature.ZEROES, from_slot }, + .{ Signature.ZEROES, to_slot }, &delete_count, ); try purgeFileRangeWithCount(db, schema.transaction_status_index, from_slot, to_slot, &delete_count); diff --git a/src/ledger/reader.zig b/src/ledger/reader.zig index 49cf7daf3..39f568467 100644 --- a/src/ledger/reader.zig +++ b/src/ledger/reader.zig @@ -559,14 +559,17 @@ pub const BlockstoreReader = struct { const block_time = try self.db.get(self.allocator, schema.blocktime, slot); const block_height = try self.db.get(self.allocator, schema.block_height, slot); - return VersionedConfirmedBlockWithEntries{ - .block = VersionedConfirmedBlock{ + const transactions = try txns_with_statuses.toOwnedSlice(); + errdefer self.allocator.free(transactions); + + return .{ + .block = .{ .allocator = self.allocator, - .previous_blockhash = try previous_blockhash.base58EncodeAlloc(self.allocator), - .blockhash = try blockhash.base58EncodeAlloc(self.allocator), + .previous_blockhash = previous_blockhash, + .blockhash = blockhash, // If the slot is full it should have parent_slot populated from shreds received. .parent_slot = slot_meta.parent_slot orelse return error.MissingParentSlot, - .transactions = try txns_with_statuses.toOwnedSlice(), + .transactions = transactions, .rewards = rewards.rewards, .num_partitions = rewards.num_partitions, .block_time = block_time, @@ -839,7 +842,7 @@ pub const BlockstoreReader = struct { .address = address, .slot = slot, .transaction_index = 0, - .signature = Signature.init(.{0} ** 64), + .signature = Signature.ZEROES, }); // Iterate until limit is reached @@ -949,7 +952,7 @@ pub const BlockstoreReader = struct { .address = pubkey, .slot = @max(slot, lowest_available_slot), .transaction_index = 0, - .signature = Signature.init(.{0} ** 64), + .signature = Signature.ZEROES, }); defer index_iterator.deinit(); while (try index_iterator.nextKey()) |key| { @@ -1376,8 +1379,8 @@ const CompletedRanges = ArrayList(struct { u32, u32 }); /// is always present. Used for uploading to BigTable. 
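// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): the renames applied throughout this
// diff replace Pubkey.fromString/string and Signature.fromString/default with
// parseBase58String/base58String and the ZEROES constant. A minimal
// illustration follows, assuming the `sig` module name that downstream code in
// build.zig imports; the test itself is not in the patch.
const std = @import("std");
const sig = @import("sig");
const Pubkey = sig.core.Pubkey;
const Signature = sig.core.Signature;

test "base58 rename round trip (illustrative)" {
    const key = try Pubkey.parseBase58String("SysvarC1ock11111111111111111111111111111111");
    // base58String() returns a BoundedArray, so formatting needs no allocator.
    try std.testing.expectEqualStrings(
        "SysvarC1ock11111111111111111111111111111111",
        key.base58String().constSlice(),
    );
    // Zeroed signatures are now spelled Signature.ZEROES instead of
    // Signature.default() / Signature.init(.{0} ** 64).
    const zero: Signature = .{ .data = .{0} ** Signature.SIZE };
    try std.testing.expect(Signature.ZEROES.eql(&zero));
}
// ---------------------------------------------------------------------------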
pub const VersionedConfirmedBlock = struct { allocator: Allocator, - previous_blockhash: []const u8, - blockhash: []const u8, + previous_blockhash: Hash, + blockhash: Hash, parent_slot: Slot, transactions: []const VersionedTransactionWithStatusMeta, rewards: []const ledger.meta.Reward, @@ -1390,8 +1393,6 @@ pub const VersionedConfirmedBlock = struct { for (self.rewards) |it| it.deinit(allocator); allocator.free(self.transactions); allocator.free(self.rewards); - allocator.free(self.previous_blockhash); - allocator.free(self.blockhash); } }; diff --git a/src/ledger/shred.zig b/src/ledger/shred.zig index aa2cb0ca7..9f74f3bec 100644 --- a/src/ledger/shred.zig +++ b/src/ledger/shred.zig @@ -18,7 +18,7 @@ const checkedSub = sig.utils.math.checkedSub; pub const MAX_SHREDS_PER_SLOT: usize = code_shred_constants.max_per_slot + data_shred_constants.max_per_slot; pub const DATA_SHREDS_PER_FEC_BLOCK: usize = 32; -const SIZE_OF_MERKLE_ROOT: usize = sig.core.Hash.size; +const SIZE_OF_MERKLE_ROOT: usize = sig.core.Hash.SIZE; pub const code_shred_constants = ShredConstants{ .max_per_slot = 32_768, @@ -316,14 +316,14 @@ pub const DataShred = struct { shard: []const u8, ) !Self { const shard_size = shard.len; - if (shard_size + Signature.size > constants.payload_size) { + if (shard_size + Signature.SIZE > constants.payload_size) { return error.InvalidShardSize; } const payload = try allocator.alloc(u8, constants.payload_size); errdefer allocator.free(payload); - @memcpy(payload[0..Signature.size], &leader_signature.data); - @memcpy(payload[Signature.size..][0..shard_size], shard); - @memset(payload[Signature.size + shard_size ..], 0); + @memcpy(payload[0..Signature.SIZE], &leader_signature.data); + @memcpy(payload[Signature.SIZE..][0..shard_size], shard); + @memset(payload[Signature.SIZE + shard_size ..], 0); var shred = try generic.fromPayloadOwned(allocator, payload); if (shard_size != try capacity(code_shred_constants, shred.common.variant)) return error.InvalidShardSize; @@ -489,7 +489,7 @@ fn generic_shred(shred_type: ShredType) type { fn merkleNode(self: Self) !Hash { const offset = try proofOffset(constants, self.common.variant); - return getMerkleNodeAt(self.payload, Signature.size, offset); + return getMerkleNodeAt(self.payload, Signature.SIZE, offset); } fn erasureShardAsSlice(self: *const Self) ![]const u8 { @@ -502,7 +502,7 @@ fn generic_shred(shred_type: ShredType) type { return error.InsufficientPayloadSize; } const start = switch (self.common.variant.shred_type) { - .data => Signature.size, + .data => Signature.SIZE, .code => constants.headers_size, }; return self.payload[start..end]; @@ -526,11 +526,11 @@ fn generic_shred(shred_type: ShredType) type { /// agave: retransmitter_signature fn retransmitterSignature(self: Self) !Signature { const offset = try retransmitterSignatureOffset(self.common.variant); - const end = offset + Signature.size; + const end = offset + Signature.SIZE; if (self.payload.len < end) { return error.InvalidPayloadSize; } - var sig_bytes: [Signature.size]u8 = undefined; + var sig_bytes: [Signature.SIZE]u8 = undefined; @memcpy(&sig_bytes, self.payload[offset..end]); return .{ .data = sig_bytes }; } @@ -574,7 +574,7 @@ fn getMerkleRoot( }; const proof = try getMerkleProofFor(shred, constants, variant); const offset = try proofOffset(constants, variant); - const node = try getMerkleNodeAt(shred, Signature.size, offset); + const node = try getMerkleNodeAt(shred, Signature.SIZE, offset); return calculateMerkleRoot(index, node, proof); } @@ -635,7 +635,7 @@ pub fn 
setMerkleProof(shred: []u8, proof: MerkleProofEntryList) !void { pub fn getMerkleNode(shred: []const u8) !Hash { const variant = layout.getShredVariant(shred) orelse return error.UnknownShredVariant; const offset = try proofOffset(variant.constants(), variant); - return getMerkleNodeAt(shred, Signature.size, offset); + return getMerkleNodeAt(shred, Signature.SIZE, offset); } fn getMerkleNodeAt(shred: []const u8, start: usize, end: usize) !Hash { @@ -773,7 +773,7 @@ fn capacity(constants: ShredConstants, variant: ShredVariant) !usize { constants.headers_size + (if (variant.chained) SIZE_OF_MERKLE_ROOT else 0) + variant.proof_size * merkle_proof_entry_size + - (if (variant.resigned) Signature.size else 0), + (if (variant.resigned) Signature.SIZE else 0), ) catch error.InvalidProofSize; } @@ -854,7 +854,7 @@ pub const MerkleProofEntryList = struct { /// agave: setRetransmitterSignature fn setRetransmitterSignatureFor(shred: []u8, variant: ShredVariant, signature: Signature) !void { const offset = try retransmitterSignatureOffset(variant); - const end = offset + Signature.size; + const end = offset + Signature.SIZE; if (shred.len < end) { return error.InvalidPayloadSize; } @@ -874,10 +874,8 @@ pub const CommonHeader = struct { pub const @"!bincode-config:variant" = ShredVariantConfig; - const Self = @This(); - - const ZEROED_FOR_TEST = Self{ - .leader_signature = Signature{ .data = .{0} ** Signature.size }, + const ZEROED_FOR_TEST = .{ + .leader_signature = Signature{ .data = .{0} ** Signature.SIZE }, .variant = ShredVariant{ .shred_type = .data, .proof_size = 0, .chained = false, .resigned = false }, .slot = 0, .index = 0, @@ -886,8 +884,8 @@ pub const CommonHeader = struct { }; // Identifier for the erasure code set that the shred belongs to. - pub fn erasureSetId(self: @This()) ErasureSetId { - return ErasureSetId{ + pub fn erasureSetId(self: CommonHeader) ErasureSetId { + return .{ .slot = self.slot, .erasure_set_index = self.erasure_set_index, }; @@ -1075,7 +1073,7 @@ pub const layout = struct { const SIZE_OF_COMMON_SHRED_HEADER: usize = 83; const SIZE_OF_DATA_SHRED_HEADERS: usize = 88; const SIZE_OF_CODE_SHRED_HEADERS: usize = 89; - const SIZE_OF_SIGNATURE: usize = sig.core.Signature.size; + const SIZE_OF_SIGNATURE: usize = sig.core.Signature.SIZE; const SIZE_OF_SHRED_VARIANT: usize = 1; const SIZE_OF_SHRED_SLOT: usize = 8; const SIZE_OF_INDEX: usize = 4; @@ -1127,10 +1125,10 @@ pub const layout = struct { } pub fn getLeaderSignature(shred: []const u8) ?Signature { - if (shred.len < Signature.size) { + if (shred.len < Signature.SIZE) { return null; } - return Signature.init(shred[0..SIZE_OF_SIGNATURE].*); + return .{ .data = shred[0..SIZE_OF_SIGNATURE].* }; } pub fn merkleRoot(shred: []const u8) ?Hash { @@ -1158,7 +1156,7 @@ pub const layout = struct { const offset = getChainedMerkleRootOffset(variant) catch return null; const end = offset +| SIZE_OF_MERKLE_ROOT; if (shred.len < end) return null; - return Hash.fromSizedSlice(shred[offset..][0..SIZE_OF_MERKLE_ROOT]); + return .{ .data = shred[offset..][0..SIZE_OF_MERKLE_ROOT].* }; } pub fn setRetransmitterSignature(shred: []u8, signature: Signature) !void { diff --git a/src/ledger/shred_inserter/recovery.zig b/src/ledger/shred_inserter/recovery.zig index f4b99ed9a..575131445 100644 --- a/src/ledger/shred_inserter/recovery.zig +++ b/src/ledger/shred_inserter/recovery.zig @@ -462,7 +462,7 @@ const expected_metadata = blk: { break :blk RecoveryMetadata{ .common_header = CommonHeader{ - .leader_signature = Signature.fromString( + 
.leader_signature = Signature.parseBase58String( "ksnjzXzraR5hWthnKAWVgJkDBUoRX8CHpLttYs2s" ++ "AmhPFvh6Ga6HMTLMKRi45p1PfLevfm272ANmwTBEvGwW19m", ) catch unreachable, diff --git a/src/ledger/shred_inserter/shred_inserter.zig b/src/ledger/shred_inserter/shred_inserter.zig index 544706a66..1ab572e8f 100644 --- a/src/ledger/shred_inserter/shred_inserter.zig +++ b/src/ledger/shred_inserter/shred_inserter.zig @@ -1550,7 +1550,7 @@ test "recovery" { const code_shreds = shreds[34..68]; var leader_schedule = OneSlotLeaders{ - .leader = try Pubkey.fromString("2iWGQbhdWWAA15KTBJuqvAxCdKmEvY26BoFRBU4419Sn"), + .leader = try Pubkey.parseBase58String("2iWGQbhdWWAA15KTBJuqvAxCdKmEvY26BoFRBU4419Sn"), }; const is_repairs = try allocator.alloc(bool, code_shreds.len); diff --git a/src/ledger/tests.zig b/src/ledger/tests.zig index aef3f6ffa..d212da90a 100644 --- a/src/ledger/tests.zig +++ b/src/ledger/tests.zig @@ -62,7 +62,6 @@ test "insert shreds and transaction statuses then get blocks" { defer result.deinit(); const blockhash = result.entries[result.entries.len - 1].hash; - const blockhash_string = blockhash.base58String(); defer state.deinit(); const allocator = state.allocator; @@ -84,12 +83,12 @@ test "insert shreds and transaction statuses then get blocks" { const confirmed_block = try reader.getRootedBlock(slot, false); defer confirmed_block.deinit(allocator); try std.testing.expectEqual(100, confirmed_block.transactions.len); - const expected_block = ledger.reader.VersionedConfirmedBlock{ + const expected_block: ledger.reader.VersionedConfirmedBlock = .{ .allocator = allocator, .transactions = result.expected_transactions, .parent_slot = slot - 1, - .blockhash = blockhash_string.slice(), - .previous_blockhash = sig.core.Hash.ZEROES.base58String().slice(), + .blockhash = blockhash, + .previous_blockhash = sig.core.Hash.ZEROES, .rewards = &.{}, .num_partitions = null, .block_time = null, @@ -105,8 +104,8 @@ test "insert shreds and transaction statuses then get blocks" { .allocator = allocator, .transactions = result.expected_transactions, .parent_slot = slot, - .blockhash = blockhash_string.slice(), - .previous_blockhash = blockhash_string.slice(), + .blockhash = blockhash, + .previous_blockhash = blockhash, .rewards = &.{}, .num_partitions = null, .block_time = null, @@ -123,8 +122,8 @@ test "insert shreds and transaction statuses then get blocks" { .allocator = allocator, .transactions = result.expected_transactions, .parent_slot = slot + 1, - .blockhash = blockhash_string.slice(), - .previous_blockhash = blockhash_string.slice(), + .blockhash = blockhash, + .previous_blockhash = blockhash, .rewards = &.{}, .num_partitions = null, .block_time = null, diff --git a/src/rpc/client.zig b/src/rpc/client.zig index fa03efaf3..22efe3817 100644 --- a/src/rpc/client.zig +++ b/src/rpc/client.zig @@ -1,6 +1,6 @@ const std = @import("std"); -const base58 = @import("base58-zig"); const sig = @import("../sig.zig"); +const base58 = @import("base58"); const types = sig.rpc.types; @@ -77,7 +77,7 @@ pub const Client = struct { pub fn getAccountInfo(self: *Client, allocator: std.mem.Allocator, pubkey: Pubkey, config: GetAccountInfoConfig) !Response(types.AccountInfo) { var request = try Request.init(allocator, "getAccountInfo"); defer request.deinit(); - try request.addParameter(pubkey.string().slice()); + try request.addParameter(pubkey.base58String().slice()); try request.addConfig(config); return self.sendFetchRequest(allocator, types.AccountInfo, request, .{}); } @@ -90,7 +90,7 @@ pub const Client = struct { pub 
fn getBalance(self: *Client, allocator: std.mem.Allocator, pubkey: Pubkey, config: GetBalanceConfig) !Response(types.Balance) { var request = try Request.init(allocator, "getBalance"); defer request.deinit(); - try request.addParameter(pubkey.string().slice()); + try request.addParameter(pubkey.base58String().slice()); try request.addConfig(config); return self.sendFetchRequest(allocator, types.Balance, request, .{}); } @@ -289,7 +289,7 @@ pub const Client = struct { ) !Response(types.Signature) { var request = try Request.init(allocator, "requestAirdrop"); defer request.deinit(); - try request.addParameter(pubkey.string().slice()); + try request.addParameter(pubkey.base58String().slice()); try request.addParameter(lamports); try request.addConfig(config); return self.sendFetchRequest(allocator, types.Signature, request, .{}); @@ -326,13 +326,12 @@ pub const Client = struct { var buffer: [sig.net.PACKET_DATA_SIZE]u8 = undefined; const written = try sig.bincode.writeToSlice(&buffer, transaction, .{}); - const sized = sig.crypto.base58.Base58Sized(sig.net.PACKET_DATA_SIZE); - var encode_buffer: [sized.max_encoded_size]u8 = undefined; + const endec = base58.Table.BITCOIN; + var encoded_buffer: [base58.encodedMaxSize(buffer.len)]u8 = undefined; + const length = endec.encode(&encoded_buffer, written); + const encoded = encoded_buffer[0..length]; - var encoder = base58.Encoder.init(.{}); - const length = try encoder.encode(written, &encode_buffer); - - try request.addParameter(encode_buffer[0..length]); + try request.addParameter(encoded); try request.addConfig(config); return self.sendFetchRequest(allocator, types.Signature, request, .{}); @@ -440,7 +439,7 @@ test "getAccountInfo: null value" { var client = Client.init(allocator, .Testnet, .{}); defer client.deinit(); // random pubkey that should not exist - const pubkey = try Pubkey.fromString("Bkd9xbHF7JgwXmEib6uU3y582WaPWWiasPxzMesiBwWn"); + const pubkey = try Pubkey.parseBase58String("Bkd9xbHF7JgwXmEib6uU3y582WaPWWiasPxzMesiBwWn"); const response = try client.getAccountInfo(allocator, pubkey, .{}); defer response.deinit(); const x = try response.result(); @@ -453,7 +452,7 @@ test "getAccountInfo" { const allocator = std.testing.allocator; var client = Client.init(allocator, .Testnet, .{}); defer client.deinit(); - const pubkey = try Pubkey.fromString("Bkd9xbHF7JgwXmEib6uU3y582WaPWWiasPxzMesiBwWm"); + const pubkey = try Pubkey.parseBase58String("Bkd9xbHF7JgwXmEib6uU3y582WaPWWiasPxzMesiBwWm"); const response = try client.getAccountInfo(allocator, pubkey, .{}); defer response.deinit(); _ = try response.result(); @@ -464,7 +463,7 @@ test "getBalance" { const allocator = std.testing.allocator; var client = Client.init(allocator, .Testnet, .{}); defer client.deinit(); - const pubkey = try Pubkey.fromString("Bkd9xbHF7JgwXmEib6uU3y582WaPWWiasPxzMesiBwWm"); + const pubkey = try Pubkey.parseBase58String("Bkd9xbHF7JgwXmEib6uU3y582WaPWWiasPxzMesiBwWm"); const response = try client.getBalance(allocator, pubkey, .{}); defer response.deinit(); _ = try response.result(); @@ -558,10 +557,10 @@ test "getSignatureStatuses" { defer client.deinit(); var signatures = try allocator.alloc(Signature, 2); defer allocator.free(signatures); - signatures[0] = try Signature.fromString( + signatures[0] = try Signature.parseBase58String( "56H13bd79hzZa67gMACJYsKxb5MdfqHhe3ceEKHuBEa7hgjMgAA4Daivx68gBFUa92pxMnhCunngcP3dpVnvczGp", ); - signatures[1] = try Signature.fromString( + signatures[1] = try Signature.parseBase58String( 
"4K6Gjut37p3ajRtsN2s6q1Miywit8VyP7bAYLfVSkripdNJkF3bL6BWG7dauzZGMr3jfsuFaPR91k2NuuCc7EqAz", ); const response = try client.getSignatureStatuses(allocator, signatures, .{}); @@ -646,10 +645,14 @@ test "getVoteAccounts response parses correctly" { .commission = 0, .epochVoteAccount = true, .epochCredits = &.{ .{ 1, 64, 0 }, .{ 2, 192, 64 } }, - .nodePubkey = try Pubkey.fromString("B97CCUW3AEZFGy6uUg6zUdnNYvnVq5VG8PUtb2HayTDD"), + .nodePubkey = try Pubkey.parseBase58String( + "B97CCUW3AEZFGy6uUg6zUdnNYvnVq5VG8PUtb2HayTDD", + ), .lastVote = 147, .activatedStake = 42, - .votePubkey = try Pubkey.fromString("3ZT31jkAGhUaw8jsy4bTknwBMP8i4Eueh52By4zXcsVw"), + .votePubkey = try Pubkey.parseBase58String( + "3ZT31jkAGhUaw8jsy4bTknwBMP8i4Eueh52By4zXcsVw", + ), .rootSlot = 100, }}, .delinquent = &.{}, diff --git a/src/shred_network/repair_message.zig b/src/shred_network/repair_message.zig index 37cde271c..54c401e7d 100644 --- a/src/shred_network/repair_message.zig +++ b/src/shred_network/repair_message.zig @@ -51,9 +51,9 @@ pub fn serializeRepairRequest( timestamp: u64, nonce: Nonce, ) ![]u8 { - const header = RepairRequestHeader{ - .signature = Signature.init(undefined), - .sender = try Pubkey.fromBytes(&keypair.public_key.bytes), + const header: RepairRequestHeader = .{ + .signature = .{ .data = undefined }, + .sender = .{ .data = keypair.public_key.bytes }, .recipient = recipient, .timestamp = timestamp, .nonce = nonce, @@ -78,8 +78,8 @@ pub fn serializeRepairRequest( var signer = try keypair.signer(null); // TODO noise signer.update(serialized[0..4]); - signer.update(serialized[4 + Signature.size ..]); - @memcpy(serialized[4 .. 4 + Signature.size], &signer.finalize().toBytes()); + signer.update(serialized[4 + Signature.SIZE ..]); + @memcpy(serialized[4 .. 4 + Signature.SIZE], &signer.finalize().toBytes()); return serialized; } @@ -166,14 +166,14 @@ pub const RepairMessage = union(enum(u8)) { } // signature is valid - if (serialized.len < 4 + Signature.size) { + if (serialized.len < 4 + Signature.SIZE) { return error.Malformed; } var verifier = header.signature.verifier(header.sender) catch { return error.InvalidSignature; }; verifier.update(serialized[0..4]); - verifier.update(serialized[4 + Signature.size ..]); + verifier.update(serialized[4 + Signature.SIZE ..]); verifier.verify() catch { return error.InvalidSignature; }; @@ -238,11 +238,11 @@ test "signed/serialized RepairRequest is valid" { test "RepairRequestHeader serialization round trip" { var prng = std.rand.DefaultPrng.init(5224); - var signature: [Signature.size]u8 = undefined; + var signature: [Signature.SIZE]u8 = undefined; prng.fill(&signature); - const header = RepairRequestHeader{ - .signature = Signature.init(signature), + const header: RepairRequestHeader = .{ + .signature = .{ .data = signature }, .sender = Pubkey.initRandom(prng.random()), .recipient = Pubkey.initRandom(prng.random()), .timestamp = 5924, @@ -405,11 +405,11 @@ const testHelpers = struct { } fn randomRepairRequestHeader(random: std.rand.Random) RepairRequestHeader { - var signature: [Signature.size]u8 = undefined; + var signature: [Signature.SIZE]u8 = undefined; random.bytes(&signature); - return RepairRequestHeader{ - .signature = Signature.init(signature), + return .{ + .signature = .{ .data = signature }, .sender = Pubkey.initRandom(random), .recipient = Pubkey.initRandom(random), .timestamp = random.int(u64), diff --git a/src/shred_network/turbine_tree.zig b/src/shred_network/turbine_tree.zig index cc9011c37..0d05a1f76 100644 --- 
diff --git a/src/shred_network/turbine_tree.zig b/src/shred_network/turbine_tree.zig
index cc9011c37..0d05a1f76 100644
--- a/src/shred_network/turbine_tree.zig
+++ b/src/shred_network/turbine_tree.zig
@@ -492,7 +492,10 @@ const TestEnvironment = struct {
         );
         try contact_info.setSocket(.turbine_recv, SocketAddr.initRandom(params.random));
         _ = try gossip_table.insert(
-            SignedGossipData{ .signature = .{}, .data = .{ .ContactInfo = contact_info } },
+            .{
+                .signature = sig.core.Signature.ZEROES,
+                .data = .{ .ContactInfo = contact_info },
+            },
             0,
         );
         if (i == 0) my_contact_info = ThreadSafeContactInfo.fromContactInfo(contact_info);
@@ -807,7 +810,9 @@ test "agave: get retransmit nodes round trip" {
 
 test "agave-equivalence: get seeeded rng" {
     {
-        const pubkey = try Pubkey.fromString("57fFnkGGWzfnhmQEqbCBtZoYnNh26QxFa3FXZJhLmA19");
+        const pubkey = try Pubkey.parseBase58String(
+            "57fFnkGGWzfnhmQEqbCBtZoYnNh26QxFa3FXZJhLmA19",
+        );
         const shred_id = ShredId{ .slot = 1_013, .index = 10, .shred_type = .data };
         var chacha = TurbineTree.getSeededRng(pubkey, shred_id);
         const rng = chacha.random();
@@ -816,7 +821,9 @@
         try std.testing.expectEqual(3913197096749217054, rng.int(u64));
     }
     {
-        const pubkey = try Pubkey.fromString("3qChSzvc79TAKbd7jM8uAGHzeNh6PTjvQR8WPFiftNUq");
+        const pubkey = try Pubkey.parseBase58String(
+            "3qChSzvc79TAKbd7jM8uAGHzeNh6PTjvQR8WPFiftNUq",
+        );
         const shred_id = ShredId{ .slot = 200_378, .index = 0, .shred_type = .data };
         var chacha = TurbineTree.getSeededRng(pubkey, shred_id);
         const rng = chacha.random();
diff --git a/src/transaction_sender/leader_info.zig b/src/transaction_sender/leader_info.zig
index 73569ad01..a8635b383 100644
--- a/src/transaction_sender/leader_info.zig
+++ b/src/transaction_sender/leader_info.zig
@@ -1,6 +1,4 @@
 const std = @import("std");
-const network = @import("zig-network");
-const base58 = @import("base58-zig");
 const sig = @import("../sig.zig");
 
 const Allocator = std.mem.Allocator;
diff --git a/src/transaction_sender/mock_transfer_generator.zig b/src/transaction_sender/mock_transfer_generator.zig
index e05d65cd6..74467df5d 100644
--- a/src/transaction_sender/mock_transfer_generator.zig
+++ b/src/transaction_sender/mock_transfer_generator.zig
@@ -167,11 +167,10 @@ pub const MockTransferService = struct {
     pub fn rpcTransferAndWait(self: *MockTransferService, random: std.Random, from_keypair: KeyPair, to_pubkey: Pubkey, lamports: u64) !void {
         const from_pubkey = Pubkey.fromPublicKey(&from_keypair.public_key);
         for (0..MAX_RPC_RETRIES) |_| {
-            self.logger.debug().logf("rpc transfer: amount={} from_pubkey={s} to_pubkey={s}", .{
-                lamports,
-                from_pubkey.string().slice(),
-                to_pubkey.string().slice(),
-            });
+            self.logger.debug().logf(
+                "rpc transfer: amount={} from_pubkey={s} to_pubkey={s}",
+                .{ lamports, from_pubkey, to_pubkey },
+            );
             const latest_blockhash, _ = blk: {
                 const blockhash_response = try self.rpc_client.getLatestBlockhash(self.allocator, .{});
 
@@ -200,7 +199,7 @@
                 self.logger.debug().logf("rpc transfer failed with: {}", .{err});
                 return error.RpcTransferFailed;
             };
-            break :blk try Signature.fromString(signature_string);
+            break :blk try Signature.parseBase58String(signature_string);
         };
 
         const signature_confirmed = try self.waitForSignatureConfirmation(
@@ -362,7 +361,7 @@
             const response = try self.rpc_client.requestAirDrop(self.allocator, pubkey, lamports, .{});
             defer response.deinit();
             const signature_string = try response.result();
-            break :blk try Signature.fromString(signature_string);
+            break :blk try Signature.parseBase58String(signature_string);
         };
         const signature_confirmed = try self.waitForSignatureConfirmation(
             signature,