Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add CROState to vertices; partial linearisation #263

Merged
merged 38 commits into from
Dec 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
b56795b
feat: init adding CROState to object
JanLewDev Nov 18, 2024
ff3b357
introduce past lightcone wip
JanLewDev Nov 18, 2024
5fcf30d
refactor: rm lightcone naming, pastInDegree calculation
JanLewDev Nov 20, 2024
780a498
fix: biome formatting
JanLewDev Nov 20, 2024
9eedd9d
fix: passing map by value
JanLewDev Nov 21, 2024
b1c976c
refactor: rm unnecessary in-degree
JanLewDev Nov 22, 2024
99bd993
formatting
JanLewDev Nov 22, 2024
bc1276d
fix: cleanup; apply comments; ready for partial linearisation
JanLewDev Nov 25, 2024
ea4259b
add vertex distance to hash graph
trungnotchung Nov 26, 2024
1e5164d
compute vertex distance
trungnotchung Nov 26, 2024
ff206f9
update import/export type
trungnotchung Nov 26, 2024
fd77500
compute lca of two vertices
trungnotchung Nov 26, 2024
a07dc0a
add partial linearization
trungnotchung Nov 26, 2024
5728301
feat: use lca in state calculation
JanLewDev Nov 26, 2024
e8adfc2
fix lowest common ancestor logic
trungnotchung Nov 27, 2024
c436b74
update linearization logic multiple semantics
trungnotchung Nov 27, 2024
d55042d
update linearization logic pair semantics
trungnotchung Nov 27, 2024
3e15737
Merge branch 'main' into feat/vertex-state
trungnotchung Nov 28, 2024
f26ef23
fix lowest common ancestor when lca not found
trungnotchung Nov 28, 2024
7c47c9d
fix cro cloning
trungnotchung Nov 28, 2024
0ed4252
wip: test vertex state
trungnotchung Nov 28, 2024
da5f9ce
remove unused import
trungnotchung Nov 29, 2024
8a6bdf7
refactor: state just stores attributes
JanLewDev Nov 29, 2024
b905fb8
fix lowest common ancestor bug
trungnotchung Nov 29, 2024
a54e600
format code
trungnotchung Nov 29, 2024
38efd05
fix: state calculation
JanLewDev Nov 29, 2024
c641229
feat: add a tricky vertex state test
JanLewDev Nov 29, 2024
b1621ee
fix formatting in test
JanLewDev Nov 29, 2024
be0d748
test mega complex case
trungnotchung Nov 30, 2024
73087fb
format test
trungnotchung Nov 30, 2024
1971338
merge main
d-roak Dec 3, 2024
0a32e3d
run biome
d-roak Dec 3, 2024
fd2b27a
fix tests
d-roak Dec 3, 2024
1f3c3e7
update test comment
trungnotchung Dec 3, 2024
8d4a3bf
update test comment
trungnotchung Dec 3, 2024
73985ae
fix: refactor duplicated code
JanLewDev Dec 3, 2024
731fdb6
Merge branch 'feat/vertex-state' of https://github.com/topology-found…
JanLewDev Dec 3, 2024
dea0176
make DRPState a type
JanLewDev Dec 3, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
173 changes: 158 additions & 15 deletions packages/object/src/hashgraph/index.ts
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
import * as crypto from "node:crypto";
import { log } from "../index.js";
import { linearizeMultiple } from "../linearize/multipleSemantics.js";
import { linearizePair } from "../linearize/pairSemantics.js";
import {
import { linearizeMultipleSemantics } from "../linearize/multipleSemantics.js";
import { linearizePairSemantics } from "../linearize/pairSemantics.js";
import type {
Vertex_Operation as Operation,
Vertex,
} from "../proto/drp/object/v1/object_pb.js";
import { BitSet } from "./bitset.js";

// Reexporting the Vertex and Operation types from the protobuf file
export { Vertex, Operation };
export type { Vertex, Operation };

export type Hash = string;

Expand Down Expand Up @@ -42,6 +42,11 @@ export type ResolveConflictsType = {
vertices?: Hash[];
};

/**
 * Distance metadata for a vertex in the hashgraph.
 *
 * `distance` is the length of the shortest dependency chain from this vertex
 * back to the root (the root itself is stored with distance 0), and
 * `closestDependency` is the dependency hash that lies on that shortest
 * chain. `closestDependency` is left unset for the root vertex, which has
 * no dependencies.
 */
export type VertexDistance = {
	distance: number;
	closestDependency?: Hash;
};

export class HashGraph {
nodeId: string;
resolveConflicts: (vertices: Vertex[]) => ResolveConflictsType;
Expand All @@ -62,6 +67,7 @@ export class HashGraph {
private arePredecessorsFresh = false;
private reachablePredecessors: Map<Hash, BitSet> = new Map();
private topoSortedIndex: Map<Hash, number> = new Map();
private vertexDistances: Map<Hash, VertexDistance> = new Map();
// We start with a bitset of size 1, and double it every time we reach the limit
private currentBitsetSize = 1;

Expand All @@ -86,6 +92,9 @@ export class HashGraph {
this.vertices.set(HashGraph.rootHash, rootVertex);
this.frontier.push(HashGraph.rootHash);
this.forwardEdges.set(HashGraph.rootHash, []);
this.vertexDistances.set(HashGraph.rootHash, {
distance: 0,
});
}

addToFrontier(operation: Operation): Vertex {
Expand All @@ -110,9 +119,24 @@ export class HashGraph {
this.forwardEdges.get(dep)?.push(hash);
}

// Compute the distance of the vertex
const vertexDistance: VertexDistance = {
distance: Number.MAX_VALUE,
closestDependency: "",
};
for (const dep of deps) {
const depDistance = this.vertexDistances.get(dep);
if (depDistance && depDistance.distance + 1 < vertexDistance.distance) {
vertexDistance.distance = depDistance.distance + 1;
vertexDistance.closestDependency = dep;
}
}
this.vertexDistances.set(hash, vertexDistance);

const depsSet = new Set(deps);
this.frontier = this.frontier.filter((hash) => !depsSet.has(hash));
this.arePredecessorsFresh = false;

return vertex;
}

Expand Down Expand Up @@ -149,22 +173,41 @@ export class HashGraph {
this.forwardEdges.get(dep)?.push(hash);
}

// Compute the distance of the vertex
const vertexDistance: VertexDistance = {
distance: Number.MAX_VALUE,
closestDependency: "",
};
for (const dep of deps) {
const depDistance = this.vertexDistances.get(dep);
if (depDistance && depDistance.distance + 1 < vertexDistance.distance) {
vertexDistance.distance = depDistance.distance + 1;
vertexDistance.closestDependency = dep;
}
}
this.vertexDistances.set(hash, vertexDistance);

const depsSet = new Set(deps);
this.frontier = this.frontier.filter((hash) => !depsSet.has(hash));
this.arePredecessorsFresh = false;
return hash;
}

depthFirstSearch(visited: Map<Hash, number> = new Map()): Hash[] {
depthFirstSearch(
origin: Hash,
subgraph: Set<Hash>,
visited: Map<Hash, number> = new Map(),
): Hash[] {
const result: Hash[] = [];
for (const vertex of this.getAllVertices()) {
visited.set(vertex.hash, DepthFirstSearchState.UNVISITED);
for (const hash of subgraph) {
visited.set(hash, DepthFirstSearchState.UNVISITED);
}
const visit = (hash: Hash) => {
visited.set(hash, DepthFirstSearchState.VISITING);

const children = this.forwardEdges.get(hash) || [];
for (const child of children) {
if (!subgraph.has(child)) continue;
if (visited.get(child) === DepthFirstSearchState.VISITING) {
log.error("::hashgraph::DFS: Cycle detected");
return;
Expand All @@ -182,16 +225,20 @@ export class HashGraph {
visited.set(hash, DepthFirstSearchState.VISITED);
};

visit(HashGraph.rootHash);
visit(origin);

return result;
}

topologicalSort(updateBitsets = false): Hash[] {
const result = this.depthFirstSearch();
/* Topologically sort the vertices in the whole hashgraph or the past of a given vertex. */
topologicalSort(
updateBitsets = false,
origin: Hash = HashGraph.rootHash,
subgraph: Set<Hash> = new Set(this.vertices.keys()),
): Hash[] {
const result = this.depthFirstSearch(origin, subgraph);
result.reverse();
if (!updateBitsets) return result;

this.reachablePredecessors.clear();
this.topoSortedIndex.clear();

Expand Down Expand Up @@ -221,17 +268,109 @@ export class HashGraph {
return result;
}

/**
 * Linearize the operations in the past of `origin`, restricted to the
 * vertices in `subgraph`, according to the semantics type this hashgraph
 * was constructed with. With no arguments it linearizes the whole graph
 * starting from the root, preserving the behavior of the old zero-argument
 * form for existing callers.
 *
 * @param origin - Hash of the vertex whose past is linearized; defaults to the root hash.
 * @param subgraph - Hashes of the vertices to consider; defaults to every vertex in the graph.
 * @returns The linearized operations, or an empty array for an unknown semantics type.
 */
linearizeOperations(
	origin: Hash = HashGraph.rootHash,
	subgraph: Set<string> = new Set(this.vertices.keys()),
): Operation[] {
	switch (this.semanticsType) {
		case SemanticsType.pair:
			return linearizePairSemantics(this, origin, subgraph);
		case SemanticsType.multiple:
			return linearizeMultipleSemantics(this, origin, subgraph);
		default:
			// Unknown semantics type: nothing can be linearized.
			return [];
	}
}

/**
 * Computes the lowest common ancestor of a set of vertices by repeatedly
 * folding the pair-wise LCA over a work queue of hashes.
 *
 * The pair-wise helper may append further hashes to the queue (dependencies
 * discovered off the shortest path), so the queue is re-measured on every
 * iteration rather than snapshotted up front.
 *
 * @param hashes - Hashes of the vertices to find the common ancestor of; must be non-empty.
 * @param visited - Shared set of hashes already walked; mutated by the pair-wise helper.
 * @returns The hash of the lowest common ancestor.
 * @throws Error when `hashes` is empty or no ancestor could be resolved.
 */
lowestCommonAncestorMultipleVertices(
	hashes: Hash[],
	visited: Set<Hash>,
): Hash {
	if (hashes.length === 0) {
		throw new Error("Vertex dependencies are empty");
	}
	if (hashes.length === 1) {
		return hashes[0];
	}
	// Work queue seeded with the inputs; grows as the helper reports
	// additional hashes that still need to be folded in.
	const pending: Hash[] = [...hashes];
	let candidate: Hash | undefined = pending[0];
	let idx = 1;
	while (idx < pending.length) {
		if (!candidate) {
			throw new Error("LCA not found");
		}
		const next = pending[idx];
		if (!visited.has(next)) {
			candidate = this.lowestCommonAncestorPairVertices(
				candidate,
				next,
				visited,
				pending,
			);
		}
		idx++;
	}
	if (!candidate) {
		throw new Error("LCA not found");
	}
	return candidate;
}

/**
 * Walks two vertices up towards the root until they meet, using the
 * precomputed `vertexDistances` to always advance the pointer that is
 * further from the root along its `closestDependency` edge.
 *
 * Dependencies of the advanced vertex that are NOT on its shortest path
 * (and not yet visited) are pushed onto `targetVertices` so the caller
 * (`lowestCommonAncestorMultipleVertices`) can fold them in later.
 *
 * @param hash1 - First vertex hash.
 * @param hash2 - Second vertex hash.
 * @param visited - Shared set of hashes already walked; every hash this walk touches is added.
 * @param targetVertices - Caller's work queue; off-path dependencies are appended to it.
 * @returns The meeting hash, the other pointer's hash when a walk re-enters
 *   already-visited territory, or `undefined` (after logging) when distance
 *   or closest-dependency data is missing.
 */
private lowestCommonAncestorPairVertices(
	hash1: Hash,
	hash2: Hash,
	visited: Set<Hash>,
	targetVertices: Hash[],
): Hash | undefined {
	let currentHash1 = hash1;
	let currentHash2 = hash2;
	visited.add(currentHash1);
	visited.add(currentHash2);

	while (currentHash1 !== currentHash2) {
		const distance1 = this.vertexDistances.get(currentHash1);
		if (!distance1) {
			log.error("::hashgraph::LCA: Vertex not found");
			return;
		}
		const distance2 = this.vertexDistances.get(currentHash2);
		if (!distance2) {
			log.error("::hashgraph::LCA: Vertex not found");
			return;
		}

		// Advance whichever pointer is deeper; on a tie the second pointer moves.
		if (distance1.distance > distance2.distance) {
			if (!distance1.closestDependency) {
				log.error("::hashgraph::LCA: Closest dependency not found");
				return;
			}
			// Queue the dependencies we are NOT following so the caller
			// still accounts for them.
			for (const dep of this.vertices.get(currentHash1)?.dependencies || []) {
				if (dep !== distance1.closestDependency && !visited.has(dep)) {
					targetVertices.push(dep);
				}
			}
			currentHash1 = distance1.closestDependency;
			// Stepping onto an already-visited hash means the other walk
			// has covered this region; its current position is the answer.
			if (visited.has(currentHash1)) {
				return currentHash2;
			}
			visited.add(currentHash1);
		} else {
			if (!distance2.closestDependency) {
				log.error("::hashgraph::LCA: Closest dependency not found");
				return;
			}
			// Queue the dependencies we are NOT following so the caller
			// still accounts for them.
			for (const dep of this.vertices.get(currentHash2)?.dependencies || []) {
				if (dep !== distance2.closestDependency && !visited.has(dep)) {
					targetVertices.push(dep);
				}
			}
			currentHash2 = distance2.closestDependency;
			if (visited.has(currentHash2)) {
				return currentHash1;
			}
			visited.add(currentHash2);
		}
	}
	return currentHash1;
}

areCausallyRelatedUsingBitsets(hash1: Hash, hash2: Hash): boolean {
if (!this.arePredecessorsFresh) {
this.topologicalSort(true);
Expand Down Expand Up @@ -303,7 +442,11 @@ export class HashGraph {
}

const visited = new Map<Hash, number>();
this.depthFirstSearch(visited);
this.depthFirstSearch(
HashGraph.rootHash,
new Set(this.vertices.keys()),
visited,
);
for (const vertex of this.getAllVertices()) {
if (!visited.has(vertex.hash)) {
return false;
Expand Down
Loading
Loading