Fix arb and bsc firstBlock
prevostc committed Jan 13, 2025
1 parent 1f33563 commit f2af328
Showing 4 changed files with 45 additions and 33 deletions.
72 changes: 42 additions & 30 deletions bin/check-config.ts
@@ -339,30 +339,30 @@ async function main() {
// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// // write data files with missing holder counts
// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// for (const chain of Object.keys(dataFileContentPerChain)) {
-// const fs = require("fs")
+for (const chain of Object.keys(dataFileContentPerChain)) {
+const fs = require("fs")

-// // only if the chain has a config file
-// if (!fs.existsSync(`./config/${chain}.json`)) {
-// continue
-// }
+// only if the chain has a config file
+if (!fs.existsSync(`./config/${chain}.json`)) {
+continue
+}

-// const targetFile = `./data/${chain}_data.json`
-// const dataFileContent = dataFileContentPerChain[chain]
-// const existingDataFileContentIfAny = fs.existsSync(targetFile)
-// ? JSON.parse(fs.readFileSync(targetFile, "utf8"))
-// : { no_factory_vaults: [], no_factory_boosts: [] }
+const targetFile = `./data/${chain}_data.json`
+const dataFileContent = dataFileContentPerChain[chain]
+const existingDataFileContentIfAny = fs.existsSync(targetFile)
+? JSON.parse(fs.readFileSync(targetFile, "utf8"))
+: { no_factory_vaults: [], no_factory_boosts: [] }

-// dataFileContent.no_factory_vaults = dataFileContent.no_factory_vaults.concat(existingDataFileContentIfAny.no_factory_vaults)
-// dataFileContent.no_factory_boosts = dataFileContent.no_factory_boosts.concat(existingDataFileContentIfAny.no_factory_boosts)
-// dataFileContent.no_factory_vaults = Array.from(new Set(dataFileContent.no_factory_vaults))
-// dataFileContent.no_factory_boosts = Array.from(new Set(dataFileContent.no_factory_boosts))
+dataFileContent.no_factory_vaults = dataFileContent.no_factory_vaults.concat(existingDataFileContentIfAny.no_factory_vaults)
+dataFileContent.no_factory_boosts = dataFileContent.no_factory_boosts.concat(existingDataFileContentIfAny.no_factory_boosts)
+dataFileContent.no_factory_vaults = Array.from(new Set(dataFileContent.no_factory_vaults))
+dataFileContent.no_factory_boosts = Array.from(new Set(dataFileContent.no_factory_boosts))

-// dataFileContent.no_factory_vaults.sort()
-// dataFileContent.no_factory_boosts.sort()
+dataFileContent.no_factory_vaults.sort()
+dataFileContent.no_factory_boosts.sort()

-// fs.writeFileSync(targetFile, JSON.stringify(dataFileContent, null, 2))
-// }
+fs.writeFileSync(targetFile, JSON.stringify(dataFileContent, null, 2))
+}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// display top 30 missing TVL to focus on the most important vaults
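
The block re-enabled above merges newly detected no-factory vaults and boosts into each chain's data file, dedupes and sorts the lists, then rewrites the file. For reference, a minimal sketch of the shape of ./data/<chain>_data.json as implied by the fallback default above (the arrays presumably hold contract address strings):

// Sketch only, derived from the fallback default above; not part of this commit.
type ChainDataFile = {
  no_factory_vaults: string[]
  no_factory_boosts: string[]
}
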
@@ -488,7 +488,7 @@ async function main() {
})

if (!result.ok) {
-console.error(`Failed to fetch data from subgraph: ${result.statusText}`)
+console.error(`Failed to fetch data from ${subgraphUrl} subgraph ${chain} (${result.statusText}): ${gql}`)
console.error(await result.text())
continue
}
@@ -503,7 +503,8 @@ async function main() {
| { errors: { location: string[]; message: string }[] }

if ("errors" in resultData) {
-console.error(`Failed to fetch data from subgraph: ${JSON.stringify(resultData.errors, null, 2)}`)
+console.error(`Failed to fetch data from ${subgraphUrl} subgraph ${chain} (${result.statusText}): ${gql}`)
+console.error(JSON.stringify(resultData.errors, null, 2))
continue
}

@@ -530,8 +531,12 @@ async function main() {
}

const gql = `{
-tokens(where: {balances_: {rawAmount_lt: 0}}) {
-id
+tokenBalances(where: {rawAmount_lt: 0}, skip: 0, first: 1000) {
+token {
+id
+}
+amount
+rawAmount
}
}`
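
The rewritten query only requests the first page (skip: 0, first: 1000), so a chain with more than 1,000 negative balances would only be partially reported. A minimal skip-based paging sketch, not part of this commit, assuming the same subgraphUrl endpoint and response shape used elsewhere in this file:

// Hypothetical helper: page through all negative balances instead of
// relying on a single 1000-row page. Endpoint and shape are assumptions.
type NegativeBalance = { token: { id: string }; amount: string; rawAmount: string }

async function fetchAllNegativeBalances(subgraphUrl: string): Promise<NegativeBalance[]> {
  const pageSize = 1000
  const all: NegativeBalance[] = []
  for (let skip = 0; ; skip += pageSize) {
    const query = `{
      tokenBalances(where: {rawAmount_lt: 0}, skip: ${skip}, first: ${pageSize}) {
        token { id }
        amount
        rawAmount
      }
    }`
    const res = await fetch(subgraphUrl, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ query }),
    })
    if (!res.ok) throw new Error(`Subgraph request failed: ${res.statusText}`)
    const json = (await res.json()) as { data?: { tokenBalances: NegativeBalance[] } }
    const page = json.data?.tokenBalances ?? []
    all.push(...page)
    if (page.length < pageSize) break
  }
  return all
}
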

@@ -544,36 +549,43 @@ async function main() {
})

if (!result.ok) {
-console.error(`Failed to fetch data from subgraph ${chain}: ${result.statusText}`)
+console.error(`Failed to fetch data from ${subgraphUrl} subgraph ${chain} (${result.statusText}): ${gql}`)
console.error(await result.text())
continue
}

const resultData = (await result.json()) as
| {
data: {
-tokens: {
-id: string
+tokenBalances: {
+token: {
+id: string
+}
+amount: string
+rawAmount: string
}[]
}
}
| { errors: { location: string[]; message: string }[] }

if ("errors" in resultData) {
-console.error(`Failed to fetch data from subgraph: ${JSON.stringify(resultData.errors, null, 2)}`)
+console.error(`Failed to fetch data from ${subgraphUrl} subgraph ${chain} (${result.statusText}): ${gql}`)
+console.error(JSON.stringify(resultData.errors, null, 2))
continue
}

-if (resultData.data.tokens.length > 0) {
+if (resultData.data.tokenBalances.length > 0) {
const duneChain = chain === "bsc" ? "bnb" : chain === "avax" ? "avalanche_c" : chain
const duneQuery = `
SELECT to, min(block_number)
FROM ${duneChain}.transactions
-WHERE ${resultData.data.tokens.map((t) => `to = ${t.id}`).join("\n OR ")}
+WHERE ${uniq(resultData.data.tokenBalances.map((t) => t.token.id))
+.map((t) => `to = ${t}`)
+.join("\n OR ")}
group by to
order by min(block_number)
`
-console.error(`${chain}: Found ${resultData.data.tokens.length} tokens with balances below 0, please fix firstBlock in config.
+console.error(`${chain}: Found ${resultData.data.tokenBalances.length} token balances with balances below 0, please fix firstBlock in config.
${duneQuery}
`)
}
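
For illustration, with made-up token addresses and a local uniq stand-in for whatever helper the script actually imports, the generated Dune query comes out as one to = <address> predicate per distinct contract:

// Illustrative only: hypothetical addresses, local uniq stand-in.
const uniq = <T>(xs: T[]): T[] => Array.from(new Set(xs))

const tokenBalances = [
  { token: { id: "0x1111111111111111111111111111111111111111" } },
  { token: { id: "0x2222222222222222222222222222222222222222" } },
  { token: { id: "0x1111111111111111111111111111111111111111" } }, // duplicate, dropped by uniq
]

const duneChain = "arbitrum"
const duneQuery = `
  SELECT to, min(block_number)
  FROM ${duneChain}.transactions
  WHERE ${uniq(tokenBalances.map((t) => t.token.id))
    .map((t) => `to = ${t}`)
    .join("\n    OR ")}
  group by to
  order by min(block_number)
`
// The query result gives the earliest block each contract was called,
// which suggests how far back firstBlock needs to go.
console.log(duneQuery)
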
2 changes: 1 addition & 1 deletion config/arbitrum.json
@@ -3,7 +3,7 @@
"indexerHintPrune": 20000000,
"shareTokenMintAddress": "0x0000000000000000000000000000000000000000",
"burnAddress": "0x000000000000000000000000000000000000dead",
"firstBlock": 1365253,
"firstBlock": 1362980,

"clmManagerFactoryAddress": "0xD41Ce2c0a0596635FC09BDe2C35946a984b8cB7A",
"clmManagerFactoryStartBlock": 219696027,
2 changes: 1 addition & 1 deletion config/bsc.json
@@ -3,7 +3,7 @@
"indexerHintPrune": 2000000,
"shareTokenMintAddress": "0x0000000000000000000000000000000000000000",
"burnAddress": "0x000000000000000000000000000000000000dead",
"firstBlock": 1573192,
"firstBlock": 1174850,

"clmManagerFactoryAddress": "0xAe8b53413862984C4e10929D41735800E0A4EdF9",
"clmManagerFactoryStartBlock": 40991490,
2 changes: 1 addition & 1 deletion package.json
@@ -9,7 +9,7 @@
"test": "yarn run --silent test:lint && yarn run --silent test:unit",
"test:unit": "echo 'No unit tests defined'",
"test:lint": "prettier . --check",
"test:config": "ts-node --project tsconfig.scripts.json ./bin/check-config.ts",
"test:config": "ts-node --project tsconfig.scripts.json ./bin/check-config.ts; yarn format",
"update:addressbook": "ncu --upgrade blockchain-addressbook viem && yarn install",
"configure": "./bin/prepare.sh "
},
