diff --git a/.changeset/funny-pigs-admire.md b/.changeset/funny-pigs-admire.md new file mode 100644 index 000000000..d0bda597c --- /dev/null +++ b/.changeset/funny-pigs-admire.md @@ -0,0 +1,5 @@ +--- +"@blobscan/rest-api-server": minor +--- + +Removed syncers diff --git a/.env.example b/.env.example index 420f61daa..f10cc1d05 100644 --- a/.env.example +++ b/.env.example @@ -11,6 +11,7 @@ DIRECT_URL=postgresql://blobscan:s3cr3t@localhost:5432/blobscan_dev?schema=publi BLOBSCAN_WEB_TAG=next BLOBSCAN_API_TAG=next +BLOBSCAN_JOBS_TAG=next INDEXER_TAG=master ### blobscan website diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 3438d959b..1b0500388 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -84,6 +84,32 @@ jobs: DATABASE_URL=${{ env.DATABASE_URL }} DIRECT_URL=${{ env.DIRECT_URL }} + - name: Extract metadata (tags, labels) for Docker (jobs) + id: meta_jobs + uses: docker/metadata-action@v5 + with: + images: | + blossomlabs/blobscan-jobs + tags: | + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + + - name: Build and push (jobs) + uses: docker/build-push-action@v6.5.0 + with: + context: . 
+ push: true + target: jobs + tags: ${{ steps.meta_jobs.outputs.tags }} + labels: ${{ steps.meta_jobs.outputs.labels }} + build-args: | + NEXT_PUBLIC_BLOBSCAN_RELEASE=${{ env.NEXT_PUBLIC_BLOBSCAN_RELEASE }} + BUILD_TIMESTAMP=${{ env.BUILD_TIMESTAMP }} + GIT_COMMIT=${{ env.GIT_COMMIT }} + DATABASE_URL=${{ env.DATABASE_URL }} + DIRECT_URL=${{ env.DIRECT_URL }} + - name: Extract metadata (tags, labels) for Docker (Web) id: meta_web uses: docker/metadata-action@v5 diff --git a/.github/workflows/docker_dev.yml b/.github/workflows/docker_dev.yml index a973614fc..fdf607fef 100644 --- a/.github/workflows/docker_dev.yml +++ b/.github/workflows/docker_dev.yml @@ -77,6 +77,21 @@ jobs: DATABASE_URL=${{ env.DATABASE_URL }} DIRECT_URL=${{ env.DIRECT_URL }} + - name: Build and push (jobs) + uses: docker/build-push-action@v6.5.0 + with: + context: . + push: true + target: jobs + tags: | + blossomlabs/blobscan-jobs:development + build-args: | + NEXT_PUBLIC_BLOBSCAN_RELEASE=${{ env.NEXT_PUBLIC_BLOBSCAN_RELEASE }} + BUILD_TIMESTAMP=${{ env.BUILD_TIMESTAMP }} + GIT_COMMIT=${{ env.GIT_COMMIT }} + DATABASE_URL=${{ env.DATABASE_URL }} + DIRECT_URL=${{ env.DIRECT_URL }} + - name: Build and push (web) uses: docker/build-push-action@v6.5.0 with: diff --git a/Dockerfile b/Dockerfile index da4354866..d094fb05b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,6 +29,7 @@ WORKDIR /prepare RUN turbo prune @blobscan/web --docker --out-dir /prepare/web RUN turbo prune @blobscan/rest-api-server --docker --out-dir /prepare/api +RUN turbo prune @blobscan/jobs --docker --out-dir /prepare/jobs # stage: web-builder FROM deps AS web-builder @@ -116,3 +117,39 @@ ENV PORT=3001 ADD docker-entrypoint.sh / ENTRYPOINT ["/docker-entrypoint.sh"] CMD ["api"] + +# stage: jobs-builder +FROM deps AS jobs-builder + +WORKDIR /app + +ARG DATABASE_URL +ARG DIRECT_URL + +COPY --from=deps /prepare/jobs/json . +COPY --from=deps /prepare/jobs/pnpm-lock.yaml . 
+ +RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm fetch -r +RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile + +COPY --from=deps /prepare/jobs/full . + +# Copy original which includes pipelines +COPY --from=deps /prepare/turbo.json . +RUN --mount=type=cache,id=pnpm,target=/pnpm/store DATABASE_URL=${DATABASE_URL} DIRECT_URL=${DIRECT_URL} pnpm build --filter=@blobscan/jobs + +# stage: jobs +FROM base AS jobs + +WORKDIR /app + +ENV NODE_ENV=production + +COPY --from=jobs-builder /app/node_modules/.prisma ./node_modules/.prisma +COPY --from=jobs-builder /app/node_modules/prisma ./node_modules/prisma +COPY --from=jobs-builder /app/node_modules/@prisma ./node_modules/@prisma +COPY --from=jobs-builder /app/apps/jobs/dist ./ + +ADD docker-entrypoint.sh / +ENTRYPOINT ["/docker-entrypoint.sh"] +CMD ["jobs"] \ No newline at end of file diff --git a/apps/docs/package.json b/apps/docs/package.json index 2906207bb..22ee99517 100644 --- a/apps/docs/package.json +++ b/apps/docs/package.json @@ -21,7 +21,7 @@ "@tailwindcss/typography": "^0.5.7", "autoprefixer": "^10.4.14", "clsx": "^2.1.0", - "fast-glob": "^3.2.12", + "fast-glob": "^3.3.2", "flexsearch": "^0.7.31", "js-yaml": "^4.1.0", "next": "^14.2.30", diff --git a/apps/docs/src/app/docs/background-jobs/page.md b/apps/docs/src/app/docs/background-jobs/page.md index b4e9babba..632d09774 100644 --- a/apps/docs/src/app/docs/background-jobs/page.md +++ b/apps/docs/src/app/docs/background-jobs/page.md @@ -8,10 +8,10 @@ nextjs: Blobscan requires [BullMQ](https://bullmq.io/) to run the following tasks in the background: -- `DailyStatsSyncer` - calculates metrics for the blobs, block and transactions charts. -- `OverallStatsSyncer` - calculates other metrics such as Total Tx Fees Saved, Total Blocks and Total Blobs. -- `SwarmStampSyncer` - updates TTL for Ethereum Swarm batches. +- `DailyStatsCronJob` - calculates metrics for the blobs, block and transactions charts. 
+- `OverallStatsCronJob` - calculates other metrics such as Total Tx Fees Saved, Total Blocks and Total Blobs. +- `SwarmStampCronJob` - updates TTL for Ethereum Swarm batches. -For more information, check out the [@blobscan/syncers](https://github.com/Blobscan/blobscan/tree/main/packages/syncers/src/syncers) package. +For more information, check out the [@blobscan/jobs](https://github.com/Blobscan/blobscan/tree/main/apps/jobs) app. The BullMQ queue is also used to upload blobs in parallel to different Storages. diff --git a/apps/docs/src/app/docs/codebase-overview/page.md b/apps/docs/src/app/docs/codebase-overview/page.md index 7b7506ec4..4b59cc24f 100644 --- a/apps/docs/src/app/docs/codebase-overview/page.md +++ b/apps/docs/src/app/docs/codebase-overview/page.md @@ -14,6 +14,7 @@ Blobscan.com is comprised of the following major components: - **Web App**: A [Next.js](https://nextjs.org/) application hosted on [Vercel](https://vercel.com/) that spins up a [tRPC API](https://trpc.io) that communicates with the database via [Prisma](https://www.prisma.io/). It also uses [Tailwind CSS](https://tailwindcss.com/) for styling. - **REST API**: An express app that runs the tRPC API with [OpenAPI](https://www.openapis.org/) support. It exposes some of the tRPC API endpoints as REST endpoints for the public and external services such as the indexer. +- **Jobs**: A Node.js app running the following BullMQ background cron jobs: eth usd price syncing, stats aggregation and swarm stamp syncing. - **Indexer**: A Rust service that listens to the Ethereum blockchain looking for blocks and transactions containing blobs and forwards them to the REST API to be indexed. {% figure src="/architecture.svg" appendCurrentTheme=true /%} @@ -33,6 +34,7 @@ Blobscan is composed of the following apps: |  [`@blobscan/docs`](https://github.com/Blobscan/blobscan/tree/main/apps/docs) | Nextjs app that contains the documentation. 
| |  [`@blobscan/web`](https://github.com/Blobscan/blobscan/tree/main/apps/web) | Nextjs app that contains the web app. | |  [`@blobscan/rest-api-server`](https://github.com/Blobscan/blobscan/tree/main/apps/rest-api-server) | Express app that contains the REST API. | +|  [`@blobscan/jobs`](https://github.com/Blobscan/blobscan/tree/main/apps/jobs) | Node.js app running BullMQ cron jobs. | ### CLI diff --git a/packages/syncers/CHANGELOG.md b/apps/jobs/CHANGELOG.md similarity index 99% rename from packages/syncers/CHANGELOG.md rename to apps/jobs/CHANGELOG.md index 4c24f4fc3..06e1bac6c 100644 --- a/packages/syncers/CHANGELOG.md +++ b/apps/jobs/CHANGELOG.md @@ -1,4 +1,4 @@ -# @blobscan/syncers +# @blobscan/jobs ## 0.5.0 diff --git a/apps/jobs/esbuild.config.mjs b/apps/jobs/esbuild.config.mjs new file mode 100644 index 000000000..47035f2fa --- /dev/null +++ b/apps/jobs/esbuild.config.mjs @@ -0,0 +1,19 @@ +// esbuild.config.js +import * as esbuild from "esbuild"; +import fs from "fs"; + +const result = await esbuild.build({ + entryPoints: ["src/index.ts"], + outdir: "dist", + outbase: "src", + platform: "node", + target: "node20", + format: "cjs", + metafile: !!process.env.BUILD_METADATA_ENABLED, + bundle: true, + external: [".prisma", "prisma", "@prisma/client"], +}); + +if (process.env.BUILD_METADATA_ENABLED) { + fs.writeFileSync("dist/meta.json", JSON.stringify(result.metafile)); +} diff --git a/apps/jobs/package.json b/apps/jobs/package.json new file mode 100644 index 000000000..3cd0006ea --- /dev/null +++ b/apps/jobs/package.json @@ -0,0 +1,45 @@ +{ + "name": "@blobscan/jobs", + "version": "0.5.0", + "private": true, + "scripts": { + "build": "node esbuild.config.mjs", + "build:metadata": "BUILD_METADATA_ENABLED=true pnpm build", + "clean": "git clean -xdf node_modules", + "dev": "pnpm with-env tsx src/index.ts", + "lint": "eslint .", + "lint:fix": "pnpm lint --fix", + "start": "pnpm with-env node dist/index.js", + "test": "pnpm with-env:test vitest", + "test:ui": 
"pnpm with-env:test vitest --ui", + "with-env:test": "dotenv -e ../../.env.test --", + "with-env": "dotenv -e ../../.env --", + "type-check": "tsc --noEmit" + }, + "keywords": [], + "license": "MIT", + "dependencies": { + "@blobscan/dates": "workspace:^0.0.1", + "@blobscan/dayjs": "workspace:^0.1.0", + "@blobscan/db": "workspace:^0.18.0", + "@blobscan/logger": "workspace:^0.1.2", + "@blobscan/price-feed": "workspace:^0.1.0", + "@blobscan/zod": "workspace:^0.1.0", + "axios": "^1.7.2", + "bullmq": "^4.13.2", + "ioredis": "^5.3.2", + "viem": "^2.17.4" + }, + "devDependencies": { + "concurrently": "^9.2.0", + "esbuild": "0.25.5", + "fast-glob": "^3.3.2", + "tsx": "^4.19.2" + }, + "eslintConfig": { + "root": true, + "extends": [ + "@blobscan/eslint-config/base" + ] + } +} diff --git a/packages/syncers/src/BaseSyncer.ts b/apps/jobs/src/cron-jobs/BaseCronJob.ts similarity index 65% rename from packages/syncers/src/BaseSyncer.ts rename to apps/jobs/src/cron-jobs/BaseCronJob.ts index 48949a46e..2f1ca8b42 100644 --- a/packages/syncers/src/BaseSyncer.ts +++ b/apps/jobs/src/cron-jobs/BaseCronJob.ts @@ -1,48 +1,59 @@ /* eslint-disable @typescript-eslint/no-misused-promises */ +import type { Processor } from "bullmq"; import { Queue, Worker } from "bullmq"; import type { Redis } from "ioredis"; import { createModuleLogger } from "@blobscan/logger"; import type { Logger } from "@blobscan/logger"; -import { ErrorException, SyncerError } from "./errors"; -import { createRedisConnection } from "./utils"; +import { ErrorException } from "../errors"; +import { createRedis } from "../redis"; -export interface CommonSyncerConfig { +export interface CommonCronJobConfig { redisUriOrConnection: Redis | string; cronPattern: string; } -export interface BaseSyncerConfig extends CommonSyncerConfig { +export interface BaseCronJobConfig extends CommonCronJobConfig { name: string; - syncerFn: () => Promise; + processor: Processor; + jobData?: Record; } -export class BaseSyncer { +export class 
CronJobError extends ErrorException { + constructor(cronJobName: string, message: string, cause?: unknown) { + super(`Cron job "${cronJobName}" failed: ${message}`, cause); + } +} + +export class BaseCronJob { name: string; cronPattern: string; - protected syncerFn: () => Promise; protected logger: Logger; protected connection: Redis; - protected worker: Worker | undefined; - protected queue: Queue | undefined; + protected worker?: Worker; + protected queue?: Queue; + + protected jobData?: Record; constructor({ name, cronPattern, redisUriOrConnection, - syncerFn, - }: BaseSyncerConfig) { - this.name = `${name}-syncer`; + processor: processorFile, + jobData, + }: BaseCronJobConfig) { + this.name = `${name}-cron-job`; this.cronPattern = cronPattern; this.logger = createModuleLogger(this.name); + this.jobData = jobData; let connection: Redis; if (typeof redisUriOrConnection === "string") { - connection = createRedisConnection(redisUriOrConnection); + connection = createRedis(redisUriOrConnection); } else { connection = redisUriOrConnection; } @@ -51,7 +62,7 @@ export class BaseSyncer { connection, }); - this.worker = new Worker(this.queue.name, syncerFn, { + this.worker = new Worker(this.queue.name, processorFile, { connection, }); @@ -64,25 +75,24 @@ export class BaseSyncer { }); this.connection = connection; - this.syncerFn = syncerFn; } async start() { try { const jobName = `${this.name}-job`; - const repeatableJob = await this.queue?.add(jobName, null, { + const repeatableJob = await this.queue?.add(jobName, this.jobData, { repeat: { pattern: this.cronPattern, }, }); - this.logger.info("Syncer started successfully"); + this.logger.debug("Cron job started successfully"); return repeatableJob; } catch (err) { - throw new SyncerError( + throw new CronJobError( this.name, - "An error ocurred when starting syncer", + "An error ocurred when starting cron job", err ); } @@ -107,7 +117,7 @@ export class BaseSyncer { this.queue?.removeAllListeners().close() ); - 
this.logger.info("Syncer closed successfully"); + this.logger.info("Cron job closed successfully"); }); } @@ -115,7 +125,7 @@ export class BaseSyncer { try { await operation(); } catch (err) { - const err_ = new SyncerError( + const err_ = new CronJobError( this.name, "An error ocurred when performing closing operation", err diff --git a/apps/jobs/src/cron-jobs/daily-stats/DailyStatsCronJob.ts b/apps/jobs/src/cron-jobs/daily-stats/DailyStatsCronJob.ts new file mode 100644 index 000000000..a7dbd56bc --- /dev/null +++ b/apps/jobs/src/cron-jobs/daily-stats/DailyStatsCronJob.ts @@ -0,0 +1,46 @@ +import { formatDate } from "../../utils"; +import { BaseCronJob } from "../BaseCronJob"; +import type { CommonCronJobConfig } from "../BaseCronJob"; +import dailyStats from "./processor"; +import type { DailyStatsJobResult } from "./types"; + +export type DailyStatsCronJobConfig = CommonCronJobConfig; + +export class DailyStatsCronJob extends BaseCronJob { + constructor({ redisUriOrConnection, cronPattern }: DailyStatsCronJobConfig) { + const name = "daily-stats"; + + super({ + name, + redisUriOrConnection, + cronPattern, + processor: dailyStats, + }); + + this.worker?.on("completed", (_, result) => { + const result_ = result as DailyStatsJobResult; + + if (!result_) { + this.logger.info( + "Daily stats aggregation skipped: no blocks indexed yet" + ); + + return; + } + + const { fromDate, toDate, totalAggregationsCreated } = result_; + + if (fromDate === toDate) { + this.logger.info(`Daily stats aggregation skipped: already up to date`); + + return; + } + + this.logger.info( + `Daily data up to day ${formatDate( + toDate + )} aggregated. 
${totalAggregationsCreated} daily stats created successfully.` + ); + }); + } +} diff --git a/apps/jobs/src/cron-jobs/daily-stats/processor.ts b/apps/jobs/src/cron-jobs/daily-stats/processor.ts new file mode 100644 index 000000000..b9be4c229 --- /dev/null +++ b/apps/jobs/src/cron-jobs/daily-stats/processor.ts @@ -0,0 +1,45 @@ +import { normalizeDate, toDailyDate } from "@blobscan/dayjs"; +import { prisma } from "@blobscan/db"; + +import type { DailyStatsJobResult } from "./types"; + +export default async (): Promise => { + const lastIndexedBlock = await prisma.block.findLatest(); + + if (!lastIndexedBlock) { + return; + } + const targetDate = normalizeDate(lastIndexedBlock.timestamp).subtract( + 1, + "day" + ); + const targetDay = toDailyDate(targetDate); + + const rawLastDailyStatsDay = await prisma.dailyStats.findFirst({ + select: { day: true }, + where: { category: null, rollup: null }, + orderBy: { day: "desc" }, + }); + const lastDailyStatsDay = rawLastDailyStatsDay?.day + ? normalizeDate(rawLastDailyStatsDay.day) + : undefined; + + if (lastDailyStatsDay && lastDailyStatsDay.isSame(targetDay, "day")) { + return { + fromDate: lastDailyStatsDay.utc().toISOString(), + toDate: targetDay.utc().toISOString(), + totalAggregationsCreated: 0, + }; + } + + const [blobDailyStats] = await prisma.dailyStats.aggregate({ + from: lastDailyStatsDay?.add(1, "day"), + to: targetDay, + }); + + return { + fromDate: lastDailyStatsDay?.utc().toISOString(), + toDate: targetDate.utc().toISOString(), + totalAggregationsCreated: blobDailyStats.length, + }; +}; diff --git a/apps/jobs/src/cron-jobs/daily-stats/types.ts b/apps/jobs/src/cron-jobs/daily-stats/types.ts new file mode 100644 index 000000000..fe9c08e6e --- /dev/null +++ b/apps/jobs/src/cron-jobs/daily-stats/types.ts @@ -0,0 +1,14 @@ +import type { SandboxedJob } from "bullmq"; + +export type DailyStatsJobResult = + | { + fromDate?: string; + toDate: string; + totalAggregationsCreated: number; + } + | undefined; + +export type 
DailyStatsSanboxedJob = SandboxedJob< + undefined, + DailyStatsJobResult +>; diff --git a/apps/jobs/src/cron-jobs/eth-price/EthPriceCronJob.ts b/apps/jobs/src/cron-jobs/eth-price/EthPriceCronJob.ts new file mode 100644 index 000000000..49b00aeb8 --- /dev/null +++ b/apps/jobs/src/cron-jobs/eth-price/EthPriceCronJob.ts @@ -0,0 +1,39 @@ +import type { PriceFeed } from "@blobscan/price-feed"; + +import type { CommonCronJobConfig } from "../BaseCronJob"; +import { BaseCronJob } from "../BaseCronJob"; +import ethPrice from "./processor"; + +export interface EthPriceCronJobConfig extends CommonCronJobConfig { + ethUsdPriceFeed: PriceFeed; +} + +export class EthPriceCronJob extends BaseCronJob { + constructor({ ...restConfig }: CommonCronJobConfig) { + super({ + ...restConfig, + name: "eth-price", + processor: ethPrice, + }); + + this.worker?.on("completed", (_, result?) => { + const { price, timestamp, roundId } = result as { + price?: number; + timestamp: string; + roundId?: string; + }; + + if (!price && !roundId) { + this.logger.warn( + `Skipping eth price update: No price data found for datetime ${timestamp}` + ); + + return; + } + + this.logger.info( + `ETH price indexed: $${price} at ${timestamp} recorded (retrieved from round ${roundId})` + ); + }); + } +} diff --git a/apps/jobs/src/cron-jobs/eth-price/price-feed.ts b/apps/jobs/src/cron-jobs/eth-price/price-feed.ts new file mode 100644 index 000000000..bdaa000e6 --- /dev/null +++ b/apps/jobs/src/cron-jobs/eth-price/price-feed.ts @@ -0,0 +1,31 @@ +import * as chains from "viem/chains"; + +import { PriceFeed } from "@blobscan/price-feed"; + +import { env } from "../../env"; +import { client } from "./viem"; + +let priceFeed: PriceFeed | undefined; + +export async function getEthUsdPriceFeed() { + if (!priceFeed) { + const chain = Object.values(chains).find( + (c) => c.id === env.ETH_PRICE_SYNCER_CHAIN_ID + ); + + if (!chain) { + throw new Error( + `Can't initialize ETH price syncer: chain with id 
${env.ETH_PRICE_SYNCER_CHAIN_ID} not found` + ); + } + + priceFeed = await PriceFeed.create({ + client, + dataFeedContractAddress: + env.ETH_PRICE_SYNCER_ETH_USD_PRICE_FEED_CONTRACT_ADDRESS as `0x${string}`, + timeTolerance: env.ETH_PRICE_SYNCER_TIME_TOLERANCE, + }); + } + + return priceFeed; +} diff --git a/apps/jobs/src/cron-jobs/eth-price/processor.ts b/apps/jobs/src/cron-jobs/eth-price/processor.ts new file mode 100644 index 000000000..af83670f8 --- /dev/null +++ b/apps/jobs/src/cron-jobs/eth-price/processor.ts @@ -0,0 +1,45 @@ +import dayjs, { normalizeDate } from "@blobscan/dayjs"; +import { prisma } from "@blobscan/db"; + +import { getEthUsdPriceFeed } from "./price-feed"; +import type { EthPriceJob } from "./types"; + +export default async (job: EthPriceJob) => { + const now = normalizeDate(dayjs()); + + const targetDateTime = now.startOf(job.data.granularity); + + const ethUsdPriceFeed = await getEthUsdPriceFeed(); + + const priceData = await ethUsdPriceFeed.findPriceByTimestamp( + targetDateTime.unix() + ); + + if (!priceData) { + return { + timestamp: targetDateTime.toISOString(), + }; + } + + const roundId = priceData.roundId.toString(); + const price = priceData.price; + const priceTimestamp = targetDateTime.toDate(); + const priceRow = { + price, + timestamp: priceTimestamp, + }; + + await prisma.ethUsdPrice.upsert({ + create: priceRow, + update: priceRow, + where: { + timestamp: priceTimestamp, + }, + }); + + return { + price, + timestamp: targetDateTime.toISOString(), + roundId, + }; +}; diff --git a/apps/jobs/src/cron-jobs/eth-price/types.ts b/apps/jobs/src/cron-jobs/eth-price/types.ts new file mode 100644 index 000000000..c61bb4fbb --- /dev/null +++ b/apps/jobs/src/cron-jobs/eth-price/types.ts @@ -0,0 +1,11 @@ +import type { Job } from "bullmq"; + +export type Granularity = "minute" | "hour" | "day"; + +export type EthPriceJobResult = { + timestamp: string; + price?: number; + roundId?: string; +}; + +export type EthPriceJob = Job<{ granularity: 
Granularity }, EthPriceJobResult>; diff --git a/apps/jobs/src/cron-jobs/eth-price/viem.ts b/apps/jobs/src/cron-jobs/eth-price/viem.ts new file mode 100644 index 000000000..33b32e6d7 --- /dev/null +++ b/apps/jobs/src/cron-jobs/eth-price/viem.ts @@ -0,0 +1,20 @@ +import type { PublicClient } from "viem"; +import { createPublicClient, http } from "viem"; +import * as chains from "viem/chains"; + +import { env } from "../../env"; + +const chain = Object.values(chains).find( + (c) => c.id === env.ETH_PRICE_SYNCER_CHAIN_ID +); + +if (!chain) { + throw new Error( + `Failed to initialize eth usd price feed: chain with id ${env.ETH_PRICE_SYNCER_CHAIN_ID} not found` + ); +} + +export const client = createPublicClient({ + chain, + transport: http(env.ETH_PRICE_SYNCER_CHAIN_JSON_RPC_URL), +}) as PublicClient; diff --git a/apps/jobs/src/cron-jobs/overall-stats/OverallStatsCronJob.ts b/apps/jobs/src/cron-jobs/overall-stats/OverallStatsCronJob.ts new file mode 100644 index 000000000..06263008a --- /dev/null +++ b/apps/jobs/src/cron-jobs/overall-stats/OverallStatsCronJob.ts @@ -0,0 +1,58 @@ +import { BaseCronJob } from "../BaseCronJob"; +import type { CommonCronJobConfig } from "../BaseCronJob"; +import overallStats from "./processor"; +import type { OverallStatsJobResult } from "./types"; + +export interface OverallStatsCronJobConfig extends CommonCronJobConfig { + forkSlot: number; + batchSize?: number; +} + +export class OverallStatsCronJob extends BaseCronJob { + constructor({ + cronPattern, + redisUriOrConnection, + forkSlot, + batchSize, + }: OverallStatsCronJobConfig) { + const name = "overall-stats"; + super({ + name, + cronPattern, + redisUriOrConnection, + processor: overallStats, + jobData: { + forkSlot, + batchSize, + }, + }); + + this.worker?.on("completed", (_, result) => { + const result_ = result as OverallStatsJobResult; + + if (!result_) { + this.logger.debug( + "Stats aggregation skipped: chain hasn't been fully indexed yet" + ); + + return; + } + + const { 
fromBlock, toBlock } = result_; + + if (fromBlock >= toBlock) { + this.logger.debug( + `Stats aggregation skipped: no new finalized blocks (last aggregated block: ${ + fromBlock - 1 + })` + ); + + return; + } + + this.logger.info( + `Data from block ${fromBlock.toLocaleString()} up to ${toBlock.toLocaleString()} aggregated successfully.` + ); + }); + } +} diff --git a/apps/jobs/src/cron-jobs/overall-stats/processor.ts b/apps/jobs/src/cron-jobs/overall-stats/processor.ts new file mode 100644 index 000000000..07c661d89 --- /dev/null +++ b/apps/jobs/src/cron-jobs/overall-stats/processor.ts @@ -0,0 +1,68 @@ +import type { BlockNumberRange } from "@blobscan/db"; +import { prisma } from "@blobscan/db"; + +import { isUnset } from "../../utils"; +import type { OverallStatsJobResult, OverallStatsJob } from "./types"; + +const BATCH_SIZE = 2_000_000; + +export default async ({ + data: { batchSize = BATCH_SIZE, forkSlot }, +}: OverallStatsJob): Promise => { + const [blockchainSyncState, latestBlock] = await Promise.all([ + prisma.blockchainSyncState.findUnique({ + select: { + lastLowerSyncedSlot: true, + lastAggregatedBlock: true, + lastFinalizedBlock: true, + }, + where: { + id: 1, + }, + }), + prisma.block.findLatest(), + ]); + const { lastAggregatedBlock, lastFinalizedBlock, lastLowerSyncedSlot } = + blockchainSyncState ?? {}; + const lastIndexedBlockNumber = latestBlock?.number; + + if ( + isUnset(lastIndexedBlockNumber) || + isUnset(lastFinalizedBlock) || + lastLowerSyncedSlot !== forkSlot + ) { + return; + } + + const blockRange: BlockNumberRange = { + from: lastAggregatedBlock ? 
lastAggregatedBlock + 1 : 0, + to: Math.min(lastFinalizedBlock, lastIndexedBlockNumber), + }; + + if (blockRange.from >= blockRange.to) { + return { + fromBlock: blockRange.from, + toBlock: blockRange.to, + }; + } + + const { from, to } = blockRange; + const unprocessedBlocks = to - from + 1; + const batches = Math.ceil(unprocessedBlocks / batchSize); + + for (let i = 0; i < batches; i++) { + const batchFrom = from + i * batchSize; + const batchTo = Math.min(batchFrom + batchSize - 1, to); + const batchBlockRange: BlockNumberRange = { + from: batchFrom, + to: batchTo, + }; + + await prisma.overallStats.aggregate({ blockRange: batchBlockRange }); + } + + return { + fromBlock: blockRange.from, + toBlock: blockRange.to, + }; +}; diff --git a/apps/jobs/src/cron-jobs/overall-stats/types.ts b/apps/jobs/src/cron-jobs/overall-stats/types.ts new file mode 100644 index 000000000..c2cc72b76 --- /dev/null +++ b/apps/jobs/src/cron-jobs/overall-stats/types.ts @@ -0,0 +1,13 @@ +import type { Job } from "bullmq"; + +export type OverallStatsJobResult = + | { + fromBlock: number; + toBlock: number; + } + | undefined; + +export type OverallStatsJob = Job< + { forkSlot: number; batchSize?: number }, + OverallStatsJobResult +>; diff --git a/apps/jobs/src/cron-jobs/swarm-stamp/SwarmStampCronJob.ts b/apps/jobs/src/cron-jobs/swarm-stamp/SwarmStampCronJob.ts new file mode 100644 index 000000000..2a0badc44 --- /dev/null +++ b/apps/jobs/src/cron-jobs/swarm-stamp/SwarmStampCronJob.ts @@ -0,0 +1,63 @@ +import { prisma } from "@blobscan/db"; + +import type { CommonCronJobConfig } from "../BaseCronJob"; +import { BaseCronJob, CronJobError } from "../BaseCronJob"; +import swarmStamp from "./processor"; +import type { SwarmStampJobResult } from "./types"; + +export interface SwarmStampCronJobConfig extends CommonCronJobConfig { + beeEndpoint: string; + batchId?: string; +} + +export class SwarmStampCronJob extends BaseCronJob { + constructor({ + cronPattern, + redisUriOrConnection, + batchId, + 
beeEndpoint, + }: SwarmStampCronJobConfig) { + const name = "swarm-stamp"; + super({ + name, + cronPattern, + redisUriOrConnection, + processor: swarmStamp, + jobData: { + batchId, + beeEndpoint, + }, + }); + + this.worker?.on("completed", (_, result) => { + const { batchId, newExpirationDate } = result as SwarmStampJobResult; + + this.logger.info( + `Swarm batch ID "${batchId}" updated. New expiry date: ${newExpirationDate}.` + ); + }); + } + + async start() { + const batchId = this.jobData?.batchId; + + if (!batchId) { + const blobStorageState = await prisma.blobStoragesState.findUnique({ + where: { + id: 1, + }, + }); + + if (!blobStorageState?.swarmDataId) { + throw new CronJobError(this.name, "No swarm batch id found"); + } + + this.jobData = { + ...(this.jobData ?? {}), + batchId: blobStorageState.swarmDataId, + }; + } + + return super.start(); + } +} diff --git a/packages/syncers/src/errors.ts b/apps/jobs/src/cron-jobs/swarm-stamp/errors.ts similarity index 68% rename from packages/syncers/src/errors.ts rename to apps/jobs/src/cron-jobs/swarm-stamp/errors.ts index 0c8ddd289..7b8000e7c 100644 --- a/packages/syncers/src/errors.ts +++ b/apps/jobs/src/cron-jobs/swarm-stamp/errors.ts @@ -2,21 +2,7 @@ import type { AxiosError } from "axios"; import { z } from "@blobscan/zod"; -export class ErrorException extends Error { - constructor(message: string, cause?: unknown) { - super(message, { - cause, - }); - - this.name = this.constructor.name; - } -} - -export class SyncerError extends ErrorException { - constructor(syncerName: string, message: string, cause: unknown) { - super(`Syncer "${syncerName}" failed: ${message}`, cause); - } -} +import { ErrorException } from "../../errors"; const swarmApiResponseErrorSchema = z.object({ code: z.number(), diff --git a/apps/jobs/src/cron-jobs/swarm-stamp/processor.ts b/apps/jobs/src/cron-jobs/swarm-stamp/processor.ts new file mode 100644 index 000000000..a78b67ca8 --- /dev/null +++ 
b/apps/jobs/src/cron-jobs/swarm-stamp/processor.ts @@ -0,0 +1,59 @@ +import type { AxiosResponse } from "axios"; +import axios, { AxiosError } from "axios"; + +import { formatTtl } from "@blobscan/dates"; +import { prisma } from "@blobscan/db"; + +import { SwarmNodeError } from "./errors"; +import type { SwarmStampJobResult, SwarmStampJob } from "./types"; + +type BatchDataGetResponse = { + batchID: string; + batchTTL: number; +}; + +export default async ({ + data: { batchId, beeEndpoint }, +}: SwarmStampJob): Promise => { + let response: AxiosResponse; + + try { + const url = `${beeEndpoint}/stamps/${batchId}`; + + response = await axios.get(url); + } catch (err) { + let cause = err; + + if (err instanceof AxiosError) { + cause = new SwarmNodeError(err); + } + + throw new Error(`Failed to fetch stamp batch "${batchId}"`, { + cause, + }); + } + + const { batchTTL } = response.data; + + await prisma.blobStoragesState.upsert({ + create: { + swarmDataId: batchId, + swarmDataTTL: batchTTL, + }, + update: { + swarmDataId: batchId, + swarmDataTTL: batchTTL, + updatedAt: new Date(), + }, + where: { + id: 1, + }, + }); + + const expiryDate = formatTtl(batchTTL); + + return { + batchId, + newExpirationDate: expiryDate, + }; +}; diff --git a/apps/jobs/src/cron-jobs/swarm-stamp/types.ts b/apps/jobs/src/cron-jobs/swarm-stamp/types.ts new file mode 100644 index 000000000..4543c9894 --- /dev/null +++ b/apps/jobs/src/cron-jobs/swarm-stamp/types.ts @@ -0,0 +1,11 @@ +import type { Job } from "bullmq"; + +export type SwarmStampJobResult = { + batchId: string; + newExpirationDate: string; +}; + +export type SwarmStampJob = Job< + { beeEndpoint: string; batchId: string }, + SwarmStampJobResult +>; diff --git a/apps/jobs/src/env.ts b/apps/jobs/src/env.ts new file mode 100644 index 000000000..a9f94aed1 --- /dev/null +++ b/apps/jobs/src/env.ts @@ -0,0 +1,85 @@ +import { + booleanSchema, + createEnv, + maskConnectionUrl, + maskJSONRPCUrl, + presetEnvOptions, + z, +} from "@blobscan/zod"; 
+ +import { determineGranularity, getNetworkDencunForkSlot } from "./utils"; + +const networkSchema = z.enum([ + "mainnet", + "holesky", + "hoodi", + "sepolia", + "gnosis", + "chiado", + "devnet", +]); + +export const env = createEnv({ + envOptions: { + server: { + DATABASE_URL: z.string().url(), + DENCUN_FORK_SLOT: z.coerce.number().optional(), + NETWORK_NAME: networkSchema.default("mainnet"), + + // Stats jobs + STATS_SYNCER_DAILY_CRON_PATTERN: z.string().default("30 0 * * * *"), + STATS_SYNCER_OVERALL_CRON_PATTERN: z.string().default("*/15 * * * *"), + + // ETH Price is retrieved every minute (by default) from the Chainlink: ETH/USD oracle + // in the Polygon network. + ETH_PRICE_SYNCER_ENABLED: booleanSchema.default("false"), + ETH_PRICE_SYNCER_CRON_PATTERN: z.string().default("* * * * *"), + ETH_PRICE_SYNCER_CHAIN_ID: z.coerce.number().default(137), + ETH_PRICE_SYNCER_CHAIN_JSON_RPC_URL: z.string().url().optional(), + ETH_PRICE_SYNCER_ETH_USD_PRICE_FEED_CONTRACT_ADDRESS: z + .string() + .default("0xF9680D99D6C9589e2a93a78A04A279e509205945"), + ETH_PRICE_SYNCER_TIME_TOLERANCE: z.coerce.number().positive().optional(), + + REDIS_URI: z.string().default("redis://localhost:6379"), + + SWARM_STAMP_SYNCER_ENABLED: booleanSchema.default("false"), + SWARM_STAMP_CRON_PATTERN: z.string().default("*/15 * * * *"), + SWARM_BATCH_ID: z.string().optional(), + BEE_ENDPOINT: z.string().url().optional(), + }, + ...presetEnvOptions, + }, + display(env) { + console.log( + `ETH Price Cron Job Worker granularity=${determineGranularity( + env.ETH_PRICE_SYNCER_CRON_PATTERN + )}, redisUri=${maskConnectionUrl( + env.REDIS_URI + )}, databaseUrl=${maskConnectionUrl(env.DATABASE_URL)}, chainId=${ + env.ETH_PRICE_SYNCER_CHAIN_ID + }, jsonRpcUrl=${maskJSONRPCUrl( + env.ETH_PRICE_SYNCER_CHAIN_JSON_RPC_URL + )}, priceFeedContract=${ + env.ETH_PRICE_SYNCER_ETH_USD_PRICE_FEED_CONTRACT_ADDRESS + }, timeTolerance=${env.ETH_PRICE_SYNCER_TIME_TOLERANCE}` + ); + console.log( + `Daily Stats Cron Job 
pattern=${env.STATS_SYNCER_DAILY_CRON_PATTERN}` + ); + console.log( + `Overall Stats Cron Job pattern=${ + env.STATS_SYNCER_OVERALL_CRON_PATTERN + }, forkSlot=${ + env.DENCUN_FORK_SLOT ?? getNetworkDencunForkSlot(env.NETWORK_NAME) + }` + ); + console.log( + `Swarm Stamp Cron Job enabled=${ + env.SWARM_STAMP_SYNCER_ENABLED + }, pattern=${env.SWARM_STAMP_CRON_PATTERN}, beeEndpoint=${ + env.BEE_ENDPOINT ? maskConnectionUrl(env.BEE_ENDPOINT) : undefined + }, batchId=${env.SWARM_BATCH_ID}` + ); + }, +}); diff --git a/apps/jobs/src/errors.ts b/apps/jobs/src/errors.ts new file mode 100644 index 000000000..db05e4355 --- /dev/null +++ b/apps/jobs/src/errors.ts @@ -0,0 +1,9 @@ +export class ErrorException extends Error { + constructor(message: string, cause?: unknown) { + super(message, { + cause, + }); + + this.name = this.constructor.name; + } +} diff --git a/apps/jobs/src/index.ts b/apps/jobs/src/index.ts new file mode 100644 index 000000000..18ae299da --- /dev/null +++ b/apps/jobs/src/index.ts @@ -0,0 +1,83 @@ +import { logger } from "@blobscan/logger"; + +import type { BaseCronJob } from "./cron-jobs/BaseCronJob"; +import { DailyStatsCronJob } from "./cron-jobs/daily-stats/DailyStatsCronJob"; +import { EthPriceCronJob } from "./cron-jobs/eth-price/EthPriceCronJob"; +import { OverallStatsCronJob } from "./cron-jobs/overall-stats/OverallStatsCronJob"; +import { SwarmStampCronJob } from "./cron-jobs/swarm-stamp/SwarmStampCronJob"; +import { env } from "./env"; +import { createRedis } from "./redis"; +import { getNetworkDencunForkSlot, gracefulShutdown } from "./utils"; + +const cronJobs: BaseCronJob[] = []; + +async function main() { + env.display(); + + const redis = createRedis(env.REDIS_URI); + + cronJobs.push( + new DailyStatsCronJob({ + cronPattern: env.STATS_SYNCER_DAILY_CRON_PATTERN, + redisUriOrConnection: redis, + }), + new OverallStatsCronJob({ + cronPattern: env.STATS_SYNCER_OVERALL_CRON_PATTERN, + redisUriOrConnection: redis, + forkSlot: + env.DENCUN_FORK_SLOT 
?? getNetworkDencunForkSlot(env.NETWORK_NAME), + }) + ); + + if (env.ETH_PRICE_SYNCER_ENABLED) { + if (!env.ETH_PRICE_SYNCER_CHAIN_JSON_RPC_URL) { + logger.warn( + `Skipping eth price cron job: ETH_PRICE_SYNCER_CHAIN_JSON_RPC_URL not defined` + ); + } else { + cronJobs.push( + new EthPriceCronJob({ + cronPattern: env.ETH_PRICE_SYNCER_CRON_PATTERN, + redisUriOrConnection: redis, + }) + ); + } + } + + if (env.SWARM_STAMP_SYNCER_ENABLED) { + if (!env.BEE_ENDPOINT) { + logger.warn(`Skipping swarm stamp cron job: BEE_ENDPOINT not defined`); + } else { + cronJobs.push( + new SwarmStampCronJob({ + beeEndpoint: env.BEE_ENDPOINT, + batchId: env.SWARM_BATCH_ID, + cronPattern: env.SWARM_STAMP_CRON_PATTERN, + redisUriOrConnection: redis, + }) + ); + } + } + + await Promise.all(cronJobs.map((j) => j.start())); +} + +async function shutdown() { + for (const job of cronJobs) { + try { + await job.close(); + } catch (err) { + logger.error(`Error closing job ${job.constructor.name}:`, err); + } + } +} + +main().catch(async (err) => { + console.error(err); + + await shutdown(); + + process.exit(1); +}); + +gracefulShutdown(shutdown); diff --git a/apps/jobs/src/redis.ts b/apps/jobs/src/redis.ts new file mode 100644 index 000000000..b1d9cecc8 --- /dev/null +++ b/apps/jobs/src/redis.ts @@ -0,0 +1,7 @@ +import { Redis } from "ioredis"; + +export function createRedis(uri: string) { + return new Redis(uri, { + maxRetriesPerRequest: null, + }); +} diff --git a/apps/jobs/src/utils.ts b/apps/jobs/src/utils.ts new file mode 100644 index 000000000..9d50ee069 --- /dev/null +++ b/apps/jobs/src/utils.ts @@ -0,0 +1,62 @@ +import dayjs from "@blobscan/dayjs"; + +import type { env } from "./env"; + +export type Granularity = "minute" | "hour" | "day"; + +export function determineGranularity(cronPattern: string): Granularity { + switch (cronPattern) { + case "0 * * * *": + return "hour"; + case "0 0 * * *": + return "day"; + case "* * * * *": + return "minute"; + default: + throw new 
Error(`Unsupported cron pattern: ${cronPattern}`); + } +} + +export function isUnset<T>( + value: T | undefined | null +): value is null | undefined { + return value === undefined || value === null; +} + +export function formatDate(date: Date | string | dayjs.Dayjs) { + return dayjs(date).format("YYYY-MM-DD"); +} + +export function getNetworkDencunForkSlot( + networkName: (typeof env)["NETWORK_NAME"] +): number { + switch (networkName) { + case "mainnet": + return 8626176; + case "holesky": + return 950272; + case "hoodi": + return 0; + case "sepolia": + return 4243456; + case "gnosis": + return 14237696; + case "chiado": + return 8265728; + case "devnet": + return 0; + } +} + +export function gracefulShutdown(teardownOp: () => void | Promise<void>) { + const shutdown = async (signal: string) => { + console.log(`Received ${signal}`); + + await teardownOp(); + + process.exit(0); + }; + + process.on("SIGINT", () => void shutdown("SIGINT")); + process.on("SIGTERM", () => void shutdown("SIGTERM")); +} diff --git a/packages/syncers/test/BaseSyncer.test.ts b/apps/jobs/test/BaseCronJob.test.ts similarity index 67% rename from packages/syncers/test/BaseSyncer.test.ts rename to apps/jobs/test/BaseCronJob.test.ts index 2d480ddf7..17bfe20f9 100644 --- a/packages/syncers/test/BaseSyncer.test.ts +++ b/apps/jobs/test/BaseCronJob.test.ts @@ -2,17 +2,16 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; import { testValidError } from "@blobscan/test"; -import { BaseSyncer } from "../src/BaseSyncer"; -import type { BaseSyncerConfig } from "../src/BaseSyncer"; -import { SyncerError } from "../src/errors"; +import { BaseCronJob, CronJobError } from "../src/cron-jobs/BaseCronJob"; +import type { BaseCronJobConfig } from "../src/cron-jobs/BaseCronJob"; -class PeriodicUpdaterMock extends BaseSyncer { - constructor({ name, syncerFn: updaterFn }: Partial = {}) { +class BaseCronJobMock extends BaseCronJob { + constructor({ name }: Partial<BaseCronJobConfig> = {}) { super({ name: name ?? 
"test-updater", redisUriOrConnection: "redis://localhost:6379/1", cronPattern: "* * * * *", - syncerFn: updaterFn ?? (() => Promise.resolve()), + processor: async () => void {}, }); } @@ -29,20 +28,20 @@ class PeriodicUpdaterMock extends BaseSyncer { } } -describe("PeriodicUpdater", () => { - let periodicUpdater: PeriodicUpdaterMock; +describe("BaseCronJob", () => { + let cronJob: BaseCronJobMock; beforeEach(() => { - periodicUpdater = new PeriodicUpdaterMock(); + cronJob = new BaseCronJobMock(); return async () => { - await periodicUpdater.close(); + await cronJob.close(); }; }); it("should create an updater correctly", async () => { - const queue = periodicUpdater.getQueue(); - const worker = periodicUpdater.getWorker(); + const queue = cronJob.getQueue(); + const worker = cronJob.getWorker(); const isPaused = await queue.isPaused(); expect(worker.isRunning(), "Expected worker to be running").toBeTruthy(); @@ -51,9 +50,9 @@ describe("PeriodicUpdater", () => { describe("when running an updater", () => { it("should set up a repeatable job correctly", async () => { - const queue = periodicUpdater.getQueue(); + const queue = cronJob.getQueue(); - await periodicUpdater.start(); + await cronJob.start(); const jobs = await queue.getRepeatableJobs(); @@ -66,25 +65,25 @@ describe("PeriodicUpdater", () => { testValidError( "should throw a valid error when failing to run", async () => { - const queue = periodicUpdater.getQueue(); + const queue = cronJob.getQueue(); vi.spyOn(queue, "add").mockRejectedValueOnce(new Error("Queue error")); - await periodicUpdater.start(); + await cronJob.start(); }, - SyncerError, + CronJobError, { checkCause: true } ); }); describe("when closing an updater", () => { it("should close correctly", async () => { - const closingPeriodicUpdater = new PeriodicUpdaterMock(); + const closingCronJob = new BaseCronJobMock(); - await closingPeriodicUpdater.start(); + await closingCronJob.start(); - const queue = closingPeriodicUpdater.getQueue(); - const 
worker = closingPeriodicUpdater.getWorker(); + const queue = closingCronJob.getQueue(); + const worker = closingCronJob.getWorker(); const queueCloseSpy = vi.spyOn(queue, "close").mockResolvedValueOnce(); const queueRemoveAllListenersSpy = vi @@ -96,7 +95,7 @@ describe("PeriodicUpdater", () => { .spyOn(worker, "removeAllListeners") .mockReturnValueOnce(worker); - await closingPeriodicUpdater.close(); + await closingCronJob.close(); expect(queueCloseSpy).toHaveBeenCalledOnce(); expect(workerCloseSpy).toHaveBeenCalledOnce(); @@ -109,8 +108,8 @@ describe("PeriodicUpdater", () => { testValidError( "should throw a valid error when failing to close it", async () => { - const queue = periodicUpdater.getQueue(); - const worker = periodicUpdater.getWorker(); + const queue = cronJob.getQueue(); + const worker = cronJob.getWorker(); vi.spyOn(queue, "close").mockRejectedValueOnce( new Error("Queue closing error") @@ -119,9 +118,9 @@ describe("PeriodicUpdater", () => { new Error("Worker closing error") ); - await periodicUpdater.close(); + await cronJob.close(); }, - SyncerError, + CronJobError, { checkCause: true, } diff --git a/packages/syncers/test/DailyStatsSyncer.test.fixtures.ts b/apps/jobs/test/DailyStatsCronJob.test.fixtures.ts similarity index 100% rename from packages/syncers/test/DailyStatsSyncer.test.fixtures.ts rename to apps/jobs/test/DailyStatsCronJob.test.fixtures.ts diff --git a/packages/syncers/test/DailyStatsSyncer.test.ts b/apps/jobs/test/DailyStatsCronJob.test.ts similarity index 61% rename from packages/syncers/test/DailyStatsSyncer.test.ts rename to apps/jobs/test/DailyStatsCronJob.test.ts index f806e2560..8b1157163 100644 --- a/packages/syncers/test/DailyStatsSyncer.test.ts +++ b/apps/jobs/test/DailyStatsCronJob.test.ts @@ -1,47 +1,17 @@ -import { beforeEach, describe, expect, it, vi } from "vitest"; +import { describe, expect, it, vi } from "vitest"; import { getDateFromISODateTime, toDailyDate } from "@blobscan/dayjs"; import { prisma } from 
"@blobscan/db"; -import { DailyStatsSyncer } from "../src/syncers/"; -import { CURRENT_DAY_DATA } from "./DailyStatsSyncer.test.fixtures"; +import workerProcessor from "../src/cron-jobs/daily-stats/processor"; +import { CURRENT_DAY_DATA } from "./DailyStatsCronJob.test.fixtures"; import { getDailyStatsDates, indexNewBlock, -} from "./DailyStatsSyncer.test.utils"; - -class DailyStatsSyncerMock extends DailyStatsSyncer { - constructor(redisUri = process.env.REDIS_URI ?? "") { - super({ redisUriOrConnection: redisUri, cronPattern: "* * * * *" }); - } - - getWorker() { - return this.worker; - } - - getWorkerProcessor() { - return this.syncerFn; - } - - getQueue() { - return this.queue; - } -} - -describe("DailyStatsSyncer", () => { - let dailyStatsSyncer: DailyStatsSyncerMock; - - beforeEach(() => { - dailyStatsSyncer = new DailyStatsSyncerMock(); - - return async () => { - await dailyStatsSyncer.close(); - }; - }); +} from "./DailyStatsCronJob.test.utils"; +describe("DailyStatsCronJob", () => { it("should aggregate data for all available days", async () => { - const workerProcessor = dailyStatsSyncer.getWorkerProcessor(); - await workerProcessor(); const dailyStatsDates = await getDailyStatsDates(); @@ -50,8 +20,6 @@ describe("DailyStatsSyncer", () => { }); it("should skip aggregation if not all blocks have been indexed for the last day", async () => { - const workerProcessor = dailyStatsSyncer.getWorkerProcessor(); - await indexNewBlock(CURRENT_DAY_DATA); await workerProcessor(); @@ -69,8 +37,6 @@ describe("DailyStatsSyncer", () => { }); it("should skip aggregation if no blocks have been indexed yet", async () => { - const workerProcessor = dailyStatsSyncer.getWorkerProcessor(); - const findLatestSpy = vi .spyOn(prisma.block, "findLatest") .mockImplementationOnce(() => Promise.resolve(null)); @@ -86,8 +52,6 @@ describe("DailyStatsSyncer", () => { it("should skip aggregation if already up to date", async () => { await indexNewBlock(CURRENT_DAY_DATA); - const 
workerProcessor = dailyStatsSyncer.getWorkerProcessor(); - await workerProcessor(); const dailyStatsSpy = vi.spyOn(prisma.dailyStats, "aggregate"); diff --git a/packages/syncers/test/DailyStatsSyncer.test.utils.ts b/apps/jobs/test/DailyStatsCronJob.test.utils.ts similarity index 100% rename from packages/syncers/test/DailyStatsSyncer.test.utils.ts rename to apps/jobs/test/DailyStatsCronJob.test.utils.ts diff --git a/apps/jobs/test/EthPriceCronJob.test.ts b/apps/jobs/test/EthPriceCronJob.test.ts new file mode 100644 index 000000000..8d3589a28 --- /dev/null +++ b/apps/jobs/test/EthPriceCronJob.test.ts @@ -0,0 +1,70 @@ +import { afterEach, describe, expect, it, vi } from "vitest"; + +import dayjs from "@blobscan/dayjs"; +import { prisma } from "@blobscan/db"; +import { fixtures, getViemClient } from "@blobscan/test"; + +import workerProcessor from "../src/cron-jobs/eth-price/processor"; +import type { EthPriceJob } from "../src/cron-jobs/eth-price/types"; +import { env } from "../src/env"; + +vi.mock("../src/cron-jobs/eth-price/viem.ts", async () => { + return { + client: getViemClient(), + }; +}); + +describe("EthPriceCronJob", () => { + const job = { + data: { + granularity: "minute", + }, + } as EthPriceJob; + + afterEach(() => { + vi.setSystemTime(fixtures.systemDate); + vi.resetAllMocks(); + }); + + it("should sync eth price for current timestamp", async () => { + vi.setSystemTime("2023-08-31T11:56:30"); + await workerProcessor(job); + + const expectedTimestamp = dayjs().utc().startOf("minute"); + + const ethPrices = await prisma.ethUsdPrice.findMany(); + const ethPrice = ethPrices[0]; + + expect(ethPrice?.timestamp.toISOString()).toEqual( + expectedTimestamp.toISOString() + ); + }); + + describe("when time tolerance is set", () => { + it("should sync eth price if it is within threshold", async () => { + vi.setSystemTime("2023-08-31T11:56:30"); + + await workerProcessor(job); + + const latestPrice = await prisma.ethUsdPrice.findFirst({ + orderBy: { + timestamp: 
"desc", + }, + }); + + const timestamp = dayjs.unix(Number(latestPrice?.timestamp)); + + expect(dayjs().diff(timestamp, "second")).toBeLessThanOrEqual( + env.ETH_PRICE_SYNCER_TIME_TOLERANCE as number + ); + }); + + it("should skip eth price if it is outside threshold", async () => { + await workerProcessor(job); + + const ethPrices = await prisma.ethUsdPrice.findMany(); + + expect(ethPrices).toHaveLength(0); + }); + }); +}); diff --git a/packages/syncers/test/OverallStatsSyncer.test.ts b/apps/jobs/test/OverallStatsCronJob.test.ts similarity index 79% rename from packages/syncers/test/OverallStatsSyncer.test.ts rename to apps/jobs/test/OverallStatsCronJob.test.ts index 8f5d505a4..bb133e2dd 100644 --- a/packages/syncers/test/OverallStatsSyncer.test.ts +++ b/apps/jobs/test/OverallStatsCronJob.test.ts @@ -1,35 +1,12 @@ -import { beforeEach, describe, expect, it, vi } from "vitest"; +import { describe, expect, it, vi } from "vitest"; import { prisma } from "@blobscan/db"; import { fixtures } from "@blobscan/test"; -import { OverallStatsSyncer } from "../src/syncers/OverallStatsSyncer"; -import type { OverallStatsSyncerConfig } from "../src/syncers/OverallStatsSyncer"; - -class OverallStatsUpdaterMock extends OverallStatsSyncer { - constructor(config: Partial = {}) { - const lowestSlot = - config.lowestSlot ?? fixtures.blockchainSyncState[0]?.lastLowerSyncedSlot; - super({ - cronPattern: "* * * * *", - redisUriOrConnection: "redis://localhost:6379/1", - ...config, - lowestSlot, - }); - } - - getWorker() { - return this.worker; - } +import workerProcessor from "../src/cron-jobs/overall-stats/processor"; +import type { OverallStatsJob } from "../src/cron-jobs/overall-stats/types"; - getWorkerProcessor() { - return this.syncerFn; - } - - getQueue() { - return this.queue; - } -} +const FORK_SLOT = fixtures.blockchainSyncState[0]?.lastLowerSyncedSlot ?? 
106; function getOverallStats() { return prisma.overallStats @@ -56,21 +33,15 @@ function assertEmptyStats( expect(overallStats, "Overall stats should be empty").toEqual([]); } -describe("OverallStatsUpdater", () => { - let overallStatsUpdater: OverallStatsUpdaterMock; - - beforeEach(() => { - overallStatsUpdater = new OverallStatsUpdaterMock(); - - return async () => { - await overallStatsUpdater.close(); - }; - }); +describe("OverallStatsCronjob", () => { + const job = { + data: { + forkSlot: FORK_SLOT, + }, + } as OverallStatsJob; it("should aggregate overall stats correctly", async () => { - const workerProcessor = overallStatsUpdater.getWorkerProcessor(); - - await workerProcessor(); + await workerProcessor(job); const overallStats = await getOverallStats(); @@ -182,11 +153,10 @@ describe("OverallStatsUpdater", () => { }); it("should update last aggregated block to last finalized block after aggregation", async () => { - const workerProcessor = overallStatsUpdater.getWorkerProcessor(); const expectedLastAggregatedBlock = fixtures.blockchainSyncState[0]?.lastFinalizedBlock; - await workerProcessor(); + await workerProcessor(job); const lastAggregatedBlock = await prisma.blockchainSyncState .findUnique({ @@ -204,9 +174,7 @@ describe("OverallStatsUpdater", () => { it("should aggregate overall stats in batches correctly when there are too many blocks", async () => { const batchSize = 2; - const workerProcessor = new OverallStatsUpdaterMock({ - batchSize, - }).getWorkerProcessor(); + const incrementTransactionSpy = vi.spyOn(prisma.overallStats, "aggregate"); const blockchainSyncState = fixtures.blockchainSyncState[0]; const lastAggregatedBlock = blockchainSyncState @@ -218,7 +186,12 @@ describe("OverallStatsUpdater", () => { (lastFinalizedBlock - lastAggregatedBlock + 1) / batchSize ); - await workerProcessor(); + await workerProcessor({ + data: { + batchSize, + forkSlot: FORK_SLOT, + }, + } as OverallStatsJob); expect( incrementTransactionSpy, @@ -227,8 +200,6 @@ 
describe("OverallStatsUpdater", () => { }); it("should skip aggregation when no finalized block has been set", async () => { - const workerProcessor = overallStatsUpdater.getWorkerProcessor(); - await prisma.blockchainSyncState.update({ data: { lastFinalizedBlock: null, @@ -238,7 +209,7 @@ describe("OverallStatsUpdater", () => { }, }); - await workerProcessor(); + await workerProcessor(job); const allOverallStats = await getOverallStats().then((allOverallStats) => allOverallStats.filter((stats) => !!stats) @@ -248,11 +219,9 @@ describe("OverallStatsUpdater", () => { }); it("should skip aggregation when no blocks have been indexed yet", async () => { - const workerProcessor = overallStatsUpdater.getWorkerProcessor(); - vi.spyOn(prisma.block, "findLatest").mockResolvedValueOnce(null); - await workerProcessor(); + await workerProcessor(job); const allOverallStats = await getOverallStats().then((allOverallStats) => allOverallStats.filter((stats) => !!stats) @@ -262,11 +231,11 @@ describe("OverallStatsUpdater", () => { }); it("should skip aggregation when the lowest slot hasn't been reached yet", async () => { - const workerProcessor = new OverallStatsUpdaterMock({ - lowestSlot: 1, - }).getWorkerProcessor(); - - await workerProcessor(); + await workerProcessor({ + data: { + forkSlot: 1, + }, + } as OverallStatsJob); const allOverallStats = await getOverallStats().then((allOverallStats) => allOverallStats.filter((stats) => !!stats) @@ -276,13 +245,11 @@ describe("OverallStatsUpdater", () => { }); it("should skip aggregation when there is no new finalized blocks", async () => { - const workerProcessor = overallStatsUpdater.getWorkerProcessor(); - - await workerProcessor(); + await workerProcessor(job); const allOverallStats = await getOverallStats(); - await workerProcessor(); + await workerProcessor(job); const allOverallStatsAfter = await getOverallStats(); diff --git a/packages/syncers/test/SwarmStampSyncer.test.ts b/apps/jobs/test/SwarmStampCronJob.test.ts similarity 
index 61% rename from packages/syncers/test/SwarmStampSyncer.test.ts rename to apps/jobs/test/SwarmStampCronJob.test.ts index 84ed0d478..040760448 100644 --- a/packages/syncers/test/SwarmStampSyncer.test.ts +++ b/apps/jobs/test/SwarmStampCronJob.test.ts @@ -1,4 +1,3 @@ -/* eslint-disable @typescript-eslint/no-misused-promises */ import { http, HttpResponse } from "msw"; import { setupServer } from "msw/node"; import { beforeAll, beforeEach, describe, expect, it } from "vitest"; @@ -7,17 +6,20 @@ import type { BlobStoragesState } from "@blobscan/db"; import { prisma } from "@blobscan/db"; import { fixtures, testValidError } from "@blobscan/test"; -import type { SwarmStampSyncerConfig } from "../src/syncers/SwarmStampSyncer"; -import { SwarmStampSyncer } from "../src/syncers/SwarmStampSyncer"; +import { CronJobError } from "../src/cron-jobs/BaseCronJob"; +import type { SwarmStampCronJobConfig } from "../src/cron-jobs/swarm-stamp/SwarmStampCronJob"; +import { SwarmStampCronJob } from "../src/cron-jobs/swarm-stamp/SwarmStampCronJob"; +import workerProcessor from "../src/cron-jobs/swarm-stamp/processor"; +import type { SwarmStampJob } from "../src/cron-jobs/swarm-stamp/types"; const BEE_ENDPOINT = process.env.BEE_ENDPOINT ?? "http://localhost:1633"; -class SwarmStampSyncerMock extends SwarmStampSyncer { - constructor({ batchId, cronPattern }: Partial = {}) { +class SwarmStampCronJobMock extends SwarmStampCronJob { + constructor({ batchId, cronPattern }: Partial = {}) { super({ redisUriOrConnection: process.env.REDIS_URI ?? "", cronPattern: cronPattern ?? "* * * * *", - batchId: batchId ?? process.env.SWARM_BATCH_ID ?? 
"", + batchId, beeEndpoint: BEE_ENDPOINT, }); } @@ -25,17 +27,18 @@ class SwarmStampSyncerMock extends SwarmStampSyncer { getQueue() { return this.queue; } - - getWorkerProcessor() { - return this.syncerFn; - } } -describe("SwarmStampSyncer", () => { +describe("SwarmStampCronjob", () => { const expectedBatchId = fixtures.blobStoragesState[0]?.swarmDataId as string; const expectedBatchTTL = 1000; - let swarmStampSyncer: SwarmStampSyncerMock; + const job = { + data: { + batchId: expectedBatchId, + beeEndpoint: BEE_ENDPOINT, + }, + } as SwarmStampJob; beforeAll(() => { const baseUrl = `${BEE_ENDPOINT}/stamps`; @@ -90,12 +93,38 @@ describe("SwarmStampSyncer", () => { }; }); - beforeEach(() => { - swarmStampSyncer = new SwarmStampSyncerMock(); + describe("when creating a new instance", () => { + let swarmStampCronJob: SwarmStampCronJobMock; - return async () => { - await swarmStampSyncer.close(); - }; + beforeEach(() => { + swarmStampCronJob = new SwarmStampCronJobMock(); + + return async () => { + await swarmStampCronJob.close(); + }; + }); + + it("should fetch batch id from db if none is provided", async () => { + const job = await swarmStampCronJob.start(); + + expect(job?.data).toEqual({ + batchId: expectedBatchId, + beeEndpoint: BEE_ENDPOINT, + }); + }); + + testValidError( + "should throw a valid error when no batch id was found and none was provided", + async () => { + await prisma.blobStoragesState.deleteMany(); + + await swarmStampCronJob.start(); + }, + CronJobError, + { + checkCause: true, + } + ); }); describe("when creating a new swarm batch data row in the db", async () => { @@ -104,9 +133,7 @@ describe("SwarmStampSyncer", () => { beforeEach(async () => { await prisma.blobStoragesState.deleteMany(); - const workerProcessor = swarmStampSyncer.getWorkerProcessor(); - - await workerProcessor().catch((err) => console.log(err)); + await workerProcessor(job).catch((err: unknown) => console.log(err)); blobStorageState = await prisma.blobStoragesState.findFirst(); 
}); @@ -130,8 +157,7 @@ describe("SwarmStampSyncer", () => { }, }); - const workerProcessor = swarmStampSyncer.getWorkerProcessor(); - await workerProcessor(); + await workerProcessor(job); const blobStorageState = await prisma.blobStoragesState.findFirst(); @@ -141,16 +167,15 @@ describe("SwarmStampSyncer", () => { testValidError( "should fail when trying to fetch a non-existing batch", async () => { - const failingSwarmStampSyncer = new SwarmStampSyncerMock({ - batchId: - "6b538866048cfb6e9e1d06805374c51572c11219d2d550c03e6e277366cb0371", - }); - const failingWorkerProcessor = - failingSwarmStampSyncer.getWorkerProcessor(); - - await failingWorkerProcessor().finally(async () => { - await failingSwarmStampSyncer.close(); - }); + const job = { + data: { + batchId: + "6b538866048cfb6e9e1d06805374c51572c11219d2d550c03e6e277366cb0371", + beeEndpoint: BEE_ENDPOINT, + }, + } as SwarmStampJob; + + await workerProcessor(job); }, Error, { @@ -161,15 +186,14 @@ describe("SwarmStampSyncer", () => { testValidError( "should fail when trying to fetch an invalid batch", async () => { - const failingSwarmStampSyncer = new SwarmStampSyncerMock({ - batchId: "invalid-batch", - }); - const failingWorkerProcessor = - failingSwarmStampSyncer.getWorkerProcessor(); - - await failingWorkerProcessor().finally(async () => { - await failingSwarmStampSyncer.close(); - }); + const job = { + data: { + batchId: "invalid-batch", + beeEndpoint: BEE_ENDPOINT, + }, + } as SwarmStampJob; + + await workerProcessor(job); }, Error, { diff --git a/apps/jobs/test/__snapshots__/BaseCronJob.test.ts.snap b/apps/jobs/test/__snapshots__/BaseCronJob.test.ts.snap new file mode 100644 index 000000000..67f98b8de --- /dev/null +++ b/apps/jobs/test/__snapshots__/BaseCronJob.test.ts.snap @@ -0,0 +1,9 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`BaseCronJob > should throw a valid error when failing to close it 1`] = `"Cron job \\"test-updater-cron-job\\" failed: An error ocurred when 
performing closing operation"`; + +exports[`BaseCronJob > should throw a valid error when failing to close it 2`] = `[Error: Queue closing error]`; + +exports[`BaseCronJob > when running an updater > should throw a valid error when failing to run 1`] = `"Cron job \\"test-updater-cron-job\\" failed: An error ocurred when starting cron job"`; + +exports[`BaseCronJob > when running an updater > should throw a valid error when failing to run 2`] = `[Error: Queue error]`; diff --git a/apps/jobs/test/__snapshots__/CronJob.test.ts.snap b/apps/jobs/test/__snapshots__/CronJob.test.ts.snap new file mode 100644 index 000000000..76782e23d --- /dev/null +++ b/apps/jobs/test/__snapshots__/CronJob.test.ts.snap @@ -0,0 +1,9 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`CronJob > should throw a valid error when failing to close it 1`] = `"Cron job \\"test-updater-cron-job\\" failed: An error ocurred when performing closing operation"`; + +exports[`CronJob > should throw a valid error when failing to close it 2`] = `[Error: Queue closing error]`; + +exports[`CronJob > when running an updater > should throw a valid error when failing to run 1`] = `"Cron job \\"test-updater-cron-job\\" failed: An error ocurred when starting cron job"`; + +exports[`CronJob > when running an updater > should throw a valid error when failing to run 2`] = `[Error: Queue error]`; diff --git a/packages/syncers/test/__snapshots__/DailyStatsSyncer.test.ts.snap b/apps/jobs/test/__snapshots__/DailyStatsCronJob.test.ts.snap similarity index 92% rename from packages/syncers/test/__snapshots__/DailyStatsSyncer.test.ts.snap rename to apps/jobs/test/__snapshots__/DailyStatsCronJob.test.ts.snap index 26ecb6ca8..5571a4869 100644 --- a/packages/syncers/test/__snapshots__/DailyStatsSyncer.test.ts.snap +++ b/apps/jobs/test/__snapshots__/DailyStatsCronJob.test.ts.snap @@ -1,6 +1,6 @@ // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`DailyStatsSyncer > should aggregate 
data for all available days 1`] = ` +exports[`DailyStatsCronJob > should aggregate data for all available days 1`] = ` [ [ "2022-10-16", diff --git a/apps/jobs/test/__snapshots__/SwarmStampCronJob.test.ts.snap b/apps/jobs/test/__snapshots__/SwarmStampCronJob.test.ts.snap new file mode 100644 index 000000000..1f591e9ce --- /dev/null +++ b/apps/jobs/test/__snapshots__/SwarmStampCronJob.test.ts.snap @@ -0,0 +1,13 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`SwarmStampCronjob > should fail when trying to fetch a non-existing batch 1`] = `"Failed to fetch stamp batch \\"6b538866048cfb6e9e1d06805374c51572c11219d2d550c03e6e277366cb0371\\""`; + +exports[`SwarmStampCronjob > should fail when trying to fetch a non-existing batch 2`] = `[SwarmNodeError: issuer does not exist]`; + +exports[`SwarmStampCronjob > should fail when trying to fetch an invalid batch 1`] = `"Failed to fetch stamp batch \\"invalid-batch\\""`; + +exports[`SwarmStampCronjob > should fail when trying to fetch an invalid batch 2`] = `[SwarmNodeError: invalid path params]`; + +exports[`SwarmStampCronjob > when creating a new instance > should throw a valid error when no batch id was found and none was provided 1`] = `"Cron job \\"swarm-stamp-cron-job\\" failed: No swarm batch id found"`; + +exports[`SwarmStampCronjob > when creating a new instance > should throw a valid error when no batch id was found and none was provided 2`] = `undefined`; diff --git a/apps/jobs/tsconfig.json b/apps/jobs/tsconfig.json new file mode 100644 index 000000000..1cd6586e1 --- /dev/null +++ b/apps/jobs/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "@blobscan/tsconfig/base.production.json", + "compilerOptions": { + "module": "esnext" + }, + "ts-node": { + // these options are overrides used only by ts-node + // same as the --compilerOptions flag and the TS_NODE_COMPILER_OPTIONS environment variable + "compilerOptions": { + "module": "commonjs" + } + }, + "include": [ + "src/**/*.ts", + 
"test/**/*.ts", + "esbuild.config.mjs", + "vitest.config.ts" + ] +} diff --git a/packages/syncers/vitest.config.ts b/apps/jobs/vitest.config.ts similarity index 100% rename from packages/syncers/vitest.config.ts rename to apps/jobs/vitest.config.ts diff --git a/apps/rest-api-server/package.json b/apps/rest-api-server/package.json index b534702a9..79722f508 100644 --- a/apps/rest-api-server/package.json +++ b/apps/rest-api-server/package.json @@ -22,7 +22,6 @@ "@blobscan/logger": "workspace:^0.1.2", "@blobscan/open-telemetry": "workspace:^0.0.9", "@blobscan/price-feed": "workspace:^0.1.0", - "@blobscan/syncers": "workspace:^0.5.0", "@blobscan/zod": "workspace:^0.1.0", "@opentelemetry/instrumentation-express": "^0.33.0", "@sentry/node": "^7.109.0", diff --git a/apps/rest-api-server/src/index.ts b/apps/rest-api-server/src/index.ts index fb2d29997..4b92a6cba 100644 --- a/apps/rest-api-server/src/index.ts +++ b/apps/rest-api-server/src/index.ts @@ -21,70 +21,59 @@ import { printBanner } from "./banner"; import { logger } from "./logger"; import { morganMiddleware } from "./morgan"; import { openApiDocument } from "./openapi"; -import { setUpSyncers } from "./syncers"; collectDefaultMetrics(); printBanner(); -async function main() { - const closeSyncers = await setUpSyncers(); - - const app = express(); - - app.use(cors()); - app.use(bodyParser.json({ limit: "3mb" })); - app.use(morganMiddleware); - - app.get("/metrics", metricsHandler); - - // Serve Swagger UI with our OpenAPI schema - app.use("/", swaggerUi.serve); - app.get("/", swaggerUi.setup(openApiDocument)); - - // Handle incoming OpenAPI requests - app.use( - "/", - createOpenApiExpressMiddleware({ - router: appRouter, - createContext: createTRPCContext({ - scope: "rest-api", - }), - onError({ error }) { - Sentry.captureException(error); - - logger.error(error); - }, - }) - ); - - const server = app.listen(env.BLOBSCAN_API_PORT, () => { - logger.info(`Server started on http://0.0.0.0:${env.BLOBSCAN_API_PORT}`); 
- }); - - async function gracefulShutdown(signal: string) { - logger.debug(`Received ${signal}. Shutting down...`); - - await apiGracefulShutdown() - .finally(async () => { - await closeSyncers(); - }) - .finally(() => { - server.close(() => { - logger.debug("Server shut down successfully"); - }); - }); - } - - // Listen for TERM signal .e.g. kill - process.on("SIGTERM", async () => { - await gracefulShutdown("SIGTERM"); - }); - - // Listen for INT signal e.g. Ctrl-C - process.on("SIGINT", async () => { - await gracefulShutdown("SIGINT"); +const app = express(); + +app.use(cors()); +app.use(bodyParser.json({ limit: "3mb" })); +app.use(morganMiddleware); + +app.get("/metrics", metricsHandler); + +// Serve Swagger UI with our OpenAPI schema +app.use("/", swaggerUi.serve); +app.get("/", swaggerUi.setup(openApiDocument)); + +// Handle incoming OpenAPI requests +app.use( + "/", + createOpenApiExpressMiddleware({ + router: appRouter, + createContext: createTRPCContext({ + scope: "rest-api", + }), + onError({ error }) { + Sentry.captureException(error); + + logger.error(error); + }, + }) +); + +const server = app.listen(env.BLOBSCAN_API_PORT, () => { + logger.info(`Server started on http://0.0.0.0:${env.BLOBSCAN_API_PORT}`); +}); + +async function gracefulShutdown(signal: string) { + logger.debug(`Received ${signal}. Shutting down...`); + + await apiGracefulShutdown().finally(() => { + server.close(() => { + logger.debug("Server shut down successfully"); + }); }); } -main(); +// Listen for TERM signal .e.g. kill +process.on("SIGTERM", async () => { + await gracefulShutdown("SIGTERM"); +}); + +// Listen for INT signal e.g. 
Ctrl-C +process.on("SIGINT", async () => { + await gracefulShutdown("SIGINT"); +}); diff --git a/apps/rest-api-server/src/syncers.ts b/apps/rest-api-server/src/syncers.ts deleted file mode 100644 index 311779dd4..000000000 --- a/apps/rest-api-server/src/syncers.ts +++ /dev/null @@ -1,104 +0,0 @@ -import type { PublicClient } from "viem"; -import { createPublicClient, http } from "viem"; -import * as chains from "viem/chains"; - -import { env } from "@blobscan/env"; -import { PriceFeed } from "@blobscan/price-feed"; -import type { BaseSyncer } from "@blobscan/syncers"; -import { - DailyStatsSyncer, - ETHPriceSyncer, - OverallStatsSyncer, - SwarmStampSyncer, - createRedisConnection, -} from "@blobscan/syncers"; - -import { logger } from "./logger"; -import { getNetworkDencunForkSlot } from "./utils"; - -export async function setUpSyncers() { - const connection = createRedisConnection(env.REDIS_URI); - const syncers: BaseSyncer[] = []; - - if (env.SWARM_STORAGE_ENABLED) { - if (!env.SWARM_BATCH_ID) { - logger.error(`Can't initialize Swarm stamp job: no batch ID provided`); - } else if (!env.BEE_ENDPOINT) { - logger.error( - "Can't initialize Swarm stamp job: no Bee endpoint provided" - ); - } else { - syncers.push( - new SwarmStampSyncer({ - cronPattern: env.SWARM_STAMP_CRON_PATTERN, - redisUriOrConnection: connection, - batchId: env.SWARM_BATCH_ID, - beeEndpoint: env.BEE_ENDPOINT, - }) - ); - } - } - - syncers.push( - new DailyStatsSyncer({ - cronPattern: env.STATS_SYNCER_DAILY_CRON_PATTERN, - redisUriOrConnection: connection, - }) - ); - - syncers.push( - new OverallStatsSyncer({ - cronPattern: env.STATS_SYNCER_OVERALL_CRON_PATTERN, - redisUriOrConnection: connection, - lowestSlot: - env.DENCUN_FORK_SLOT ?? 
getNetworkDencunForkSlot(env.NETWORK_NAME), - }) - ); - - if (env.ETH_PRICE_SYNCER_ENABLED && env.ETH_PRICE_SYNCER_CHAIN_JSON_RPC_URL) { - const chain = Object.values(chains).find( - (c) => c.id === env.ETH_PRICE_SYNCER_CHAIN_ID - ); - - if (!chain) { - throw new Error( - `Can't initialize ETH price syncer: chain with id ${env.ETH_PRICE_SYNCER_CHAIN_ID} not found` - ); - } - - const client = createPublicClient({ - chain, - transport: http(env.ETH_PRICE_SYNCER_CHAIN_JSON_RPC_URL), - }); - - const ethUsdPriceFeed = await PriceFeed.create({ - client: client as PublicClient, - dataFeedContractAddress: - env.ETH_PRICE_SYNCER_ETH_USD_PRICE_FEED_CONTRACT_ADDRESS as `0x${string}`, - timeTolerance: env.ETH_PRICE_SYNCER_TIME_TOLERANCE, - }); - - syncers.push( - new ETHPriceSyncer({ - cronPattern: env.ETH_PRICE_SYNCER_CRON_PATTERN, - redisUriOrConnection: connection, - ethUsdPriceFeed, - }) - ); - } - - await Promise.all(syncers.map((syncer) => syncer.start())); - - return () => { - let teardownPromise = Promise.resolve(); - - for (const syncer of syncers) { - // eslint-disable-next-line @typescript-eslint/no-misused-promises - teardownPromise = teardownPromise.finally(async () => { - await syncer.close(); - }); - } - - return teardownPromise; - }; -} diff --git a/docker-compose.local.yml b/docker-compose.local.yml index aec284dd2..bfa3357a7 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -19,6 +19,17 @@ services: depends_on: - postgres + jobs: + extends: + file: docker-compose.yml + service: jobs + build: + context: . 
+ target: jobs + depends_on: + - postgres + - redis + docs: extends: file: docker-compose.yml diff --git a/docker-compose.yml b/docker-compose.yml index 6093acf78..52db9e1cc 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -22,6 +22,11 @@ services: - /tmp/blobscan-blobs:/tmp/blobscan-blobs env_file: - ".env" + jobs: + image: blossomlabs/blobscan-jobs:${BLOBSCAN_JOBS_TAG} + restart: always + env_file: + - .env indexer: image: blossomlabs/blobscan-indexer:master diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh index 1a7182499..d63f8a9b4 100755 --- a/docker-entrypoint.sh +++ b/docker-entrypoint.sh @@ -20,11 +20,15 @@ _main() { elif [ "$1" = 'api' ]; then apply_prisma_migrations --schema schema.prisma node /app/index.js + elif [ "$1" = 'jobs' ]; then + node /app/index.js elif [ "$1" = '--help' ]; then echo "## Blobscan ##" echo "" echo "Usage:" - echo "$0 [web|api]" + echo " $0 web # Start the web server" + echo " $0 api # Start the REST API" + echo " $0 jobs # Run cron jobs" else echo "Invalid command: $1" exit 1 diff --git a/packages/syncers/package.json b/packages/syncers/package.json deleted file mode 100644 index 49d64c142..000000000 --- a/packages/syncers/package.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "@blobscan/syncers", - "description": "Blobscan's stats syncer", - "private": true, - "version": "0.5.0", - "main": "./src/index.ts", - "scripts": { - "clean": "rm -rf .turbo node_modules", - "lint": "eslint .", - "lint:fix": "pnpm lint --fix", - "type-check": "tsc --noEmit", - "test": "pnpm with-env:test vitest", - "test:ui": "pnpm with-env:test vitest --ui", - "with-env:test": "dotenv -e ../../.env.test --" - }, - "dependencies": { - "@blobscan/dates": "workspace:*", - "@blobscan/dayjs": "workspace:^0.1.0", - "@blobscan/db": "workspace:^0.18.0", - "@blobscan/logger": "workspace:^0.1.2", - "@blobscan/price-feed": "workspace:^0.1.0", - "@blobscan/zod": "workspace:^0.1.0", - "axios": "^1.7.2", - "bullmq": "^4.13.2", - "ioredis": "^5.3.2" 
- }, - "eslintConfig": { - "root": true, - "extends": [ - "@blobscan/eslint-config/base" - ] - } -} diff --git a/packages/syncers/src/index.ts b/packages/syncers/src/index.ts deleted file mode 100644 index 2197f0b3b..000000000 --- a/packages/syncers/src/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { BaseSyncer } from "./BaseSyncer"; -export * from "./syncers"; -export { createRedisConnection } from "./utils"; diff --git a/packages/syncers/src/syncers/DailyStatsSyncer.ts b/packages/syncers/src/syncers/DailyStatsSyncer.ts deleted file mode 100644 index f2e2e49a2..000000000 --- a/packages/syncers/src/syncers/DailyStatsSyncer.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { normalizeDate, toDailyDate } from "@blobscan/dayjs"; -import { prisma } from "@blobscan/db"; - -import { BaseSyncer } from "../BaseSyncer"; -import type { CommonSyncerConfig } from "../BaseSyncer"; -import { formatDate } from "../utils"; - -export type DailyStatsSyncerConfig = CommonSyncerConfig; - -export class DailyStatsSyncer extends BaseSyncer { - constructor({ redisUriOrConnection, cronPattern }: DailyStatsSyncerConfig) { - const name = "daily-stats"; - - super({ - name, - redisUriOrConnection, - cronPattern, - syncerFn: async () => { - const lastIndexedBlock = await prisma.block.findLatest(); - - if (!lastIndexedBlock) { - this.logger.debug( - "Skipping stats aggregation. No blocks indexed yet" - ); - - return; - } - const targetDate = normalizeDate(lastIndexedBlock.timestamp).subtract( - 1, - "day" - ); - const targetDay = toDailyDate(targetDate); - - const rawLastDailyStatsDay = await prisma.dailyStats.findFirst({ - select: { day: true }, - where: { category: null, rollup: null }, - orderBy: { day: "desc" }, - }); - const lastDailyStatsDay = rawLastDailyStatsDay?.day - ? normalizeDate(rawLastDailyStatsDay.day) - : undefined; - - if ( - lastDailyStatsDay - ? lastDailyStatsDay?.isSame(targetDay, "day") - : false - ) { - this.logger.debug(`Skipping stats aggregation. 
Already up to date`); - - return; - } - - const res = await prisma.dailyStats.aggregate({ - from: lastDailyStatsDay?.add(1, "day"), - to: targetDay, - }); - - this.logger.info( - `Daily data up to day ${formatDate( - targetDay - )} aggregated. ${res} daily stats created successfully.` - ); - }, - }); - } -} diff --git a/packages/syncers/src/syncers/ETHPriceSyncer.ts b/packages/syncers/src/syncers/ETHPriceSyncer.ts deleted file mode 100644 index cacb7b13c..000000000 --- a/packages/syncers/src/syncers/ETHPriceSyncer.ts +++ /dev/null @@ -1,74 +0,0 @@ -import dayjs, { normalizeDate } from "@blobscan/dayjs"; -import { prisma } from "@blobscan/db"; -import type { PriceFeed } from "@blobscan/price-feed"; - -import { BaseSyncer } from "../BaseSyncer"; -import type { CommonSyncerConfig } from "../BaseSyncer"; - -export interface ETHPriceSyncerConfig extends CommonSyncerConfig { - ethUsdPriceFeed: PriceFeed; -} - -type Granularity = "minute" | "hour" | "day"; - -function determineGranularity(cronPattern: string): Granularity { - switch (cronPattern) { - case "0 * * * *": - return "hour"; - case "0 0 * * *": - return "day"; - case "* * * * *": - return "minute"; - default: - throw new Error(`Unsupported cron pattern: ${cronPattern}`); - } -} - -export class ETHPriceSyncer extends BaseSyncer { - constructor(config: ETHPriceSyncerConfig) { - super({ - ...config, - name: "eth-price", - syncerFn: async () => { - const now = normalizeDate(dayjs()); - const granularity = determineGranularity(config.cronPattern); - - const targetDateTime = now.startOf(granularity); - - const priceData = await config.ethUsdPriceFeed.findPriceByTimestamp( - targetDateTime.unix() - ); - - if (!priceData) { - this.logger.warn( - `Skipping eth price update: No price data found for datetime ${targetDateTime - .utc() - .toISOString()}` - ); - - return; - } - - const roundId = priceData.roundId.toString(); - const price = priceData.price; - const priceTimestamp = targetDateTime.toDate(); - const priceRow = 
{ - price, - timestamp: priceTimestamp, - }; - - await prisma.ethUsdPrice.upsert({ - create: priceRow, - update: priceRow, - where: { - timestamp: priceTimestamp, - }, - }); - - this.logger.debug( - `ETH price synced: $${price} at ${targetDateTime.toISOString()} recorded (retrieved from round ${roundId})` - ); - }, - }); - } -} diff --git a/packages/syncers/src/syncers/OverallStatsSyncer.ts b/packages/syncers/src/syncers/OverallStatsSyncer.ts deleted file mode 100644 index cdd1f85a1..000000000 --- a/packages/syncers/src/syncers/OverallStatsSyncer.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { prisma } from "@blobscan/db"; -import type { BlockNumberRange } from "@blobscan/db"; - -import { BaseSyncer } from "../BaseSyncer"; -import type { CommonSyncerConfig } from "../BaseSyncer"; - -export interface OverallStatsSyncerConfig extends CommonSyncerConfig { - batchSize?: number; - lowestSlot?: number; -} - -const DEFAULT_BATCH_SIZE = 2_000_000; -const DEFAULT_INITIAL_SLOT = 0; - -function isUnset(value: T | undefined | null): value is null | undefined { - return value === undefined || value === null; -} - -export class OverallStatsSyncer extends BaseSyncer { - constructor({ - cronPattern, - redisUriOrConnection, - batchSize = DEFAULT_BATCH_SIZE, - lowestSlot = DEFAULT_INITIAL_SLOT, - }: OverallStatsSyncerConfig) { - const name = "overall-stats"; - super({ - name, - cronPattern, - redisUriOrConnection, - syncerFn: async () => { - const [blockchainSyncState, latestBlock] = await Promise.all([ - prisma.blockchainSyncState.findUnique({ - select: { - lastLowerSyncedSlot: true, - lastAggregatedBlock: true, - lastFinalizedBlock: true, - }, - where: { - id: 1, - }, - }), - prisma.block.findLatest(), - ]); - const { lastAggregatedBlock, lastFinalizedBlock, lastLowerSyncedSlot } = - blockchainSyncState ?? 
{}; - const lastIndexedBlockNumber = latestBlock?.number; - - if ( - isUnset(lastIndexedBlockNumber) || - isUnset(lastFinalizedBlock) || - lastLowerSyncedSlot !== lowestSlot - ) { - this.logger.debug( - "Skipping stats aggregation. Chain hasn't been fully indexed yet" - ); - - return; - } - - const blockRange: BlockNumberRange = { - from: lastAggregatedBlock ? lastAggregatedBlock + 1 : 0, - to: Math.min(lastFinalizedBlock, lastIndexedBlockNumber), - }; - - if (blockRange.from >= blockRange.to) { - this.logger.debug( - `Skipping stats aggregation. No new finalized blocks (last aggregated block: ${lastAggregatedBlock})` - ); - - return; - } - - const { from, to } = blockRange; - const unprocessedBlocks = to - from + 1; - const batches = Math.ceil(unprocessedBlocks / batchSize); - - for (let i = 0; i < batches; i++) { - const batchFrom = from + i * batchSize; - const batchTo = Math.min(batchFrom + batchSize - 1, to); - const batchBlockRange: BlockNumberRange = { - from: batchFrom, - to: batchTo, - }; - - await prisma.overallStats.aggregate({ blockRange: batchBlockRange }); - - if (batches > 1) { - const formattedFromBlock = batchBlockRange.from.toLocaleString(); - const formattedToBlock = batchBlockRange.to.toLocaleString(); - - this.logger.debug( - `(batch ${ - i + 1 - }/${batches}) Data from block ${formattedFromBlock} up to ${formattedToBlock} aggregated successfully` - ); - } - } - - this.logger.info( - `Data from block ${blockRange.from.toLocaleString()} up to ${blockRange.to.toLocaleString()} aggregated successfully.` - ); - }, - }); - } -} diff --git a/packages/syncers/src/syncers/SwarmStampSyncer.ts b/packages/syncers/src/syncers/SwarmStampSyncer.ts deleted file mode 100644 index 5fded500d..000000000 --- a/packages/syncers/src/syncers/SwarmStampSyncer.ts +++ /dev/null @@ -1,77 +0,0 @@ -import type { AxiosResponse } from "axios"; -import { AxiosError } from "axios"; -import axios from "axios"; - -import { formatTtl } from "@blobscan/dates"; -import { prisma } 
from "@blobscan/db"; - -import { BaseSyncer } from "../BaseSyncer"; -import type { CommonSyncerConfig } from "../BaseSyncer"; -import { SwarmNodeError } from "../errors"; - -type BatchData = { - batchID: string; - batchTTL: number; -}; - -export interface SwarmStampSyncerConfig extends CommonSyncerConfig { - beeEndpoint: string; - batchId: string; -} - -export class SwarmStampSyncer extends BaseSyncer { - constructor({ - cronPattern, - redisUriOrConnection, - batchId, - beeEndpoint, - }: SwarmStampSyncerConfig) { - const name = "swarm-stamp"; - super({ - name, - cronPattern, - redisUriOrConnection, - syncerFn: async () => { - let response: AxiosResponse; - - try { - const url = `${beeEndpoint}/stamps/${batchId}`; - - response = await axios.get(url); - } catch (err) { - let cause = err; - - if (err instanceof AxiosError) { - cause = new SwarmNodeError(err); - } - - throw new Error(`Failed to fetch stamp batch "${batchId}"`, { - cause, - }); - } - - const { batchTTL } = response.data; - - await prisma.blobStoragesState.upsert({ - create: { - swarmDataId: batchId, - swarmDataTTL: batchTTL, - }, - update: { - swarmDataId: batchId, - swarmDataTTL: batchTTL, - updatedAt: new Date(), - }, - where: { - id: 1, - }, - }); - - const expiryDate = formatTtl(batchTTL); - this.logger.info( - `Swarm batch ID "${batchId}" updated. 
New expiry date: ${expiryDate}.` - ); - }, - }); - } -} diff --git a/packages/syncers/src/syncers/index.ts b/packages/syncers/src/syncers/index.ts deleted file mode 100644 index 568234609..000000000 --- a/packages/syncers/src/syncers/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -export { DailyStatsSyncer } from "./DailyStatsSyncer"; -export type { DailyStatsSyncerConfig } from "./DailyStatsSyncer"; -export { OverallStatsSyncer } from "./OverallStatsSyncer"; -export type { OverallStatsSyncerConfig } from "./OverallStatsSyncer"; -export { SwarmStampSyncer } from "./SwarmStampSyncer"; -export type { SwarmStampSyncerConfig } from "./SwarmStampSyncer"; -export { ETHPriceSyncer } from "./ETHPriceSyncer"; diff --git a/packages/syncers/src/utils.ts b/packages/syncers/src/utils.ts deleted file mode 100644 index 24f5cbbf7..000000000 --- a/packages/syncers/src/utils.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Redis } from "ioredis"; - -import dayjs from "@blobscan/dayjs"; - -export function createRedisConnection(uri: string) { - return new Redis(uri, { - maxRetriesPerRequest: null, - }); -} - -export function formatDate(date: Date | string | dayjs.Dayjs) { - return dayjs(date).format("YYYY-MM-DD"); -} diff --git a/packages/syncers/test/EthPriceSyncer.test.ts b/packages/syncers/test/EthPriceSyncer.test.ts deleted file mode 100644 index 86ae96e8b..000000000 --- a/packages/syncers/test/EthPriceSyncer.test.ts +++ /dev/null @@ -1,127 +0,0 @@ -/* eslint-disable @typescript-eslint/no-non-null-assertion */ - -import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; - -import dayjs from "@blobscan/dayjs"; -import { prisma } from "@blobscan/db"; -import { PriceFeed } from "@blobscan/price-feed"; -import { fixtures, getViemClient } from "@blobscan/test"; - -import { env } from "../../env"; -import { ETHPriceSyncer } from "../src"; -import type { ETHPriceSyncerConfig } from "../src/syncers/ETHPriceSyncer"; - -class EthPriceUpdaterMock extends ETHPriceSyncer { - constructor({ - 
ethUsdPriceFeed, - }: Pick) { - super({ - cronPattern: env.ETH_PRICE_SYNCER_CRON_PATTERN, - redisUriOrConnection: env.REDIS_URI, - ethUsdPriceFeed, - }); - } - - getWorker() { - return this.worker; - } - - getWorkerProcessor() { - return this.syncerFn; - } - - getQueue() { - return this.queue; - } -} - -describe("EthPriceSyncer", () => { - let ethPriceUpdater: EthPriceUpdaterMock; - - beforeEach(async () => { - const ethUsdPriceFeed = await PriceFeed.create({ - client: getViemClient(), - dataFeedContractAddress: - env.ETH_PRICE_SYNCER_ETH_USD_PRICE_FEED_CONTRACT_ADDRESS! as `0x${string}`, - }); - ethPriceUpdater = new EthPriceUpdaterMock({ ethUsdPriceFeed }); - - return async () => { - await ethPriceUpdater.close(); - }; - }); - - afterEach(() => { - vi.setSystemTime(fixtures.systemDate); - }); - - it("should sync eth price for current timestamp", async () => { - const workerProcessor = ethPriceUpdater.getWorkerProcessor(); - - await workerProcessor(); - - const expectedTimestamp = dayjs().utc().startOf("minute"); - - const ethPrices = await prisma.ethUsdPrice.findMany(); - const ethPrice = ethPrices[0]; - - expect(ethPrice?.timestamp.toISOString()).toEqual( - expectedTimestamp.toISOString() - ); - }); - - describe("when time tolerance is set", () => { - it("should sync eth price if it is within threshold", async () => { - const timeTolerance = 3600; - const ethUsdPriceFeed = await PriceFeed.create({ - client: getViemClient(), - dataFeedContractAddress: - env.ETH_PRICE_SYNCER_ETH_USD_PRICE_FEED_CONTRACT_ADDRESS! 
as `0x${string}`, - timeTolerance, - }); - - const ethPriceUpdaterWithTimeTolerance = new EthPriceUpdaterMock({ - ethUsdPriceFeed, - }); - - const workerProcessor = - ethPriceUpdaterWithTimeTolerance.getWorkerProcessor(); - - await workerProcessor(); - - const latestPrice = await prisma.ethUsdPrice.findFirst({ - orderBy: { - timestamp: "desc", - }, - }); - - const timestamp = dayjs.unix(Number(latestPrice?.timestamp)); - - expect(dayjs().diff(timestamp, "second")).toBeLessThanOrEqual( - timeTolerance - ); - }); - - it("should skip eth price if it is outside threshold", async () => { - const timeTolerance = 60; - - const ethUsdPriceFeed = await PriceFeed.create({ - client: getViemClient(), - dataFeedContractAddress: - env.ETH_PRICE_SYNCER_ETH_USD_PRICE_FEED_CONTRACT_ADDRESS! as `0x${string}`, - timeTolerance, - }); - const ethPriceUpdaterWithTimeTolerance = new EthPriceUpdaterMock({ - ethUsdPriceFeed, - }); - - const workerProcessor = - ethPriceUpdaterWithTimeTolerance.getWorkerProcessor(); - await workerProcessor(); - - const ethPrices = await prisma.ethUsdPrice.findMany(); - - expect(ethPrices).toHaveLength(0); - }); - }); -}); diff --git a/packages/syncers/test/__snapshots__/BaseSyncer.test.ts.snap b/packages/syncers/test/__snapshots__/BaseSyncer.test.ts.snap deleted file mode 100644 index dba557d81..000000000 --- a/packages/syncers/test/__snapshots__/BaseSyncer.test.ts.snap +++ /dev/null @@ -1,9 +0,0 @@ -// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html - -exports[`PeriodicUpdater > should throw a valid error when failing to close it 1`] = `"Syncer \\"test-updater-syncer\\" failed: An error ocurred when performing closing operation"`; - -exports[`PeriodicUpdater > should throw a valid error when failing to close it 2`] = `[Error: Queue closing error]`; - -exports[`PeriodicUpdater > when running an updater > should throw a valid error when failing to run 1`] = `"Syncer \\"test-updater-syncer\\" failed: An error ocurred when starting syncer"`; - 
-exports[`PeriodicUpdater > when running an updater > should throw a valid error when failing to run 2`] = `[Error: Queue error]`; diff --git a/packages/syncers/test/__snapshots__/PeriodicUpdater.test.ts.snap b/packages/syncers/test/__snapshots__/PeriodicUpdater.test.ts.snap deleted file mode 100644 index 5c860cbe0..000000000 --- a/packages/syncers/test/__snapshots__/PeriodicUpdater.test.ts.snap +++ /dev/null @@ -1,9 +0,0 @@ -// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html - -exports[`PeriodicUpdater > should throw a valid error when failing to close it 1`] = `"Updater \\"test-updater-updater\\" failed: An error ocurred when performing closing operation"`; - -exports[`PeriodicUpdater > should throw a valid error when failing to close it 2`] = `[Error: Queue closing error]`; - -exports[`PeriodicUpdater > when running an updater > should throw a valid error when failing to run 1`] = `"Updater \\"test-updater-updater\\" failed: An error ocurred when starting updater"`; - -exports[`PeriodicUpdater > when running an updater > should throw a valid error when failing to run 2`] = `[Error: Queue error]`; diff --git a/packages/syncers/test/__snapshots__/StatsSyncer.test.ts.snap b/packages/syncers/test/__snapshots__/StatsSyncer.test.ts.snap deleted file mode 100644 index 1ab5d442b..000000000 --- a/packages/syncers/test/__snapshots__/StatsSyncer.test.ts.snap +++ /dev/null @@ -1,9 +0,0 @@ -// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html - -exports[`StatsSyncer > when closing the stats syncer > should throw a valid error when failing to close it 1`] = `"Stats syncer failed: An error ocurred when closing syncer"`; - -exports[`StatsSyncer > when closing the stats syncer > should throw a valid error when failing to close it 2`] = `[Error: Some daily stats updater closing error]`; - -exports[`StatsSyncer > when running the stats syncer > should throw a valid error when failing to start it 1`] = `"Stats syncer failed: An error occurred when starting 
syncer"`; - -exports[`StatsSyncer > when running the stats syncer > should throw a valid error when failing to start it 2`] = `[Error: Something happened when trying to start daily stats updater]`; diff --git a/packages/syncers/test/__snapshots__/SwarmStampSyncer.test.ts.snap b/packages/syncers/test/__snapshots__/SwarmStampSyncer.test.ts.snap deleted file mode 100644 index 8385c3d78..000000000 --- a/packages/syncers/test/__snapshots__/SwarmStampSyncer.test.ts.snap +++ /dev/null @@ -1,9 +0,0 @@ -// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html - -exports[`SwarmStampSyncer > should fail when trying to fetch a non-existing batch 1`] = `"Failed to fetch stamp batch \\"6b538866048cfb6e9e1d06805374c51572c11219d2d550c03e6e277366cb0371\\""`; - -exports[`SwarmStampSyncer > should fail when trying to fetch a non-existing batch 2`] = `[SwarmNodeError: issuer does not exist]`; - -exports[`SwarmStampSyncer > should fail when trying to fetch an invalid batch 1`] = `"Failed to fetch stamp batch \\"invalid-batch\\""`; - -exports[`SwarmStampSyncer > should fail when trying to fetch an invalid batch 2`] = `[SwarmNodeError: invalid path params]`; diff --git a/packages/syncers/test/__snapshots__/SyncerManager.test.ts.snap b/packages/syncers/test/__snapshots__/SyncerManager.test.ts.snap deleted file mode 100644 index 606d6a8bc..000000000 --- a/packages/syncers/test/__snapshots__/SyncerManager.test.ts.snap +++ /dev/null @@ -1,9 +0,0 @@ -// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html - -exports[`SyncerManager > when closing the syncer manager > should throw a valid error when failing to close it 1`] = `"Periodic updater manager failed: An error ocurred when closing syncers"`; - -exports[`SyncerManager > when closing the syncer manager > should throw a valid error when failing to close it 2`] = `[Error: Some daily stats updater closing error]`; - -exports[`SyncerManager > when running the syncer manager > should throw a valid error when failing to start it 1`] 
= `"Periodic updater manager failed: An error occurred when starting syncers"`; - -exports[`SyncerManager > when running the syncer manager > should throw a valid error when failing to start it 2`] = `[Error: Something happened when trying to start daily stats updater]`; diff --git a/packages/syncers/test/helpers.ts b/packages/syncers/test/helpers.ts deleted file mode 100644 index 0c79d5a59..000000000 --- a/packages/syncers/test/helpers.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { setupServer } from "msw/node"; - -export function createServer(handlers: Parameters[0][]) { - const server = setupServer(...handlers); - - server.listen(); - - return () => { - server.close(); - }; -} diff --git a/packages/syncers/tsconfig.json b/packages/syncers/tsconfig.json deleted file mode 100644 index aff0b2dcd..000000000 --- a/packages/syncers/tsconfig.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "extends": "@blobscan/tsconfig/base.production.json", - "include": ["src/**/*.ts", "test/**/*.ts", "vitest.config.ts"] -} diff --git a/packages/zod/src/create-env.ts b/packages/zod/src/create-env.ts index 98ae38cca..6a2407c41 100644 --- a/packages/zod/src/create-env.ts +++ b/packages/zod/src/create-env.ts @@ -43,3 +43,39 @@ export function maskPassword(uri: string | undefined) { return uri?.replace(regex, (_, username) => `://${username}:****@`); } + +export function maskJSONRPCUrl(url?: string): string | undefined { + if (!url) { + return; + } + + try { + const parsed = new URL(url); + const hostParts = parsed.hostname.split("."); + const domain = hostParts.slice(-3).join("."); + const network = hostParts.slice(1, 2)[0]; + return `https://****.${network}.${domain + .split(".") + .slice(1) + .join(".")}/****`; + } catch { + return "****"; + } +} + +export function maskConnectionUrl(url: string): string { + try { + const parsed = new URL(url); + + // Mask username and password if present + if (parsed.username || parsed.password) { + parsed.username = "***"; + parsed.password = "***"; + } + + 
return parsed.toString(); + } catch { + // Fallback if URL is malformed or not standard + return url.replace(/\/\/([^:@]+):([^@]+)@/, "//***:***@"); + } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 833a2f0b3..3e1f287ab 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -123,7 +123,7 @@ importers: specifier: ^2.1.0 version: 2.1.1 fast-glob: - specifier: ^3.2.12 + specifier: ^3.3.2 version: 3.3.2 flexsearch: specifier: ^0.7.31 @@ -175,6 +175,52 @@ importers: specifier: ^0.2.8 version: 0.2.8(@ianvs/prettier-plugin-sort-imports@3.7.2(prettier@2.8.8))(prettier@2.8.8) + apps/jobs: + dependencies: + '@blobscan/dates': + specifier: workspace:^0.0.1 + version: link:../../packages/dates + '@blobscan/dayjs': + specifier: workspace:^0.1.0 + version: link:../../packages/dayjs + '@blobscan/db': + specifier: workspace:^0.18.0 + version: link:../../packages/db + '@blobscan/logger': + specifier: workspace:^0.1.2 + version: link:../../packages/logger + '@blobscan/price-feed': + specifier: workspace:^0.1.0 + version: link:../../packages/price-feed + '@blobscan/zod': + specifier: workspace:^0.1.0 + version: link:../../packages/zod + axios: + specifier: ^1.7.2 + version: 1.7.7 + bullmq: + specifier: ^4.13.2 + version: 4.17.0 + ioredis: + specifier: ^5.3.2 + version: 5.4.1 + viem: + specifier: ^2.17.4 + version: 2.22.17(typescript@5.5.3)(zod@3.23.8) + devDependencies: + concurrently: + specifier: ^9.2.0 + version: 9.2.0 + esbuild: + specifier: 0.25.5 + version: 0.25.5 + fast-glob: + specifier: ^3.3.2 + version: 3.3.2 + tsx: + specifier: ^4.19.2 + version: 4.19.2 + apps/rest-api-server: dependencies: '@blobscan/api': @@ -192,9 +238,6 @@ importers: '@blobscan/price-feed': specifier: workspace:^0.1.0 version: link:../../packages/price-feed - '@blobscan/syncers': - specifier: workspace:^0.5.0 - version: link:../../packages/syncers '@blobscan/zod': specifier: workspace:^0.1.0 version: link:../../packages/zod @@ -738,36 +781,6 @@ importers: specifier: workspace:^0.18.0 version: 
link:../db - packages/syncers: - dependencies: - '@blobscan/dates': - specifier: workspace:* - version: link:../dates - '@blobscan/dayjs': - specifier: workspace:^0.1.0 - version: link:../dayjs - '@blobscan/db': - specifier: workspace:^0.18.0 - version: link:../db - '@blobscan/logger': - specifier: workspace:^0.1.2 - version: link:../logger - '@blobscan/price-feed': - specifier: workspace:^0.1.0 - version: link:../price-feed - '@blobscan/zod': - specifier: workspace:^0.1.0 - version: link:../zod - axios: - specifier: ^1.7.2 - version: 1.7.2 - bullmq: - specifier: ^4.13.2 - version: 4.17.0 - ioredis: - specifier: ^5.3.2 - version: 5.4.1 - packages/test: devDependencies: '@blobscan/dayjs': @@ -1757,6 +1770,12 @@ packages: cpu: [ppc64] os: [aix] + '@esbuild/aix-ppc64@0.25.5': + resolution: {integrity: sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + '@esbuild/android-arm64@0.20.2': resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==} engines: {node: '>=12'} @@ -1769,6 +1788,12 @@ packages: cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.25.5': + resolution: {integrity: sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm@0.20.2': resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==} engines: {node: '>=12'} @@ -1781,6 +1806,12 @@ packages: cpu: [arm] os: [android] + '@esbuild/android-arm@0.25.5': + resolution: {integrity: sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + '@esbuild/android-x64@0.20.2': resolution: {integrity: 
sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==} engines: {node: '>=12'} @@ -1793,6 +1824,12 @@ packages: cpu: [x64] os: [android] + '@esbuild/android-x64@0.25.5': + resolution: {integrity: sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + '@esbuild/darwin-arm64@0.20.2': resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==} engines: {node: '>=12'} @@ -1805,6 +1842,12 @@ packages: cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.25.5': + resolution: {integrity: sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-x64@0.20.2': resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==} engines: {node: '>=12'} @@ -1817,6 +1860,12 @@ packages: cpu: [x64] os: [darwin] + '@esbuild/darwin-x64@0.25.5': + resolution: {integrity: sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + '@esbuild/freebsd-arm64@0.20.2': resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==} engines: {node: '>=12'} @@ -1829,6 +1878,12 @@ packages: cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.25.5': + resolution: {integrity: sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-x64@0.20.2': resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==} engines: {node: '>=12'} @@ -1841,6 +1896,12 @@ packages: cpu: [x64] os: [freebsd] + 
'@esbuild/freebsd-x64@0.25.5': + resolution: {integrity: sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + '@esbuild/linux-arm64@0.20.2': resolution: {integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==} engines: {node: '>=12'} @@ -1853,6 +1914,12 @@ packages: cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.25.5': + resolution: {integrity: sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm@0.20.2': resolution: {integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==} engines: {node: '>=12'} @@ -1865,6 +1932,12 @@ packages: cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.25.5': + resolution: {integrity: sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + '@esbuild/linux-ia32@0.20.2': resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==} engines: {node: '>=12'} @@ -1877,6 +1950,12 @@ packages: cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.25.5': + resolution: {integrity: sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-loong64@0.20.2': resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==} engines: {node: '>=12'} @@ -1889,6 +1968,12 @@ packages: cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.25.5': + resolution: {integrity: sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + 
'@esbuild/linux-mips64el@0.20.2': resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==} engines: {node: '>=12'} @@ -1901,6 +1986,12 @@ packages: cpu: [mips64el] os: [linux] + '@esbuild/linux-mips64el@0.25.5': + resolution: {integrity: sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-ppc64@0.20.2': resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==} engines: {node: '>=12'} @@ -1913,6 +2004,12 @@ packages: cpu: [ppc64] os: [linux] + '@esbuild/linux-ppc64@0.25.5': + resolution: {integrity: sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-riscv64@0.20.2': resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==} engines: {node: '>=12'} @@ -1925,6 +2022,12 @@ packages: cpu: [riscv64] os: [linux] + '@esbuild/linux-riscv64@0.25.5': + resolution: {integrity: sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-s390x@0.20.2': resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==} engines: {node: '>=12'} @@ -1937,6 +2040,12 @@ packages: cpu: [s390x] os: [linux] + '@esbuild/linux-s390x@0.25.5': + resolution: {integrity: sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-x64@0.20.2': resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==} engines: {node: '>=12'} @@ -1949,6 
+2058,18 @@ packages: cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.25.5': + resolution: {integrity: sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.25.5': + resolution: {integrity: sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + '@esbuild/netbsd-x64@0.20.2': resolution: {integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==} engines: {node: '>=12'} @@ -1961,12 +2082,24 @@ packages: cpu: [x64] os: [netbsd] + '@esbuild/netbsd-x64@0.25.5': + resolution: {integrity: sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + '@esbuild/openbsd-arm64@0.23.1': resolution: {integrity: sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] + '@esbuild/openbsd-arm64@0.25.5': + resolution: {integrity: sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-x64@0.20.2': resolution: {integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==} engines: {node: '>=12'} @@ -1979,6 +2112,12 @@ packages: cpu: [x64] os: [openbsd] + '@esbuild/openbsd-x64@0.25.5': + resolution: {integrity: sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + '@esbuild/sunos-x64@0.20.2': resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==} engines: {node: '>=12'} @@ -1991,6 +2130,12 @@ packages: cpu: [x64] 
os: [sunos] + '@esbuild/sunos-x64@0.25.5': + resolution: {integrity: sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + '@esbuild/win32-arm64@0.20.2': resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==} engines: {node: '>=12'} @@ -2003,6 +2148,12 @@ packages: cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.25.5': + resolution: {integrity: sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-ia32@0.20.2': resolution: {integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==} engines: {node: '>=12'} @@ -2015,6 +2166,12 @@ packages: cpu: [ia32] os: [win32] + '@esbuild/win32-ia32@0.25.5': + resolution: {integrity: sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + '@esbuild/win32-x64@0.20.2': resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==} engines: {node: '>=12'} @@ -2027,6 +2184,12 @@ packages: cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.25.5': + resolution: {integrity: sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + '@eslint-community/eslint-utils@4.4.0': resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -3406,6 +3569,9 @@ packages: '@types/estree@1.0.5': resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} + '@types/estree@1.0.8': + resolution: {integrity: 
sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/express-serve-static-core@4.17.41': resolution: {integrity: sha512-OaJ7XLaelTgrvlZD8/aa0vvvxZdUmlCn6MtWeB7TkiKW70BQLc9XEPpDLPdbo52ZhXUCrznlWdCHWxJWtdyajA==} @@ -3753,6 +3919,11 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} + engines: {node: '>=0.4.0'} + hasBin: true + agent-base@6.0.2: resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} engines: {node: '>= 6.0.0'} @@ -4214,6 +4385,11 @@ packages: concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + concurrently@9.2.0: + resolution: {integrity: sha512-IsB/fiXTupmagMW4MNp2lx2cdSN2FfZq78vF90LBB+zZHArbIQZjQtzXCiXnvTxCZSvXanTqFLWBjw2UkLx1SQ==} + engines: {node: '>=18'} + hasBin: true + confbox@0.1.7: resolution: {integrity: sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==} @@ -4632,6 +4808,11 @@ packages: engines: {node: '>=18'} hasBin: true + esbuild@0.25.5: + resolution: {integrity: sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==} + engines: {node: '>=18'} + hasBin: true + escalade@3.1.2: resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==} engines: {node: '>=6'} @@ -6695,6 +6876,9 @@ packages: rusha@0.8.14: resolution: {integrity: sha512-cLgakCUf6PedEu15t8kbsjnwIFFR2D4RfL+W3iWFJ4iac7z4B0ZI8fxy4R3J956kAI68HclCFGL8MPoUVC3qVA==} + rxjs@7.8.2: + resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==} + safe-array-concat@1.1.2: resolution: {integrity: 
sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==} engines: {node: '>=0.4'} @@ -6793,6 +6977,10 @@ packages: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} + shell-quote@1.8.3: + resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==} + engines: {node: '>= 0.4'} + shimmer@1.2.1: resolution: {integrity: sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==} @@ -8750,7 +8938,7 @@ snapshots: outdent: 0.5.0 prettier: 2.8.8 resolve-from: 5.0.0 - semver: 7.6.3 + semver: 7.7.2 '@changesets/assemble-release-plan@6.0.3': dependencies: @@ -8760,7 +8948,7 @@ snapshots: '@changesets/should-skip-package': 0.1.0 '@changesets/types': 6.0.0 '@manypkg/get-packages': 1.1.3 - semver: 7.6.3 + semver: 7.7.2 '@changesets/changelog-git@0.2.0': dependencies: @@ -8829,7 +9017,7 @@ snapshots: '@manypkg/get-packages': 1.1.3 chalk: 2.4.2 fs-extra: 7.0.1 - semver: 7.6.3 + semver: 7.7.2 '@changesets/get-github-info@0.6.0(encoding@0.1.13)': dependencies: @@ -8857,7 +9045,7 @@ snapshots: '@changesets/types': 6.0.0 '@manypkg/get-packages': 1.1.3 is-subdir: 1.2.0 - micromatch: 4.0.5 + micromatch: 4.0.7 spawndamnit: 2.0.0 '@changesets/logger@0.1.0': @@ -8931,141 +9119,216 @@ snapshots: '@esbuild/aix-ppc64@0.23.1': optional: true + '@esbuild/aix-ppc64@0.25.5': + optional: true + '@esbuild/android-arm64@0.20.2': optional: true '@esbuild/android-arm64@0.23.1': optional: true + '@esbuild/android-arm64@0.25.5': + optional: true + '@esbuild/android-arm@0.20.2': optional: true '@esbuild/android-arm@0.23.1': optional: true + '@esbuild/android-arm@0.25.5': + optional: true + '@esbuild/android-x64@0.20.2': optional: true '@esbuild/android-x64@0.23.1': optional: true + '@esbuild/android-x64@0.25.5': + optional: true + '@esbuild/darwin-arm64@0.20.2': optional: true 
'@esbuild/darwin-arm64@0.23.1': optional: true + '@esbuild/darwin-arm64@0.25.5': + optional: true + '@esbuild/darwin-x64@0.20.2': optional: true '@esbuild/darwin-x64@0.23.1': optional: true + '@esbuild/darwin-x64@0.25.5': + optional: true + '@esbuild/freebsd-arm64@0.20.2': optional: true '@esbuild/freebsd-arm64@0.23.1': optional: true + '@esbuild/freebsd-arm64@0.25.5': + optional: true + '@esbuild/freebsd-x64@0.20.2': optional: true '@esbuild/freebsd-x64@0.23.1': optional: true + '@esbuild/freebsd-x64@0.25.5': + optional: true + '@esbuild/linux-arm64@0.20.2': optional: true '@esbuild/linux-arm64@0.23.1': optional: true + '@esbuild/linux-arm64@0.25.5': + optional: true + '@esbuild/linux-arm@0.20.2': optional: true '@esbuild/linux-arm@0.23.1': optional: true + '@esbuild/linux-arm@0.25.5': + optional: true + '@esbuild/linux-ia32@0.20.2': optional: true '@esbuild/linux-ia32@0.23.1': optional: true + '@esbuild/linux-ia32@0.25.5': + optional: true + '@esbuild/linux-loong64@0.20.2': optional: true '@esbuild/linux-loong64@0.23.1': optional: true + '@esbuild/linux-loong64@0.25.5': + optional: true + '@esbuild/linux-mips64el@0.20.2': optional: true '@esbuild/linux-mips64el@0.23.1': optional: true + '@esbuild/linux-mips64el@0.25.5': + optional: true + '@esbuild/linux-ppc64@0.20.2': optional: true '@esbuild/linux-ppc64@0.23.1': optional: true + '@esbuild/linux-ppc64@0.25.5': + optional: true + '@esbuild/linux-riscv64@0.20.2': optional: true '@esbuild/linux-riscv64@0.23.1': optional: true + '@esbuild/linux-riscv64@0.25.5': + optional: true + '@esbuild/linux-s390x@0.20.2': optional: true '@esbuild/linux-s390x@0.23.1': optional: true + '@esbuild/linux-s390x@0.25.5': + optional: true + '@esbuild/linux-x64@0.20.2': optional: true '@esbuild/linux-x64@0.23.1': optional: true + '@esbuild/linux-x64@0.25.5': + optional: true + + '@esbuild/netbsd-arm64@0.25.5': + optional: true + '@esbuild/netbsd-x64@0.20.2': optional: true '@esbuild/netbsd-x64@0.23.1': optional: true + 
'@esbuild/netbsd-x64@0.25.5': + optional: true + '@esbuild/openbsd-arm64@0.23.1': optional: true + '@esbuild/openbsd-arm64@0.25.5': + optional: true + '@esbuild/openbsd-x64@0.20.2': optional: true '@esbuild/openbsd-x64@0.23.1': optional: true + '@esbuild/openbsd-x64@0.25.5': + optional: true + '@esbuild/sunos-x64@0.20.2': optional: true '@esbuild/sunos-x64@0.23.1': optional: true + '@esbuild/sunos-x64@0.25.5': + optional: true + '@esbuild/win32-arm64@0.20.2': optional: true '@esbuild/win32-arm64@0.23.1': optional: true + '@esbuild/win32-arm64@0.25.5': + optional: true + '@esbuild/win32-ia32@0.20.2': optional: true '@esbuild/win32-ia32@0.23.1': optional: true + '@esbuild/win32-ia32@0.25.5': + optional: true + '@esbuild/win32-x64@0.20.2': optional: true '@esbuild/win32-x64@0.23.1': optional: true + '@esbuild/win32-x64@0.25.5': + optional: true + '@eslint-community/eslint-utils@4.4.0(eslint@8.57.0)': dependencies: eslint: 8.57.0 @@ -9723,7 +9986,7 @@ snapshots: '@types/shimmer': 1.2.0 import-in-the-middle: 1.4.2 require-in-the-middle: 7.3.0 - semver: 7.6.3 + semver: 7.7.2 shimmer: 1.2.1 transitivePeerDependencies: - supports-color @@ -9734,7 +9997,7 @@ snapshots: '@types/shimmer': 1.2.0 import-in-the-middle: 1.4.2 require-in-the-middle: 7.3.0 - semver: 7.6.3 + semver: 7.7.2 shimmer: 1.2.1 transitivePeerDependencies: - supports-color @@ -9745,7 +10008,7 @@ snapshots: '@types/shimmer': 1.2.0 import-in-the-middle: 1.4.2 require-in-the-middle: 7.3.0 - semver: 7.6.3 + semver: 7.7.2 shimmer: 1.2.1 transitivePeerDependencies: - supports-color @@ -9757,7 +10020,7 @@ snapshots: '@types/shimmer': 1.0.5 import-in-the-middle: 1.7.1 require-in-the-middle: 7.3.0 - semver: 7.6.3 + semver: 7.7.2 shimmer: 1.2.1 transitivePeerDependencies: - supports-color @@ -9870,7 +10133,7 @@ snapshots: '@opentelemetry/propagator-b3': 1.15.2(@opentelemetry/api@1.8.0) '@opentelemetry/propagator-jaeger': 1.15.2(@opentelemetry/api@1.8.0) '@opentelemetry/sdk-trace-base': 1.15.2(@opentelemetry/api@1.8.0) 
- semver: 7.6.3 + semver: 7.7.2 '@opentelemetry/semantic-conventions@1.15.2': {} @@ -10497,7 +10760,7 @@ snapshots: '@types/cors@2.8.17': dependencies: - '@types/node': 20.14.11 + '@types/node': 22.15.18 '@types/cross-spawn@6.0.2': dependencies: @@ -10514,7 +10777,7 @@ snapshots: '@types/eslint-scope@3.7.7': dependencies: '@types/eslint': 8.56.10 - '@types/estree': 1.0.5 + '@types/estree': 1.0.8 optional: true '@types/eslint@8.56.10': @@ -10524,6 +10787,9 @@ snapshots: '@types/estree@1.0.5': {} + '@types/estree@1.0.8': + optional: true + '@types/express-serve-static-core@4.17.41': dependencies: '@types/node': 22.15.18 @@ -10588,7 +10854,7 @@ snapshots: '@types/morgan@1.9.9': dependencies: - '@types/node': 20.14.11 + '@types/node': 22.15.18 '@types/ms@2.1.0': {} @@ -10722,7 +10988,7 @@ snapshots: debug: 4.3.5 globby: 11.1.0 is-glob: 4.0.3 - semver: 7.6.3 + semver: 7.7.2 tsutils: 3.21.0(typescript@5.5.3) optionalDependencies: typescript: 5.5.3 @@ -10739,7 +11005,7 @@ snapshots: '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.5.3) eslint: 8.57.0 eslint-scope: 5.1.1 - semver: 7.6.3 + semver: 7.7.2 transitivePeerDependencies: - supports-color - typescript @@ -10955,9 +11221,9 @@ snapshots: dependencies: acorn: 8.12.1 - acorn-import-attributes@1.9.5(acorn@8.12.1): + acorn-import-attributes@1.9.5(acorn@8.15.0): dependencies: - acorn: 8.12.1 + acorn: 8.15.0 optional: true acorn-jsx@5.3.2(acorn@8.12.1): @@ -10970,6 +11236,9 @@ snapshots: acorn@8.12.1: {} + acorn@8.15.0: + optional: true + agent-base@6.0.2: dependencies: debug: 4.3.5 @@ -11313,7 +11582,7 @@ snapshots: lodash: 4.17.21 msgpackr: 1.10.1 node-abort-controller: 3.1.1 - semver: 7.6.3 + semver: 7.7.2 tslib: 2.6.3 uuid: 9.0.1 transitivePeerDependencies: @@ -11524,6 +11793,16 @@ snapshots: concat-map@0.0.1: {} + concurrently@9.2.0: + dependencies: + chalk: 4.1.2 + lodash: 4.17.21 + rxjs: 7.8.2 + shell-quote: 1.8.3 + supports-color: 8.1.1 + tree-kill: 1.2.2 + yargs: 17.7.2 + confbox@0.1.7: {} 
config-chain@1.1.13: @@ -12040,6 +12319,34 @@ snapshots: '@esbuild/win32-ia32': 0.23.1 '@esbuild/win32-x64': 0.23.1 + esbuild@0.25.5: + optionalDependencies: + '@esbuild/aix-ppc64': 0.25.5 + '@esbuild/android-arm': 0.25.5 + '@esbuild/android-arm64': 0.25.5 + '@esbuild/android-x64': 0.25.5 + '@esbuild/darwin-arm64': 0.25.5 + '@esbuild/darwin-x64': 0.25.5 + '@esbuild/freebsd-arm64': 0.25.5 + '@esbuild/freebsd-x64': 0.25.5 + '@esbuild/linux-arm': 0.25.5 + '@esbuild/linux-arm64': 0.25.5 + '@esbuild/linux-ia32': 0.25.5 + '@esbuild/linux-loong64': 0.25.5 + '@esbuild/linux-mips64el': 0.25.5 + '@esbuild/linux-ppc64': 0.25.5 + '@esbuild/linux-riscv64': 0.25.5 + '@esbuild/linux-s390x': 0.25.5 + '@esbuild/linux-x64': 0.25.5 + '@esbuild/netbsd-arm64': 0.25.5 + '@esbuild/netbsd-x64': 0.25.5 + '@esbuild/openbsd-arm64': 0.25.5 + '@esbuild/openbsd-x64': 0.25.5 + '@esbuild/sunos-x64': 0.25.5 + '@esbuild/win32-arm64': 0.25.5 + '@esbuild/win32-ia32': 0.25.5 + '@esbuild/win32-x64': 0.25.5 + escalade@3.1.2: {} escape-html@1.0.3: {} @@ -12378,7 +12685,7 @@ snapshots: '@nodelib/fs.walk': 1.2.8 glob-parent: 5.1.2 merge2: 1.4.1 - micromatch: 4.0.5 + micromatch: 4.0.7 fast-json-stable-stringify@2.1.0: {} @@ -13307,7 +13614,7 @@ snapshots: make-dir@4.0.0: dependencies: - semver: 7.6.3 + semver: 7.7.2 make-error@1.3.6: {} @@ -13818,7 +14125,7 @@ snapshots: got: 12.6.1 registry-auth-token: 5.0.2 registry-url: 6.0.1 - semver: 7.6.3 + semver: 7.7.2 parent-module@1.0.1: dependencies: @@ -14316,6 +14623,10 @@ snapshots: rusha@0.8.14: {} + rxjs@7.8.2: + dependencies: + tslib: 2.6.3 + safe-array-concat@1.1.2: dependencies: call-bind: 1.0.7 @@ -14457,6 +14768,8 @@ snapshots: shebang-regex@3.0.0: {} + shell-quote@1.8.3: {} + shimmer@1.2.1: {} side-channel@1.0.6: @@ -14673,7 +14986,6 @@ snapshots: supports-color@8.1.1: dependencies: has-flag: 4.0.0 - optional: true supports-preserve-symlinks-flag@1.0.0: {} @@ -14776,7 +15088,7 @@ snapshots: terser@5.32.0: dependencies: '@jridgewell/source-map': 0.3.6 - 
acorn: 8.12.1 + acorn: 8.15.0 commander: 2.20.3 source-map-support: 0.5.21 optional: true @@ -15233,12 +15545,12 @@ snapshots: webpack@5.93.0: dependencies: '@types/eslint-scope': 3.7.7 - '@types/estree': 1.0.5 + '@types/estree': 1.0.8 '@webassemblyjs/ast': 1.12.1 '@webassemblyjs/wasm-edit': 1.12.1 '@webassemblyjs/wasm-parser': 1.12.1 - acorn: 8.12.1 - acorn-import-attributes: 1.9.5(acorn@8.12.1) + acorn: 8.15.0 + acorn-import-attributes: 1.9.5(acorn@8.15.0) browserslist: 4.23.3 chrome-trace-event: 1.0.4 enhanced-resolve: 5.17.1 @@ -15428,4 +15740,4 @@ snapshots: zx@8.1.4: optionalDependencies: '@types/fs-extra': 11.0.4 - '@types/node': 20.14.11 + '@types/node': 22.15.18 diff --git a/vitest.workspace.ts b/vitest.workspace.ts index 3db97adbf..99bc8c06d 100644 --- a/vitest.workspace.ts +++ b/vitest.workspace.ts @@ -3,4 +3,5 @@ import { defineWorkspace } from "vitest/config"; export default defineWorkspace([ "clis/*/vitest.config.ts", "packages/*/vitest.config.ts", + "apps/*/vitest.config.ts", ]);