diff --git a/CHANGELOG.md b/CHANGELOG.md index 16e9bac..76f877b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ All notable changes to this project will be documented in this file. +## [0.64.0] - 2023-08-09 + +### New + +- Added account BOC provider via evernode RPC (new config parameter 'accountProvider`). + ## [0.63.0] - 2023-07-25 ### New diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json index 1b6c515..9b8973a 100644 --- a/npm-shrinkwrap.json +++ b/npm-shrinkwrap.json @@ -1,18 +1,19 @@ { "name": "ton-q-server", - "version": "0.63.0", + "version": "0.64.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "ton-q-server", - "version": "0.63.0", + "version": "0.64.0", "license": "ISC", "dependencies": { "@aws-sdk/client-s3": "^3.347.1", - "@eversdk/core": "^1", - "@eversdk/lib-node": "^1", + "@eversdk/core": "^1.44.1", + "@eversdk/lib-node": "^1.44.1", "@node-redis/client": "^1.0.6", + "@open-rpc/client-js": "^1.8.1", "apollo-cache-inmemory": "1.6.6", "apollo-client": "2.6.10", "apollo-link": "1.2.14", @@ -52,6 +53,7 @@ "@graphql-codegen/typescript": "^2.0.0", "@graphql-codegen/typescript-resolvers": "^2.0.0", "@types/ioredis": "^4.28.10", + "@types/isomorphic-fetch": "0.0.36", "@types/jaeger-client": "^3.18.1", "@types/jest": "^26.0.23", "@types/md5": "^2.3.2", @@ -3656,17 +3658,17 @@ "dev": true }, "node_modules/@eversdk/core": { - "version": "1.42.0", - "resolved": "https://registry.npmjs.org/@eversdk/core/-/core-1.42.0.tgz", - "integrity": "sha512-slkUTFh2uvWCERnYIECaqg1FQO777zC4acNiQPZr7tupUI5FZRJPEYJTu5pqeNaSZ1e0YWzpt9vnVZTD36tgAA==", + "version": "1.44.1", + "resolved": "https://registry.npmjs.org/@eversdk/core/-/core-1.44.1.tgz", + "integrity": "sha512-8v4M19MCsbqrUqZB861rakXeE3/xCM9Dd8oczxZtyJhppmRX3Fk5ltAbwS1oL/+oo2f7nEiXAkz8Zq39KjTVsw==", "engines": { "node": ">=6" } }, "node_modules/@eversdk/lib-node": { - "version": "1.42.0", - "resolved": "https://registry.npmjs.org/@eversdk/lib-node/-/lib-node-1.42.0.tgz", - "integrity": "sha512-as35DMmMYjP3xeHLPEp6wU0d+xpGCpVGW0U8rvJV4CcDekj47N82jTJtNywChDThQVmer6aLqJ5ncM/ygGaxXQ==", + "version": "1.44.1", + "resolved": "https://registry.npmjs.org/@eversdk/lib-node/-/lib-node-1.44.1.tgz", + "integrity": "sha512-4EY2WE8jNAKjAs5X9kOeJOODoJpUBx0VP3cuRXMnBUP4AmQlLO4CpIRgoTWqXnFiDtuBhaeUPjtxDWcQTj0rcw==", "hasInstallScript": true, "engines": { "node": ">=6" @@ -6042,6 +6044,25 @@ "node": ">= 8" } }, + "node_modules/@open-rpc/client-js": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@open-rpc/client-js/-/client-js-1.8.1.tgz", + "integrity": "sha512-vV+Hetl688nY/oWI9IFY0iKDrWuLdYhf7OIKI6U1DcnJV7r4gAgwRJjEr1QVYszUc0gjkHoQJzqevmXMGLyA0g==", + "dependencies": { + "isomorphic-fetch": "^3.0.0", + "isomorphic-ws": "^5.0.0", + "strict-event-emitter-types": "^2.0.0", + "ws": "^7.0.0" + } + }, + "node_modules/@open-rpc/client-js/node_modules/isomorphic-ws": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz", + "integrity": "sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==", + "peerDependencies": { + "ws": "*" + } + }, "node_modules/@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -6379,6 +6400,12 @@ "@types/node": "*" } }, + "node_modules/@types/isomorphic-fetch": { + "version": "0.0.36", + "resolved": "https://registry.npmjs.org/@types/isomorphic-fetch/-/isomorphic-fetch-0.0.36.tgz", + "integrity": 
"sha512-ulw4d+vW1HKn4oErSmNN2HYEcHGq0N1C5exlrMM0CRqX1UUpFhGb5lwiom5j9KN3LBJJDLRmYIZz1ghm7FIzZw==", + "dev": true + }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz", @@ -11979,7 +12006,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz", "integrity": "sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==", - "dev": true, "dependencies": { "node-fetch": "^2.6.1", "whatwg-fetch": "^3.4.1" @@ -18171,6 +18197,11 @@ "node": ">=0.8.0" } }, + "node_modules/strict-event-emitter-types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strict-event-emitter-types/-/strict-event-emitter-types-2.0.0.tgz", + "integrity": "sha512-Nk/brWYpD85WlOgzw5h173aci0Teyv8YdIAEtV+N88nDB0dLlazZyJMIsN6eo1/AR61l+p6CJTG1JIyFaoNEEA==" + }, "node_modules/string-env-interpolation": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/string-env-interpolation/-/string-env-interpolation-1.0.1.tgz", @@ -19179,8 +19210,7 @@ "node_modules/whatwg-fetch": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz", - "integrity": "sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA==", - "dev": true + "integrity": "sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA==" }, "node_modules/whatwg-mimetype": { "version": "2.3.0", diff --git a/package.json b/package.json index cf988a5..9037a2a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ton-q-server", - "version": "0.63.0", + "version": "0.64.0", "description": "TON Q Server – realtime queries over TON blockchain.", "main": "index.js", "repository": "git@github.com:tonlabs/ton-q-server.git", @@ -29,10 +29,11 @@ } }, "dependencies": { - "@eversdk/core": "^1", - "@eversdk/lib-node": "^1", - "@node-redis/client": "^1.0.6", "@aws-sdk/client-s3": "^3.347.1", + "@eversdk/core": "^1.44.1", + "@eversdk/lib-node": "^1.44.1", + "@node-redis/client": "^1.0.6", + "@open-rpc/client-js": "^1.8.1", "apollo-cache-inmemory": "1.6.6", "apollo-client": "2.6.10", "apollo-link": "1.2.14", @@ -72,6 +73,7 @@ "@graphql-codegen/typescript": "^2.0.0", "@graphql-codegen/typescript-resolvers": "^2.0.0", "@types/ioredis": "^4.28.10", + "@types/isomorphic-fetch": "0.0.36", "@types/jaeger-client": "^3.18.1", "@types/jest": "^26.0.23", "@types/md5": "^2.3.2", diff --git a/res/type-defs-blockchain/blockchain.graphql b/res/type-defs-blockchain/blockchain.graphql index 4a383e8..06354cb 100644 --- a/res/type-defs-blockchain/blockchain.graphql +++ b/res/type-defs-blockchain/blockchain.graphql @@ -185,6 +185,13 @@ type BlockchainQuery { time_start: Int, "End of the time range, exclusive" time_end: Int, + """ + Defines query scope. + If `true` then query performed on a maximum time range supported by the cloud. + If `false` then query performed on a recent time range supported by the cloud. + You can find an actual information about time ranges on evercloud documentation. 
+ """ + archive: Boolean ): BlockchainMasterSeqNoRange """ diff --git a/src/__tests__/blockchain-data.ts b/src/__tests__/blockchain-mock-data.ts similarity index 99% rename from src/__tests__/blockchain-data.ts rename to src/__tests__/blockchain-mock-data.ts index b17f5ac..435a2cc 100644 --- a/src/__tests__/blockchain-data.ts +++ b/src/__tests__/blockchain-mock-data.ts @@ -21472,6 +21472,35 @@ export const accounts = [ public_cells_dec: "0", workchain_id: 0, }, + { + _key: "0:198880de2ac28bcf71ab8082d7132d22c337879351cae8b48dd397aadf12f206", + _id: "accounts/0:198880de2ac28bcf71ab8082d7132d22c337879351cae8b48dd397aadf12f206", + _rev: "_d5F6M7---A", + acc_type: 1, + balance: "09504e92f56a", + balance_dec: "344915637610", + bits: "3860b", + bits_dec: "34315", + boc: "te6ccg..boc..", + cells: "14a", + cells_dec: "74", + code: "te6ccg..code..", + code_hash: + "80d6c47c4a25543c9b397b71716f3fae1e2c5d247174c52e2c19bd896442b105", + data: "te6ccg..data..", + data_hash: + "ade3c3fb095d33e8958d93245fc6b02dbd9a805a0405b295e0aa76497bf27d52", + id: "0:3d3442a1de0c4f720ee64546ef9714fe0dd83d848115652253b2d7a782c2c954", + json_version: 8, + last_paid: 1647958564, + last_trans_lt: "b1699bfd1b902", + last_trans_lt_dec: "24849604000002", + prev_code_hash: + "80d6c47c4a25543c9b397b71716f3fae1e2c5d247174c52e2c19bd896442b105", + public_cells: "00", + public_cells_dec: "0", + workchain_id: 0, + }, ] export const summary = { diff --git a/src/__tests__/blockchain-mock.ts b/src/__tests__/blockchain-mock.ts new file mode 100644 index 0000000..e2b1a6f --- /dev/null +++ b/src/__tests__/blockchain-mock.ts @@ -0,0 +1,272 @@ +import express from "express" +import { Database } from "arangojs" +import { + accounts as accountsData, + blocks as blocksData, + messages as messagesData, + summary as chainRangesVerificationSummary, + transactions as transactionData, +} from "./blockchain-mock-data" +import { QConfig, resolveConfig, SubscriptionsMode } from "../server/config" +import TONQServer, { DataProviderFactory } from "../server/server" +import QLogs from "../server/logs" +import QBlockchainData from "../server/data/blockchain" +import { QTracer } from "../server/tracing" +import { createTestClient, testConfig } from "./init-tests" +import { QStats } from "../server/stats" +import { createBocProvider } from "../server/data/boc-provider" +import { createAccountProvider } from "../server/data/account-provider" +import { cloneDeep } from "../server/utils" +import { ApolloClient } from "apollo-client" +import gql from "graphql-tag" +import { + blockArchiveFields, + messageArchiveFields, + transactionArchiveFields, +} from "../server/graphql/blockchain/boc-parsers" +import isObject from "subscriptions-transport-ws/dist/utils/is-object" + +function getTestDbServer(): string { + return ( + process.env.Q_DATA_MUT ?? + process.env.Q_ACCOUNTS ?? + "http://localhost:8901" + ) +} + +function getTestDbName(archive: boolean): string { + const TEST_DB_NAME = "Q-server_test_db" + const TEST_ARCHIVE_DB_NAME = "Q-server_test_archive_db" + return archive ? 
TEST_ARCHIVE_DB_NAME : TEST_DB_NAME +} + +function getTestDbUrl(archive: boolean): string { + const url = new URL(getTestDbServer()) + url.searchParams.set("name", getTestDbName(archive)) + return url.toString() +} + +export function startNodeRpcMock( + port: number, + bocs: { [address: string]: string }, +) { + const app = express() + app.use(express.json()) + app.post("/", (req, res) => { + const boc = bocs[req.body.params.account] + res.contentType("application/json") + res.send( + JSON.stringify({ + jsonrpc: "2.0", + id: req.body.id, + result: boc + ? { + account_boc: boc, + } + : null, + }), + ) + }) + return app.listen(port) +} + +export async function startTestServer( + overrideConfig?: (config: QConfig) => void, +): Promise { + // prepare TONQServer + const dbUrl = getTestDbUrl(false) + + const config = resolveConfig( + {}, + { + blockchain: { + accounts: [dbUrl], + blocks: { + hot: [dbUrl], + }, + transactions: { + hot: [dbUrl], + }, + }, + chainRangesVerification: [dbUrl], + }, + {}, + ) + overrideConfig?.(config) + const providers = new DataProviderFactory(config, new QLogs()) + const blockchainData = new QBlockchainData({ + providers: providers.ensure(), + logs: new QLogs(), + tracer: QTracer.create(testConfig), + stats: QStats.create("", [], 0), + blockBocProvider: createBocProvider(config.blockBocs), + accountProvider: createAccountProvider(config.accountProvider), + isTests: true, + subscriptionsMode: SubscriptionsMode.Arango, + filterConfig: config.queries.filter, + ignoreMessagesForLatency: false, + }) + + const serverConfig = cloneDeep(testConfig) as QConfig + overrideConfig?.(serverConfig) + const server = new TONQServer({ + config: serverConfig, + logs: new QLogs(), + data: blockchainData, + }) + await server.start() + return server +} + +async function initCollection( + db: Database, + collectionName: string, + docs: any[], + archive: boolean, + archiveFields: Set, +) { + const collection = db.collection(collectionName) + await collection.create() + const dbArchiveFields = new Set(archiveFields) + dbArchiveFields.add("_key") + dbArchiveFields.add("_id") + dbArchiveFields.add("_rev") + const saveDocs = archive + ? docs.map(x => buildArchiveValue("", x, dbArchiveFields)) + : docs + await collection.save(saveDocs) +} + +function buildArchiveValue( + path: string, + value: any, + archiveFields: Set, +): any { + if (path !== "" && !archiveFields.has(path)) { + return undefined + } + if (!isObject(value)) { + return value + } + let archiveDoc: any = undefined + for (const [fieldName, fieldValue] of Object.entries(value)) { + const fieldPath = path !== "" ? 
`${path}.${fieldName}` : fieldName + const archiveValue = buildArchiveValue( + fieldPath, + fieldValue, + archiveFields, + ) + if (archiveValue !== undefined) { + if (archiveDoc === undefined) { + archiveDoc = { [fieldName]: archiveValue } + } else { + archiveDoc[fieldName] = archiveValue + } + } + } + return archiveDoc +} + +async function createMockDb(archive: boolean) { + const dbName = getTestDbName(archive) + const db = new Database(getTestDbServer()) + try { + await db.dropDatabase(dbName) + } catch (err) { + console.log(err) + // do nothing + } + await db.createDatabase(dbName) + await db.useDatabase(dbName) + + await initCollection(db, "blocks", blocksData, archive, blockArchiveFields) + await initCollection( + db, + "messages", + messagesData, + archive, + messageArchiveFields, + ) + await initCollection( + db, + "transactions", + transactionData, + archive, + transactionArchiveFields, + ) + + if (!archive) { + await initCollection(db, "accounts", accountsData, false, new Set()) + + const crv = db.collection("chain_ranges_verification") + await crv.create() + await crv.save(chainRangesVerificationSummary) + } +} + +export async function createTestData() { + await createMockDb(false) + await createMockDb(true) +} + +type TestSetupOptions = { + port?: number + withArchiveDb?: boolean + accounts?: { [hash: string]: string } +} + +export class TestSetup { + constructor( + public client: ApolloClient, + public server: TONQServer, + public accountProvider?: any, + ) {} + + static async create(options: TestSetupOptions): Promise { + const port = options.port ?? 1 + const serverPort = 5000 + port + const accountProviderPort = 6000 + port + const accountProvider = options.accounts + ? startNodeRpcMock(accountProviderPort, options.accounts) + : undefined + const server = await startTestServer(x => { + x.server.port = serverPort + if (options.accounts) { + x.accountProvider.evernodeRpc = { + endpoint: `http://localhost:${accountProviderPort}`, + } + } + const archive = + options.withArchiveDb ?? false ? 
[getTestDbUrl(true)] : [] + x.archive = archive + x.blockchain.transactions.archive = archive + x.blockchain.blocks.archive = archive + }) + + const client = createTestClient({ + useWebSockets: false, + port: serverPort, + }) + return new TestSetup(client, server, accountProvider) + } + + async queryBlockchain(query: string) { + return (await this.query(`blockchain { ${query} }`)).blockchain + } + + async query(query: string) { + return ( + await this.client.query({ + query: gql(`{ ${query} }`), + }) + ).data + } + + async close(): Promise { + await this.client.stop() + await this.server.stop() + if (this.accountProvider) { + this.accountProvider.close() + } + } +} diff --git a/src/__tests__/blockchain.ts b/src/__tests__/blockchain.ts index fbde8b4..b0ea919 100644 --- a/src/__tests__/blockchain.ts +++ b/src/__tests__/blockchain.ts @@ -1,98 +1,15 @@ -import { Database } from "arangojs" import gql from "graphql-tag" -import { resolveConfig, SubscriptionsMode } from "../server/config" -import QBlockchainData from "../server/data/blockchain" -import QLogs from "../server/logs" -import TONQServer, { DataProviderFactory } from "../server/server" -import { QStats } from "../server/stats" -import { QTracer } from "../server/tracing" +import TONQServer from "../server/server" -import { createTestClient, testConfig } from "./init-tests" -import { - accounts as accountsData, - blocks as blocksData, - messages as messagesData, - transactions as transactionsData, - summary as chainRangesVerificationSummary, -} from "./blockchain-data" -import { BocStorage } from "../server/data/boc-storage" +import { createTestClient } from "./init-tests" +import { createTestData, startTestServer } from "./blockchain-mock" -const TEST_DB_NAME = "Q-server_test_db" let server: TONQServer | null = null beforeAll(async () => { - let serverAddress = - process.env.Q_DATA_MUT ?? - process.env.Q_ACCOUNTS ?? 
- "http://localhost:8901" - - // prepare db - const db = new Database(serverAddress) - try { - await db.dropDatabase(TEST_DB_NAME) - } catch (err) { - console.log(err) - // do nothing - } - await db.createDatabase(TEST_DB_NAME) - await db.useDatabase(TEST_DB_NAME) - const blocks = db.collection("blocks") - await blocks.create() - await blocks.save(blocksData) - const messages = db.collection("messages") - await messages.create() - await messages.save(messagesData) - const transactions = db.collection("transactions") - await transactions.create() - await transactions.save(transactionsData) - const accounts = db.collection("accounts") - await accounts.create() - await accounts.save(accountsData) - const crv = db.collection("chain_ranges_verification") - await crv.create() - await crv.save(chainRangesVerificationSummary) - - // prepare TONQServer - const url = new URL(serverAddress) - url.searchParams.set("name", TEST_DB_NAME) - serverAddress = url.toString() - - const config = resolveConfig( - {}, - { - blockchain: { - accounts: [serverAddress], - blocks: { - hot: [serverAddress], - }, - transactions: { - hot: [serverAddress], - }, - }, - chainRangesVerification: [serverAddress], - }, - {}, - ) - const providers = new DataProviderFactory(config, new QLogs()) - const blockchainData = new QBlockchainData({ - providers: providers.ensure(), - logs: new QLogs(), - tracer: QTracer.create(testConfig), - stats: QStats.create("", [], 0), - bocStorage: new BocStorage(config.blockBocs), - isTests: true, - subscriptionsMode: SubscriptionsMode.Arango, - filterConfig: config.queries.filter, - ignoreMessagesForLatency: false, - }) - - server = new TONQServer({ - config: testConfig, - logs: new QLogs(), - data: blockchainData, - }) - await server.start() + await createTestData() + server = await startTestServer() }) afterAll(async () => { @@ -1855,7 +1772,7 @@ test("blockchain.account.transactions. Invalid account should throw", async () = `, }) expect(true).toBe(false) // this line should be unreachable! 
- } catch (err) { + } catch (err: any) { expect(err.constructor.name).toBe("ApolloError") const { message, extensions } = err.graphQLErrors[0] expect(extensions.code).toEqual("GRAPHQL_VALIDATION_FAILED") diff --git a/src/__tests__/cache-key.ts b/src/__tests__/cache-key.ts index 96b9392..5628121 100644 --- a/src/__tests__/cache-key.ts +++ b/src/__tests__/cache-key.ts @@ -240,7 +240,7 @@ describe("DataCache", () => { }) afterEach(async () => { - // This hack to stop server if some of tests fails + // This hack to stop server if some test fails try { await server.stop() } finally { diff --git a/src/__tests__/cloud15.ts b/src/__tests__/cloud15.ts new file mode 100644 index 0000000..95af23a --- /dev/null +++ b/src/__tests__/cloud15.ts @@ -0,0 +1,256 @@ +import { createTestData, TestSetup } from "./blockchain-mock" +import { accounts, transactions } from "./blockchain-mock-data" +import { + BlockchainAccount, + BlockchainTransaction, +} from "../server/graphql/blockchain/resolvers-types-generated" +import { toU64String } from "../server/utils" + +beforeAll(async () => { + await createTestData() +}) + +const mockAcc = { + id: "accounts/0:aaa5bc9cc88f3965b258e14ac9c99b61c9c55d3394d001969c3f5b36b35d07ef", + _key: "0:aaa5bc9cc88f3965b258e14ac9c99b61c9c55d3394d001969c3f5b36b35d07ef", + workchain_id: 0, + boc: "te6ccgECEwEAAtEAAm/ACqpbycyI85ZbJY4UrJyZthycVdM5TQAZacP1s2s10H7yJoURQyWsOoAAAAAAAAAAMQdIC1ATQAYBAWGAAADEsoxIzAAAAAAADbugVptUh94vSUj+5DUD/DWpPFXxmwjBE7eKNHS9J10IXlBgAgIDzyAFAwEB3gQAA9AgAEHa02qQ+8XpKR/chqB/hrUnir4zYRgidvFGjpek66ELygwCJv8A9KQgIsABkvSg4YrtU1gw9KEJBwEK9KQg9KEIAAACASAMCgH+/38h1SDHAZFwjhIggQIA1yHXC/8i+QFTIfkQ8qjiItMf0z81IHBwcO1E0PQEATQggQCA10WY0z8BM9M/ATKWgggbd0Ay4nAjJrmOJCX4I4ED6KgkoLmOF8glAfQAJs8LPyPPCz8izxYgye1UfzIw3t4FXwWZJCLxQAFfCtsw4AsADIA08vBfCgIBIBANAQm8waZuzA4B/nDtRND0BAEyINaAMu1HIm+MI2+MIW+MIO1XXwRwaHWhYH+6lWh4oWAx3u1HbxHXC/+68uBk+AD6QNN/0gAwIcIAIJcwIfgnbxC53vLgZSIiInDIcc8LASLPCgBxz0D4KM8WJM8WI/oCcc9AcPoCcPoCgEDPQPgjzwsfcs9AIMkPABYi+wBfBV8DcGrbMAIBSBIRAOu4iQAnXaiaBBAgEFrovk5gHwAdqPkQICAZ6Bk6DfGAPoCLLfGdquAmDh2o7eJQCB6B3lFa4X/9qOQN4iYAORl/+ToN6j2q/ajkDeJZHoALBBjgMcIGDhnhZ/BBA27oGeFn7jnoMrnizjnoPEAt4jni2T2qjg1QAMrccCHXSSDBII4rIMAAjhwj0HPXIdcLACDAAZbbMF8H2zCW2zBfB9sw4wTZltswXwbbMOME2eAi0x80IHS7II4VMCCCEP////+6IJkwIIIQ/////rrf35bbMF8H2zDgIyHxQAFfBw==", + last_paid: 1689618256, + bits: "0x1445", + cells: "0x13", + public_cells: "0x0", + last_trans_lt: "0xc", + balance: "0x1d202d40", + code: "te6ccgECDQEAAjAAAib/APSkICLAAZL0oOGK7VNYMPShAwEBCvSkIPShAgAAAgEgBgQB/v9/IdUgxwGRcI4SIIECANch1wv/IvkBUyH5EPKo4iLTH9M/NSBwcHDtRND0BAE0IIEAgNdFmNM/ATPTPwEyloIIG3dAMuJwIya5jiQl+COBA+ioJKC5jhfIJQH0ACbPCz8jzws/Is8WIMntVH8yMN7eBV8FmSQi8UABXwrbMOAFAAyANPLwXwoCASAKBwEJvMGmbswIAf5w7UTQ9AQBMiDWgDLtRyJvjCNvjCFvjCDtV18EcGh1oWB/upVoeKFgMd7tR28R1wv/uvLgZPgA+kDTf9IAMCHCACCXMCH4J28Qud7y4GUiIiJwyHHPCwEizwoAcc9A+CjPFiTPFiP6AnHPQHD6AnD6AoBAz0D4I88LH3LPQCDJCQAWIvsAXwVfA3Bq2zACAUgMCwDruIkAJ12omgQQIBBa6L5OYB8AHaj5ECAgGegZOg3xgD6Aiy3xnargJg4dqO3iUAgegd5RWuF//ajkDeImADkZf/k6Deo9qv2o5A3iWR6ACwQY4DHCBg4Z4WfwQQNu6BnhZ+456DK54s456DxALeI54tk9qo4NUADK3HAh10kgwSCOKyDAAI4cI9Bz1yHXCwAgwAGW2zBfB9swltswXwfbMOME2ZbbMF8G2zDjBNngItMfNCB0uyCOFTAgghD/////uiCZMCCCEP////6639+W2zBfB9sw4CMh8UABXwc=", + code_hash: + "98196905d4f1d250741ab885ac2411e0a547c72486f613d8cb5f302fd9d51c6a", + data: "te6ccgEBBQEAZQABYYAAAMSyjEjMAAAAAAANu6BWm1SH3i9JSP7kNQP8Nak8VfGbCMETt4o0dL0nXQheUGABAgPPIAQCAQHeAwAD0CAAQdrTapD7xekpH9yGoH+GtSeKvjNhGCJ28UaOl6TroQvKDA==", + data_hash: + "3e86879954d46cb6879303fac1161c787bb16edcc3f42039fbdf725c21c44e8d", + acc_type: 
1, +} + +test("cloud15.account-provider", async () => { + const test1 = await TestSetup.create({ port: 1 }) + + const refAccount = (await test1.query(`accounts { id boc data code }`)) + .accounts[0] + + const query = `account(address: "${refAccount.id}") { info { boc data code } }` + const queryResult1 = (await test1.queryBlockchain(query)) as any + const account1 = queryResult1.account.info + expect(account1.boc).toBe(refAccount.boc) + expect(account1.data).toBe(refAccount.data) + expect(account1.code).toBe(refAccount.code) + await test1.close() + + const test2 = await TestSetup.create({ + port: 2, + accounts: { + [refAccount.id]: mockAcc.boc, + }, + }) + const queryResult2 = (await test2.queryBlockchain(query)) as any + const account2 = queryResult2.account.info + expect(account2.boc).toBe(mockAcc.boc) + expect(account2.data).toBe(mockAcc.data) + expect(account2.code).toBe(mockAcc.code) + await test2.close() +}) + +test("cloud15.unavailable-account-provider", async () => { + const testAcc = accounts.find( + x => + x._key === + "0:198880de2ac28bcf71ab8082d7132d22c337879351cae8b48dd397aadf12f206", + ) as BlockchainAccount + const test = await TestSetup.create({ + port: 2, + accounts: { + [testAcc._key]: mockAcc.boc, + }, + }) + test.accountProvider.close() + try { + const r = await test.queryBlockchain( + `account(address: "${testAcc._key}") { info { boc data code } }`, + ) + fail(`error expected but result received: ${JSON.stringify(r)}`) + } catch (err) { + console.log("Unavailable Evernode RPC error: ", err.message) + } +}) + +test("cloud15.joins", async () => { + const testAcc = accounts.find( + x => + x._key === + "0:198880de2ac28bcf71ab8082d7132d22c337879351cae8b48dd397aadf12f206", + ) as BlockchainAccount + + async function testJoins(options: { + withArchiveDb: boolean + queryArchive: boolean + expectedBoc: string + accounts?: { [hash: string]: string } + }) { + const test = await TestSetup.create({ + withArchiveDb: options.withArchiveDb, + accounts: options.accounts, + }) + const archive = options.queryArchive + const queryResult = (await test.queryBlockchain( + ` + transaction(hash: "a1725e48f08eb5b4e07eaaa1979204b02385f351a4485d192f2ef6775ec7b2ca" archive: ${archive}) { + account { balance boc } + } + messageSrc: message(hash: "7a1234b3331c9ac515501c0ab46d480d68a066e402f445fd6592a07a9e7c79f2" archive: ${archive}) { + src_account { boc } + } + messageDst: message(hash: "32c75632aebfb890145477374cb265e2572d513fccbc7f5f58e108531fa42022" archive: ${archive}) { + dst_account { boc } + } + `, + )) as any + const tr = queryResult.transaction + expect(tr.account.boc).toBe(options.expectedBoc) + const messageSrc = queryResult.messageSrc + expect(messageSrc.src_account.boc).toBe(options.expectedBoc) + const messageDst = queryResult.messageDst + expect(messageDst.dst_account.boc).toBe(options.expectedBoc) + await test.close() + } + + await testJoins({ + withArchiveDb: false, + queryArchive: false, + expectedBoc: testAcc.boc ?? "", + }) + await testJoins({ + withArchiveDb: false, + queryArchive: true, + expectedBoc: testAcc.boc ?? "", + }) + await testJoins({ + withArchiveDb: true, + queryArchive: false, + expectedBoc: testAcc.boc ?? "", + }) + await testJoins({ + withArchiveDb: true, + queryArchive: true, + expectedBoc: testAcc.boc ?? 
"", + }) + + const mockAccounts = { + [testAcc._key]: mockAcc.boc, + } + + await testJoins({ + withArchiveDb: false, + queryArchive: false, + expectedBoc: mockAcc.boc, + accounts: mockAccounts, + }) + await testJoins({ + withArchiveDb: false, + queryArchive: true, + expectedBoc: mockAcc.boc, + accounts: mockAccounts, + }) + await testJoins({ + withArchiveDb: true, + queryArchive: false, + expectedBoc: mockAcc.boc, + accounts: mockAccounts, + }) + await testJoins({ + withArchiveDb: true, + queryArchive: true, + expectedBoc: mockAcc.boc, + accounts: mockAccounts, + }) +}) + +test("cloud15.boc-parsing", async () => { + const test = await TestSetup.create({ + withArchiveDb: true, + }) + async function testTr(hash: string) { + const mockTr = transactions.find( + x => x._key === hash, + ) as BlockchainTransaction + + const queryResult = (await test.queryBlockchain( + ` + transaction(hash: "${mockTr._key}" archive: true) { + lt workchain_id block_id action { action_list_hash } + } + `, + )) as any + const tr = queryResult.transaction + expect(tr.workchain_id).toBe(mockTr.workchain_id) + expect(tr.block_id).toBe(mockTr.block_id) + expect(toU64String(BigInt(tr.lt))).toBe(mockTr.lt) + expect(tr.action.action_list_hash).toBe(mockTr.action?.action_list_hash) + } + await testTr( + "a1725e48f08eb5b4e07eaaa1979204b02385f351a4485d192f2ef6775ec7b2ca", + ) + await testTr( + "d80e4a907b2405a1141e6f9953abbd175a2393ca04ac1e59aae07297c1637afc", + ) + await testTr( + "1217653452696b932502327b024084a0d70b2bb146720836355eda22864f49a3", + ) + await test.close() +}) + +test("cloud15.pagination", async () => { + const test = await TestSetup.create({ + withArchiveDb: true, + }) + async function testPagination(archive: boolean) { + const queryResult = (await test.queryBlockchain( + ` + account( + address: "-1:3333333333333333333333333333333333333333333333333333333333333333" + ) { + transactions_by_lt(last: 5, before: "", archive: ${archive}) { + edges { + node { + now_string + } + } + pageInfo { + endCursor + startCursor + hasNextPage + hasPreviousPage + } + } + } + `, + )) as any + const page = queryResult.account.transactions_by_lt + expect(page.pageInfo.startCursor).toBe("ad36a72ae001") + expect(page.pageInfo.endCursor).toBe("ad36a7496483") + } + await testPagination(true) + await testPagination(false) + await test.close() +}) + +test("cloud15.master_ranges", async () => { + const test = await TestSetup.create({ + withArchiveDb: true, + }) + const range1 = (await test.queryBlockchain( + `master_seq_no_range(time_start: 1622099906 time_end: 1622099910 archive: false) { start end }`, + )) as any + const range2 = (await test.queryBlockchain( + `master_seq_no_range(time_start: 1622099906 time_end: 1622099910 archive: true) { start end }`, + )) as any + expect(range1.master_seq_no_range).toEqual(range2.master_seq_no_range) + await test.close() +}) diff --git a/src/__tests__/gen-ql-tests.ts b/src/__tests__/gen-ql-tests.ts index 2695df5..4a55811 100644 --- a/src/__tests__/gen-ql-tests.ts +++ b/src/__tests__/gen-ql-tests.ts @@ -17,7 +17,7 @@ import { import { FieldNode, SelectionNode } from "graphql" import { FilterOrConversion } from "../server/config" import { QCollectionQuery } from "../server/data/collection-query" -import { overrideAccountBoc } from "../server/graphql/account-boc" +import { overrideAccountBocFilter } from "../server/graphql/account-boc" type Blocks = { blocks: { @@ -80,7 +80,7 @@ test("{in: null} should raise helpful error message", async () => { blocks(filter:{id:{in:null}}) { id } } `) - } catch 
(err) { + } catch (err: any) { expect( err.message.startsWith("Cannot read properties of null"), ).toBeFalsy() @@ -495,7 +495,7 @@ test("Generate AQL", () => { }) test("Account BOC versioning", () => { - overrideAccountBoc() + overrideAccountBocFilter() const e2 = Account.returnExpressions( { expectedAccountBocVersion: 2, diff --git a/src/__tests__/init-tests.ts b/src/__tests__/init-tests.ts index a3a053e..b195103 100644 --- a/src/__tests__/init-tests.ts +++ b/src/__tests__/init-tests.ts @@ -33,7 +33,8 @@ import { FieldNode, OperationDefinitionNode } from "graphql" import { httpUrl, assignDeep, cloneDeep } from "../server/utils" import fetch from "node-fetch" import { QCollectionQuery } from "../server/data/collection-query" -import { BocStorage } from "../server/data/boc-storage" +import { createBocProvider } from "../server/data/boc-provider" +import { createAccountProvider } from "../server/data/account-provider" jest.setTimeout(100000) @@ -103,10 +104,12 @@ interface SubscriptionClientPrivate { export function createTestClient(options: { useWebSockets: boolean + port?: number }): ApolloClient { const useHttp = !options.useWebSockets - - const url = `${testConfig.server.host}:${testConfig.server.port}/graphql` + const url = `${testConfig.server.host}:${ + options.port ?? testConfig.server.port + }/graphql` const subscriptionClient = new SubscriptionClient( `ws://${url}`, {}, @@ -253,7 +256,8 @@ export function createLocalArangoTestData(logs: QLogs): QBlockchainData { logs: new QLogs(), tracer: QTracer.create(testConfig), stats: QStats.create("", [], 0), - bocStorage: new BocStorage(config.blockBocs), + blockBocProvider: createBocProvider(config.blockBocs), + accountProvider: createAccountProvider(config.accountProvider), isTests: true, subscriptionsMode: SubscriptionsMode.Arango, filterConfig: config.queries.filter, @@ -337,7 +341,7 @@ export function createTestData(providers: QDataProviders): QBlockchainData { logs: new QLogs(), tracer: QTracer.create(testConfig), stats: QStats.create("", [], 0), - bocStorage: new BocStorage(testConfig.blockBocs), + blockBocProvider: createBocProvider(testConfig.blockBocs), isTests: true, subscriptionsMode: SubscriptionsMode.Arango, filterConfig: testConfig.queries.filter, diff --git a/src/__tests__/request-tests.ts b/src/__tests__/request-tests.ts index c90e73e..61bc93a 100644 --- a/src/__tests__/request-tests.ts +++ b/src/__tests__/request-tests.ts @@ -186,7 +186,7 @@ test("Post extra large request with default limit", async () => { await testServerRequired() try { await postRequest(randomRequest(66000)) - } catch (error) { + } catch (error: any) { expect(error.message.includes("is too large")).toBeTruthy() } }) @@ -200,7 +200,7 @@ test("Post extra large request with configured limit", async () => { }) try { await postRequest(randomRequest(10000)) - } catch (error) { + } catch (error: any) { expect(error.message.includes("is too large")).toBeTruthy() } }) diff --git a/src/maintanance/arango-update/index.ts b/src/maintanance/arango-update/index.ts index a600b14..3577692 100644 --- a/src/maintanance/arango-update/index.ts +++ b/src/maintanance/arango-update/index.ts @@ -50,7 +50,7 @@ async function updateCollection( try { await dbCollection.createPersistentIndex(required.fields) indexCreated = true - } catch (error) { + } catch (error: any) { if (error.message.toLowerCase().indexOf("timeout") >= 0) { console.log( `Index creation failed: ${error.message}. 
Retrying...`, diff --git a/src/server/cached-data.ts b/src/server/cached-data.ts index 7f79129..6f9d994 100644 --- a/src/server/cached-data.ts +++ b/src/server/cached-data.ts @@ -60,7 +60,7 @@ export abstract class CachedData { void (async () => { try { this.update(await this.loadActual()) - } catch (err) { + } catch (err: any) { this.completeRefresh(x => x.reject(err)) } })() diff --git a/src/server/config-param.ts b/src/server/config-param.ts index 5a382f9..ca9cadc 100644 --- a/src/server/config-param.ts +++ b/src/server/config-param.ts @@ -96,6 +96,16 @@ type BocResolverParams = { } } +type AccountProviderParams = { + evernodeRpc: { + endpoint: ConfigParam + } + arango: { + database: ConfigParam + collection: ConfigParam + } +} + export class ConfigParam { optionName: string env: string @@ -265,6 +275,30 @@ export class ConfigParam { } } + static accountProvider(): AccountProviderParams { + return { + evernodeRpc: { + endpoint: ConfigParam.string( + "accounts-evernode-rpc-endpoint", + "", + "Accounts Evernode RPC endpoint", + ), + }, + arango: { + database: ConfigParam.string( + "accounts-arango-database", + "", + "Accounts ArangoDB url", + ), + collection: ConfigParam.string( + "accounts-arango-collection", + "accounts", + "Accounts ArangoDB collection", + ), + }, + } + } + static blockchain(prefix: string): BlockchainParams { const zerostatePrefix = withPrefix(prefix, "zerostate") return { @@ -324,7 +358,7 @@ export class ConfigParam { } try { return this.parser(value.toString().trim()) - } catch (error) { + } catch (error: any) { throw QError.invalidConfigValue( this.option, error.message ?? error.toString(), diff --git a/src/server/config.ts b/src/server/config.ts index 8f73db2..2fce527 100644 --- a/src/server/config.ts +++ b/src/server/config.ts @@ -64,6 +64,7 @@ export type QConfig = { blockchain: QBlockchainDataConfig counterparties: string[] blockBocs: QBocResolverConfig + accountProvider: QAccountProviderConfig chainRangesVerification: string[] ignoreMessagesForLatency: boolean @@ -109,6 +110,18 @@ export type QBocResolverConfig = { } } +export type QAccountProviderConfig = { + // Evernode RPC compatible service + evernodeRpc?: { + endpoint: string + } + // ArangoDB accounts collection + arango?: { + database: string + collection: string + } +} + export type QBlockchainDataConfig = { hotCache?: string hotCacheExpiration: number @@ -317,7 +330,7 @@ export const configParams = { chainRangesVerification: ConfigParam.databases("chain ranges verification"), slowQueriesBlockchain: ConfigParam.blockchain("slow queries"), blockBocs: ConfigParam.bocResolver("block-bocs"), - + accountProvider: ConfigParam.accountProvider(), jaeger: { endpoint: ConfigParam.string("jaeger-endpoint", "", "Jaeger endpoint"), service: ConfigParam.string( diff --git a/src/server/data/account-provider.ts b/src/server/data/account-provider.ts new file mode 100644 index 0000000..2656c63 --- /dev/null +++ b/src/server/data/account-provider.ts @@ -0,0 +1,82 @@ +import { parseArangoConfig, QAccountProviderConfig } from "../config" +import { Database } from "arangojs" +import { createDatabase } from "./database-provider" +import { RequestManager, HTTPTransport, Client } from "@open-rpc/client-js" + +export interface IAccountProvider { + getBocs(addresses: string[]): Promise> +} + +class NodeRpcProvider implements IAccountProvider { + client: Client + constructor( + public config: { + endpoint: string + }, + ) { + const transport = new HTTPTransport(config.endpoint) + this.client = new Client(new 
RequestManager([transport])) + } + async getBocs(addresses: string[]): Promise> { + const resolved = new Map() + // TODO: fetch bocs in parallel + for (const address of addresses) { + const result = await this.client.request({ + method: "getAccount", + params: { + account: address, + }, + }) + if (result?.account_boc ?? "" !== "") { + resolved.set(address, result.account_boc) + } + } + return resolved + } +} + +class ArangoProvider implements IAccountProvider { + private readonly database: Database + constructor( + public config: { + database: string + collection: string + }, + ) { + this.database = createDatabase(parseArangoConfig(config.database)) + } + + async getBocs(addresses: string[]): Promise> { + const resolved = new Map() + const cursor = await this.database.query( + ` + FOR doc IN ${this.config.collection} + FILTER doc._key IN @addresses + RETURN { address: doc._key, boc: doc.boc } + `, + { + addresses, + }, + ) + const docs: { address: string; boc: string }[] = await cursor.all() + for (const doc of docs) { + resolved.set(doc.address, doc.boc) + } + return resolved + } +} + +export function createAccountProvider( + config: QAccountProviderConfig, +): IAccountProvider | undefined { + const rpcEndpoint = config.evernodeRpc?.endpoint ?? "" + if (rpcEndpoint !== "") { + return new NodeRpcProvider({ + endpoint: rpcEndpoint, + }) + } + if (config.arango && (config.arango.database ?? "" !== "")) { + return new ArangoProvider(config.arango) + } + return undefined +} diff --git a/src/server/data/blockchain.ts b/src/server/data/blockchain.ts index 45af275..b18e31f 100644 --- a/src/server/data/blockchain.ts +++ b/src/server/data/blockchain.ts @@ -250,6 +250,7 @@ export default class QBlockchainData extends QData { // Spec: reliable_chain_order_upper_boundary = U64String(last_reliable_mc_seq_no + 1) async getReliableChainOrderUpperBoundary( context: QRequestContext, + archive: boolean, ): Promise { const now = Date.now() if (now < this.reliableChainOrderUpperBoundary.lastCheckTime + 1000) { @@ -265,6 +266,7 @@ export default class QBlockchainData extends QData { orderBy: [], request: context, traceSpan: context.requestSpan, + archive, })) as ChainRangesVerificationSummary[] if (result.length > 0) { const boundary = result.reduce((prev, summary) => { @@ -299,6 +301,7 @@ export default class QBlockchainData extends QData { orderBy: [], request: context, traceSpan: context.requestSpan, + archive, })) as number[] if (result.length > 0) { const mc_seq_no = result.reduce((prev, curr) => diff --git a/src/server/data/boc-provider.ts b/src/server/data/boc-provider.ts new file mode 100644 index 0000000..0235e68 --- /dev/null +++ b/src/server/data/boc-provider.ts @@ -0,0 +1,129 @@ +import { parseArangoConfig, QBocResolverConfig } from "../config" +import { S3 } from "@aws-sdk/client-s3" +import { Database } from "arangojs" +import { createDatabase } from "./database-provider" + +export interface IBocProvider { + getBocs( + bocHashes: { hash: string; boc: string | null | undefined }[], + ): Promise> +} + +class PatternProvider implements IBocProvider { + constructor(public pattern: string) {} + async getBocs( + bocHashes: { + hash: string + boc: string | undefined + }[], + ): Promise> { + const resolved = new Map() + for (const { hash } of bocHashes) { + resolved.set(hash, this.pattern.replace("{hash}", hash)) + } + return resolved + } +} + +class S3Provider implements IBocProvider { + private readonly client: S3 + constructor( + public config: { + endpoint: string + region: string + bucket: string + 
accessKey: string + secretKey: string + }, + ) { + this.client = new S3({ + endpoint: config.endpoint, + region: config.region, + credentials: { + accessKeyId: config.accessKey, + secretAccessKey: config.secretKey, + }, + }) + } + async getBocs( + bocHashes: { + hash: string + boc: string | undefined + }[], + ): Promise> { + const resolved = new Map() + // TODO: fetch bocs in parallel + for (const { hash, boc } of bocHashes) { + const getObjectResult = await this.client.getObject({ + Bucket: this.config.bucket, + Key: hash, + }) + const body = getObjectResult.Body + const bodyAsString = await body?.transformToString("base64") + resolved.set(hash, bodyAsString ?? boc) + } + return resolved + } +} + +class ArangoProvider implements IBocProvider { + private readonly database: Database + constructor( + public config: { + database: string + collection: string + }, + ) { + this.database = createDatabase(parseArangoConfig(config.database)) + } + + async getBocs( + bocHashes: { + hash: string + boc: string | undefined + }[], + ): Promise> { + const resolved = new Map() + const cursor = await this.database.query( + ` + FOR doc IN ${this.config.collection} + FILTER doc._key IN @hashes + RETURN { + hash: doc._key, + boc: doc.boc + } + `, + { + hashes: bocHashes.map(x => x.hash), + }, + ) + const docs: { hash: string; boc: string }[] = await cursor.all() + for (const doc of docs) { + resolved.set(doc.hash, doc.boc) + } + return resolved + } +} + +export function createBocProvider( + config: QBocResolverConfig, +): IBocProvider | undefined { + const s3endpoint = config.s3?.endpoint ?? "" + if (s3endpoint !== "") { + return new S3Provider({ + endpoint: s3endpoint, + bucket: config.s3?.bucket ?? "", + region: config.s3?.region ?? "", + accessKey: config.s3?.accessKey ?? "", + secretKey: config.s3?.secretKey ?? "", + }) + } + const pattern = config.pattern ?? "" + if (pattern) { + return new PatternProvider(pattern) + } + if (config.arango && (config.arango.database ?? "" !== "")) { + return new ArangoProvider(config.arango) + } + return undefined +} diff --git a/src/server/data/caching/memjs-datacache.ts b/src/server/data/caching/memjs-datacache.ts index 3164df9..2326a0c 100644 --- a/src/server/data/caching/memjs-datacache.ts +++ b/src/server/data/caching/memjs-datacache.ts @@ -33,7 +33,7 @@ export class MemjsDataCache implements QDataCache { const value = data ? 
JSON.parse(data.toString()) : null this.log.debug("GET", hashedKey) resolve(value) - } catch (e) { + } catch (e: any) { this.log.error( "FAILED", "MEMCACHED", diff --git a/src/server/data/data.ts b/src/server/data/data.ts index 86b56ef..bee5198 100644 --- a/src/server/data/data.ts +++ b/src/server/data/data.ts @@ -28,8 +28,9 @@ import type { QDataProviderQueryParams, QIndexInfo, } from "./data-provider" -import { BocStorage } from "./boc-storage" +import { IBocProvider } from "./boc-provider" import { QDatabaseProvider } from "./database-provider" +import { IAccountProvider } from "./account-provider" export type QBlockchainDataProvider = { blocks?: QDataProvider @@ -49,8 +50,8 @@ export type QDataProviders = { export type QDataOptions = { providers: QDataProviders slowQueriesProviders?: QBlockchainDataProvider - bocStorage: BocStorage - + blockBocProvider?: IBocProvider + accountProvider?: IAccountProvider logs: QLogs tracer: Tracer stats: IStats @@ -82,7 +83,8 @@ export default class QData { isTests: boolean filterConfig: FilterConfig subscriptionsMode: SubscriptionsMode - bocStorage: BocStorage + blockBocProvider?: IBocProvider + accountProvider?: IAccountProvider // Own log: QLog @@ -132,7 +134,8 @@ export default class QData { this.slowQueriesProviders?.zerostate, ]) - this.bocStorage = options.bocStorage + this.blockBocProvider = options.blockBocProvider + this.accountProvider = options.accountProvider } addCollection( diff --git a/src/server/graphql/account-boc.ts b/src/server/graphql/account-boc.ts index 90ef4c3..36fddc2 100644 --- a/src/server/graphql/account-boc.ts +++ b/src/server/graphql/account-boc.ts @@ -13,7 +13,7 @@ import { undefinedToNull, } from "../filter/filters" -export function overrideAccountBoc() { +export function overrideAccountBocFilter() { const fields = Account.fields if (fields) { fields["boc"] = { diff --git a/src/server/graphql/block-boc.ts b/src/server/graphql/block-boc.ts new file mode 100644 index 0000000..03d9322 --- /dev/null +++ b/src/server/graphql/block-boc.ts @@ -0,0 +1,117 @@ +import { + CollectionFilter, + convertFilterValue, + filterConditionForFields, + invalidSelection, + QRequestParams, + QReturnExpression, + scalarOps, + StructFilter, + testFields, + undefinedToNull, +} from "../filter/filters" +import { Block } from "./resolvers-generated" +import { SelectionNode } from "graphql" +import { IBocProvider } from "../data/boc-provider" + +export const blockBocResolvers = (blocks: IBocProvider | undefined) => { + if (!blocks) { + return {} + } + return { + Block: { + boc: async (parent: { + _key: string | undefined | null + boc: string | undefined | null + }) => { + if (!parent._key) { + return parent.boc + } + const resolved = await blocks.getBocs([ + { hash: parent._key, boc: parent.boc }, + ]) + return resolved.get(parent._key) ?? parent.boc + }, + }, + BlockchainBlock: { + boc: async (parent: { + _key: string | undefined | null + boc: string | undefined | null + }) => { + if (!parent._key || (parent.boc ?? "" !== "")) { + return parent.boc + } + const resolved = await blocks.getBocs([ + { hash: parent._key, boc: parent.boc }, + ]) + return resolved.get(parent._key) ?? 
parent.boc + }, + }, + } +} + +export function overrideBlockBocFilter(blocks: IBocProvider | undefined) { + if (!blocks || !Block.fields) { + return + } + Block.fields["boc"] = { + filterCondition(params, path, filter) { + return filterConditionForFields( + path, + filter as StructFilter, + scalarOps, + (op, path, _filterKey, filterValue) => { + const converted = convertFilterValue( + filterValue, + op, + undefined, + ) + return op.filterCondition( + params, + path, + converted as CollectionFilter, + ) + }, + ) + }, + returnExpressions( + _request: QRequestParams, + path: string, + def: SelectionNode, + ): QReturnExpression[] { + if (def.kind !== "Field") { + throw invalidSelection(def.kind) + } + const name = def.name.value + return [ + { + name, + expression: `${path}.${name}`, + }, + { + name: "_key", + expression: `${path}._key`, + }, + ] + }, + test(parent, value, filter) { + return testFields( + value, + filter, + scalarOps, + (op, value, _filterKey, filterValue) => { + const converted = convertFilterValue( + filterValue, + op, + undefined, + ) + return op.test( + parent, + undefinedToNull(value), + converted as CollectionFilter, + ) + }, + ) + }, + } +} diff --git a/src/server/graphql/blockchain/blockchain.ts b/src/server/graphql/blockchain/blockchain.ts index cc9153a..bcfd2af 100644 --- a/src/server/graphql/blockchain/blockchain.ts +++ b/src/server/graphql/blockchain/blockchain.ts @@ -25,6 +25,7 @@ import { import { isDefined } from "./helpers" import { resolveAddress } from "../../address" import { ValidationError } from "apollo-server-errors" +import { useBlocksArchive } from "../../data/data-provider" // UUID is a hack to bypass QDataCombiner deduplication const MASTER_SEQ_NO_RANGE_QUERY = ` @@ -64,6 +65,7 @@ async function resolve_maser_seq_no_range( ) } + const useArchive = useBlocksArchive(args.archive, context) const result = (await context.services.data.query( required(context.services.data.blocks.provider), { @@ -75,6 +77,7 @@ async function resolve_maser_seq_no_range( orderBy: [], request: context, traceSpan, + archive: useArchive, }, )) as { first: number | null @@ -101,7 +104,10 @@ async function resolve_maser_seq_no_range( // reliable boundary const reliable = - await context.services.data.getReliableChainOrderUpperBoundary(context) + await context.services.data.getReliableChainOrderUpperBoundary( + context, + useArchive, + ) const max_end = parseMasterSeqNo(reliable.boundary) // Edge cases: diff --git a/src/server/graphql/blockchain/boc-parsers.ts b/src/server/graphql/blockchain/boc-parsers.ts index 69c8523..9fe9de4 100644 --- a/src/server/graphql/blockchain/boc-parsers.ts +++ b/src/server/graphql/blockchain/boc-parsers.ts @@ -7,8 +7,9 @@ import { } from "./resolvers-types-generated" import { QRequestContext } from "../../request" import { BocModule } from "@eversdk/core" +import { toU64String } from "../../utils" -const blockArchiveFields = new Set([ +export const blockArchiveFields = new Set([ "id", "hash", "boc", @@ -19,6 +20,7 @@ const blockArchiveFields = new Set([ "master.shard_hashes.descr.seq_no", "master.shard_hashes.shard", "master.shard_hashes.workchain_id", + "master.min_shard_gen_utime", "prev_alt_ref.root_hash", "prev_key_block_seqno", "prev_ref.root_hash", @@ -111,7 +113,7 @@ export function getBlocksPostProcessing( selection, blockArchiveFields, ) - const useBlockBocStorage = !!context.services.data.bocStorage.blocks + const useBlockBocStorage = !!context.services.data.blockBocProvider const resolveBocs = parseBocs || (useBlockBocStorage && 
selectionContains(selection, "boc")) return { @@ -150,7 +152,7 @@ function selectionContainsNonArchivedFields( return false } -function selectionContains(selection: SelectionSetNode, field: string) { +export function selectionContains(selection: SelectionSetNode, field: string) { return !!selection.selections.find( x => x.kind === "Field" && x.name.value === field, ) @@ -181,9 +183,9 @@ export async function postProcessBlocks( context: QRequestContext, blocks: BlockchainBlock[], ): Promise { - const blocksStorage = context.services.data.bocStorage.blocks + const blocksStorage = context.services.data.blockBocProvider if (postProcessing.resolveBocs && blocksStorage) { - const bocs = await blocksStorage.resolveBocs( + const bocs = await blocksStorage.getBocs( blocks.map(x => ({ hash: x._key, boc: x.boc, @@ -220,7 +222,11 @@ async function parseTransaction( sdk: BocModule, boc: string, ): Promise { - return (await sdk.parse_transaction({ boc })).parsed + const parsed = (await sdk.parse_transaction({ boc })).parsed + if (parsed.lt && parsed.lt.startsWith("0x")) { + parsed.lt = toU64String(BigInt(parsed.lt)) + } + return parsed } export async function parseMessageBocsIfRequired( diff --git a/src/server/graphql/blockchain/config.ts b/src/server/graphql/blockchain/config.ts index c4ba3b5..ba43fee 100644 --- a/src/server/graphql/blockchain/config.ts +++ b/src/server/graphql/blockchain/config.ts @@ -7,12 +7,7 @@ import { QRequestContext } from "../../request" import { QTraceSpan } from "../../tracing" import { QError, required } from "../../utils" -import { - getFieldSelectionSet, - isDefined, - KeyOf, - KeyOfWithValueOf, -} from "./helpers" +import { getFieldSelectionSet, KeyOf, KeyOfWithValueOf } from "./helpers" import { BlockchainAccount, BlockchainBlock, @@ -90,52 +85,53 @@ export const config: Config = { `FILTER ${path}.in_msg IN @${onFieldParam} ` + `RETURN ${returnExpression}`, }, - { - targetField: "src_account", - additionalFields: ["msg_type"], - pathForQuery: "acc", - joinedCollection: "accounts", - prefetchQueryBuilder: ( - parentPath, - joinPath, - returnExpression, - ) => - `(${parentPath}.msg_type != 1 ? ` + - `(FOR ${joinPath} IN accounts ` + - `FILTER ${joinPath}._key == ${parentPath}.src ` + - `RETURN ${returnExpression})[0] ` + - `: null)`, - needFetch: m => !m.src_account && m.msg_type != 1, - onField: "src", - refOnField: "_key", - queryBuilder: (path, onFieldParam, returnExpression) => - `FOR ${path} in accounts ` + - `FILTER ${path}._key IN @${onFieldParam} ` + - `RETURN ${returnExpression}`, - }, - { - targetField: "dst_account", - additionalFields: ["msg_type"], - pathForQuery: "acc", - joinedCollection: "accounts", - prefetchQueryBuilder: ( - parentPath, - joinPath, - returnExpression, - ) => - `(${parentPath}.msg_type != 2 ? ` + - `(FOR ${joinPath} IN accounts ` + - `FILTER ${joinPath}._key == ${parentPath}.dst ` + - `RETURN ${returnExpression})[0] ` + - `: null)`, - needFetch: m => !m.dst_account && m.msg_type != 2, - onField: "dst", - refOnField: "_key", - queryBuilder: (path, onFieldParam, returnExpression) => - `FOR ${path} in accounts ` + - `FILTER ${path}._key IN @${onFieldParam} ` + - `RETURN ${returnExpression}`, - }, + // TODO: + // { + // targetField: "src_account", + // additionalFields: ["msg_type"], + // pathForQuery: "acc", + // joinedCollection: "accounts", + // prefetchQueryBuilder: ( + // parentPath, + // joinPath, + // returnExpression, + // ) => + // `(${parentPath}.msg_type != 1 ? 
` +
+            //         `(FOR ${joinPath} IN accounts ` +
+            //         `FILTER ${joinPath}._key == ${parentPath}.src ` +
+            //         `RETURN ${returnExpression})[0] ` +
+            //         `: null)`,
+            //     needFetch: m => !m.src_account && m.msg_type != 1,
+            //     onField: "src",
+            //     refOnField: "_key",
+            //     queryBuilder: (path, onFieldParam, returnExpression) =>
+            //         `FOR ${path} in accounts ` +
+            //         `FILTER ${path}._key IN @${onFieldParam} ` +
+            //         `RETURN ${returnExpression}`,
+            // },
+            // {
+            //     targetField: "dst_account",
+            //     additionalFields: ["msg_type"],
+            //     pathForQuery: "acc",
+            //     joinedCollection: "accounts",
+            //     prefetchQueryBuilder: (
+            //         parentPath,
+            //         joinPath,
+            //         returnExpression,
+            //     ) =>
+            //         `(${parentPath}.msg_type != 2 ? ` +
+            //         `(FOR ${joinPath} IN accounts ` +
+            //         `FILTER ${joinPath}._key == ${parentPath}.dst ` +
+            //         `RETURN ${returnExpression})[0] ` +
+            //         `: null)`,
+            //     needFetch: m => !m.dst_account && m.msg_type != 2,
+            //     onField: "dst",
+            //     refOnField: "_key",
+            //     queryBuilder: (path, onFieldParam, returnExpression) =>
+            //         `FOR ${path} in accounts ` +
+            //         `FILTER ${path}._key IN @${onFieldParam} ` +
+            //         `RETURN ${returnExpression}`,
+            // },
         ],
     }),
     transactions: compileCollectionConfig({
@@ -143,27 +139,27 @@ export const config: Config = {
         excludeFields: ["hash"],
         qDataCollectionSelector: ctx => ctx.services.data.transactions,
         joins: [
-            {
-                targetField: "account",
-                additionalFields: ["account_addr"],
-                pathForQuery: "acc",
-                joinedCollection: "accounts",
-                prefetchQueryBuilder: (
-                    parentPath,
-                    joinPath,
-                    returnExpression,
-                ) =>
-                    `(FOR ${joinPath} IN accounts ` +
-                    `FILTER ${joinPath}._key == ${parentPath}.account_addr ` +
-                    `RETURN ${returnExpression})[0]`,
-                needFetch: t => !isDefined(t.account),
-                onField: "account_addr",
-                refOnField: "_key",
-                queryBuilder: (path, onFieldParam, returnExpression) =>
-                    `FOR ${path} in accounts ` +
-                    `FILTER ${path}._key IN @${onFieldParam} ` +
-                    `RETURN ${returnExpression}`,
-            },
+            // {
+            //     targetField: "account",
+            //     additionalFields: ["account_addr"],
+            //     pathForQuery: "acc",
+            //     joinedCollection: "accounts",
+            //     prefetchQueryBuilder: (
+            //         parentPath,
+            //         joinPath,
+            //         returnExpression,
+            //     ) =>
+            //         `(FOR ${joinPath} IN accounts ` +
+            //         `FILTER ${joinPath}._key == ${parentPath}.account_addr ` +
+            //         `RETURN ${returnExpression})[0]`,
+            //     needFetch: t => !isDefined(t.account),
+            //     onField: "account_addr",
+            //     refOnField: "_key",
+            //     queryBuilder: (path, onFieldParam, returnExpression) =>
+            //         `FOR ${path} in accounts ` +
+            //         `FILTER ${path}._key IN @${onFieldParam} ` +
+            //         `RETURN ${returnExpression}`,
+            // },
             {
                 targetField: "in_message",
                 additionalFields: ["in_msg"],
@@ -217,7 +213,7 @@ export type CompiledCollectionConfig = {
         maxJoinDepth: number,
         path: string,
         additionalFields?: KeyOf[],
-        overridenFields?: [fieldName: string, fetcher: string][],
+        overriddenFields?: [fieldName: string, fetcher: string][],
     ) => string
     fetchJoins: (
         data: TItem[],
@@ -280,7 +276,7 @@ export function compileCollectionConfig(
            maxJoinDepth: number,
            path: string,
            additionalFields?: KeyOf[],
-            overridenFields?: [fieldName: string, fetcher: string][],
+            overriddenFields?: [fieldName: string, fetcher: string][],
        ) => {
            const returnExpressionsOverrides = new Map()
            for (const field of collection.alwaysFetchFields ?? []) {
@@ -338,7 +334,7 @@ export function compileCollectionConfig(
                )
            }
 
-            for (const override of overridenFields ?? []) {
+            for (const override of overriddenFields ?? []) {
                returnExpressionsOverrides.set(override[0], override[1])
            }
 
diff --git a/src/server/graphql/blockchain/fetchers/accounts.ts b/src/server/graphql/blockchain/fetchers/accounts.ts
index c71967b..2a74470 100644
--- a/src/server/graphql/blockchain/fetchers/accounts.ts
+++ b/src/server/graphql/blockchain/fetchers/accounts.ts
@@ -1,4 +1,4 @@
-import { GraphQLResolveInfo } from "graphql"
+import { GraphQLResolveInfo, SelectionSetNode } from "graphql"
 
 import { QParams } from "../../../filter/filters"
 import { QRequestContext } from "../../../request"
@@ -7,6 +7,9 @@ import { required } from "../../../utils"
 
 import { config } from "../config"
 import { BlockchainAccount } from "../resolvers-types-generated"
+import { selectionContains } from "../boc-parsers"
+import { IAccountProvider } from "../../../data/account-provider"
+import { BocModule } from "@eversdk/core"
 
 export async function resolve_account(
     address: String,
@@ -44,6 +47,15 @@ export async function resolve_account(
         },
     )) as BlockchainAccount[]
 
+    const provider = context.services.data.accountProvider
+    if (provider && selectionSet) {
+        await getBocFields(
+            queryResult,
+            selectionSet,
+            provider,
+            context.services.client.boc,
+        )
+    }
     if (queryResult.length === 0) {
         queryResult.push({
             _key: `${address}`,
@@ -63,3 +75,53 @@ export async function resolve_account(
 
     return queryResult[0]
 }
+
+async function getBocFields(
+    accounts: BlockchainAccount[],
+    selection: SelectionSetNode,
+    provider: IAccountProvider,
+    sdk: BocModule,
+) {
+    const bocRequested = selectionContains(selection, "boc")
+    const dataRequested = selectionContains(selection, "data")
+    const codeRequested = selectionContains(selection, "code")
+    if (!(bocRequested || dataRequested || codeRequested)) {
+        return
+    }
+    const bocs = await provider.getBocs(accounts.map(x => x._key))
+    for (const account of accounts) {
+        const boc = bocs.get(account._key)
+        if (!boc) {
+            continue
+        }
+        if (bocRequested) {
+            account.boc = boc
+        }
+        if (dataRequested || codeRequested) {
+            const parsed = (await sdk.parse_account({ boc })).parsed
+            if (dataRequested) {
+                account.data = parsed.data
+            }
+            if (codeRequested) {
+                account.code = parsed.code
+            }
+        }
+    }
+}
+
+export function accountResolver(addressField: string) {
+    return async (
+        parent: Record,
+        _args: unknown,
+        context: QRequestContext,
+        info: GraphQLResolveInfo,
+    ) => {
+        return context.trace("blockchain-account-info", async traceSpan => {
+            const address = parent[addressField]
+            if (!address) {
+                return null
+            }
+            return resolve_account(address, context, info, traceSpan)
+        })
+    }
+}
diff --git a/src/server/graphql/blockchain/fetchers/blocks.ts b/src/server/graphql/blockchain/fetchers/blocks.ts
index c045e44..bbaf6f1 100644
--- a/src/server/graphql/blockchain/fetchers/blocks.ts
+++ b/src/server/graphql/blockchain/fetchers/blocks.ts
@@ -233,12 +233,12 @@ export async function resolve_key_blocks(
             context.services.config.queries.filter.stringifyKeyInAqlComparison,
     })
 
-    await prepareChainOrderFilter(args, params, filters, context)
+    const useArchive = useBlocksArchive(args.archive, context)
+    await prepareChainOrderFilter(args, params, filters, context, useArchive)
     filters.push("doc.key_block == true")
 
     const { direction, limit } = processPaginationArgs(args)
 
-    const useArchive = useBlocksArchive(args.archive, context)
     const selectionSet = getNodeSelectionSetForConnection(info)
     const postProcessing = getBlocksPostProcessing(
         context,
@@ -308,7 +308,8 @@ export async function resolve_blockchain_blocks(
             context.services.config.queries.filter.stringifyKeyInAqlComparison,
     })
 
-    await prepareChainOrderFilter(args, params, filters, context)
+    const useArchive = useBlocksArchive(args.archive, context)
+    await prepareChainOrderFilter(args, params, filters, context, useArchive)
     if (isDefined(args.workchain)) {
         filters.push(`doc.workchain_id == @${params.add(args.workchain)}`)
     }
@@ -324,7 +325,6 @@ export async function resolve_blockchain_blocks(
 
     const { direction, limit } = processPaginationArgs(args)
 
-    const useArchive = useBlocksArchive(args.archive, context)
     const selectionSet = getNodeSelectionSetForConnection(info)
     const postProcessing = getBlocksPostProcessing(
         context,
diff --git a/src/server/graphql/blockchain/fetchers/messages.ts b/src/server/graphql/blockchain/fetchers/messages.ts
index 674ecbf..d75747a 100644
--- a/src/server/graphql/blockchain/fetchers/messages.ts
+++ b/src/server/graphql/blockchain/fetchers/messages.ts
@@ -166,6 +166,7 @@ export async function resolve_account_messages(
         params,
         inboundAndFilters,
         context,
+        useArchive,
         "dst_chain_order",
     )
     const returnExpression = returnExpressionBuilder("dst_chain_order")
@@ -221,6 +222,7 @@ export async function resolve_account_messages(
         params,
         outboundAndFilters,
         context,
+        useArchive,
         "src_chain_order",
     )
     const returnExpression = returnExpressionBuilder("src_chain_order")
diff --git a/src/server/graphql/blockchain/fetchers/transactions.ts b/src/server/graphql/blockchain/fetchers/transactions.ts
index 318a929..ed895f6 100644
--- a/src/server/graphql/blockchain/fetchers/transactions.ts
+++ b/src/server/graphql/blockchain/fetchers/transactions.ts
@@ -146,8 +146,9 @@ export async function resolve_blockchain_transactions(
         stringifyKeyInAqlComparison:
             context.services.config.queries.filter.stringifyKeyInAqlComparison,
     })
+    const useArchive = useTransactionsArchive(args.archive, context)
 
-    await prepareChainOrderFilter(args, params, filters, context)
+    await prepareChainOrderFilter(args, params, filters, context, useArchive)
 
     if (isDefined(args.workchain)) {
         filters.push(`doc.workchain_id == @${params.add(args.workchain)}`)
@@ -163,7 +164,6 @@ export async function resolve_blockchain_transactions(
 
     const { direction, limit } = processPaginationArgs(args)
 
-    const useArchive = useTransactionsArchive(args.archive, context)
     const { selectionSet, requireBocParsing } = upgradeSelectionForBocParsing(
         useArchive,
         getNodeSelectionSetForConnection(info),
@@ -238,7 +238,8 @@ export async function resolve_account_transactions(
             context.services.config.queries.filter.stringifyKeyInAqlComparison,
     })
 
-    await prepareChainOrderFilter(args, params, filters, context)
+    const useArchive = useTransactionsArchive(args.archive, context)
+    await prepareChainOrderFilter(args, params, filters, context, useArchive)
     filters.push(`doc.account_addr == @${params.add(account_address)}`)
     if (isDefined(args.aborted)) {
         filters.push(`doc.aborted == @${params.add(args.aborted)}`)
@@ -254,7 +255,6 @@ export async function resolve_account_transactions(
 
     const { direction, limit } = processPaginationArgs(args)
 
-    const useArchive = useTransactionsArchive(args.archive, context)
     const { selectionSet, requireBocParsing } = upgradeSelectionForBocParsing(
         useArchive,
         getNodeSelectionSetForConnection(info),
@@ -329,18 +329,19 @@ export async function resolve_account_transactions_by_lt(
             context.services.config.queries.filter.stringifyKeyInAqlComparison,
     })
 
+    const useArchive = useTransactionsArchive(args.archive, context)
     await prepareNonChainOrderPaginationFilter(
         args,
         params,
         filters,
         context,
+        useArchive,
         "lt",
     )
     filters.push(`doc.account_addr == @${params.add(account_address)}`)
 
     const { direction, limit } = processPaginationArgs(args)
 
-    const useArchive = useTransactionsArchive(args.archive, context)
     const { selectionSet, requireBocParsing } = upgradeSelectionForBocParsing(
         useArchive,
         getNodeSelectionSetForConnection(info),
diff --git a/src/server/graphql/blockchain/helpers.ts b/src/server/graphql/blockchain/helpers.ts
index 3523d81..30505c8 100644
--- a/src/server/graphql/blockchain/helpers.ts
+++ b/src/server/graphql/blockchain/helpers.ts
@@ -59,6 +59,7 @@ export async function prepareChainOrderFilter(
     params: QParams,
     filters: string[],
     context: QRequestContext,
+    archive: boolean,
     chainOrderFieldName = "chain_order",
 ) {
     // master_seq_no
@@ -84,6 +85,7 @@ export async function prepareChainOrderFilter(
         const reliable =
             await context.services.data.getReliableChainOrderUpperBoundary(
                 context,
+                archive,
             )
 
         end_chain_order =
@@ -113,6 +115,7 @@ export async function prepareNonChainOrderPaginationFilter(
     params: QParams,
     filters: string[],
     context: QRequestContext,
+    archive: boolean,
     paginationFieldName: string,
     chainOrderFieldName = "chain_order",
 ) {
@@ -129,6 +132,7 @@ export async function prepareNonChainOrderPaginationFilter(
         const reliable =
             await context.services.data.getReliableChainOrderUpperBoundary(
                 context,
+                archive,
             )
 
         end_chain_order =
diff --git a/src/server/graphql/blockchain/resolvers-types-generated.ts b/src/server/graphql/blockchain/resolvers-types-generated.ts
index 3c4c926..fa07c74 100644
--- a/src/server/graphql/blockchain/resolvers-types-generated.ts
+++ b/src/server/graphql/blockchain/resolvers-types-generated.ts
@@ -1001,6 +1001,7 @@ export type BlockchainQueryMessageArgs = {
 export type BlockchainQueryMaster_Seq_No_RangeArgs = {
     time_start?: Maybe
     time_end?: Maybe
+    archive?: Maybe
 }
 
 export type BlockchainQueryKey_BlocksArgs = {
diff --git a/src/server/graphql/blockchain/types/message.ts b/src/server/graphql/blockchain/types/message.ts
index c70907d..88702c2 100644
--- a/src/server/graphql/blockchain/types/message.ts
+++ b/src/server/graphql/blockchain/types/message.ts
@@ -12,6 +12,7 @@ import {
     MessageTypeEnum,
     Resolvers,
 } from "../resolvers-types-generated"
+import { accountResolver } from "../fetchers"
 
 export const resolvers: Resolvers = {
     BlockchainMessage: {
@@ -66,6 +67,8 @@ export const resolvers: Resolvers = {
             resolveAddressField(parent.src, args as AddressArgs),
         dst: (parent, args) =>
             resolveAddressField(parent.dst, args as AddressArgs),
+        src_account: accountResolver("src"),
+        dst_account: accountResolver("dst"),
         master_seq_no: parent =>
             masterSeqNoFromChainOrder(
                 parent.src_chain_order ?? parent.dst_chain_order,
diff --git a/src/server/graphql/blockchain/types/transaction.ts b/src/server/graphql/blockchain/types/transaction.ts
index 3ae0303..cac4c4c 100644
--- a/src/server/graphql/blockchain/types/transaction.ts
+++ b/src/server/graphql/blockchain/types/transaction.ts
@@ -13,6 +13,7 @@ import {
     TransactionProcessingStatusEnum,
     TransactionTypeEnum,
 } from "../resolvers-types-generated"
+import { accountResolver } from "../fetchers"
 
 export const resolvers: Resolvers = {
     BlockchainTransaction: {
@@ -96,6 +97,7 @@ export const resolvers: Resolvers = {
         },
         account_addr: (parent, args) =>
             resolveAddressField(parent.account_addr, args as AddressArgs),
+        account: accountResolver("account_addr"),
         master_seq_no: parent => masterSeqNoFromChainOrder(parent.chain_order),
     },
 }
diff --git a/src/server/graphql/info.ts b/src/server/graphql/info.ts
index d2fc224..687f3bb 100644
--- a/src/server/graphql/info.ts
+++ b/src/server/graphql/info.ts
@@ -39,6 +39,7 @@ async function info(
         result.chainOrderBoundary = (
             await context.services.data.getReliableChainOrderUpperBoundary(
                 context,
+                false,
             )
         ).boundary
     } catch {
diff --git a/src/server/server.ts b/src/server/server.ts
index eae9ead..446a9ad 100644
--- a/src/server/server.ts
+++ b/src/server/server.ts
@@ -64,15 +64,16 @@ import { assignDeep, httpUrl, packageJson } from "./utils"
 import WebSocket from "ws"
 import { MemStats } from "./mem-stat"
 import { QRequestContext, QRequestServices, RequestEvent } from "./request"
-import { overrideAccountBoc } from "./graphql/account-boc"
+import { overrideAccountBocFilter } from "./graphql/account-boc"
 import { rempResolvers } from "./graphql/remp"
 import { LiteClient } from "ton-lite-client"
 import {
     addMasterSeqNoFilters,
     masterSeqNoResolvers,
 } from "./graphql/chain-order"
-import { bocResolvers, overrideBocs } from "./graphql/boc-resolvers"
-import { BocStorage } from "./data/boc-storage"
+import { blockBocResolvers, overrideBlockBocFilter } from "./graphql/block-boc"
+import { createAccountProvider } from "./data/account-provider"
+import { createBocProvider } from "./data/boc-provider"
 
 type QServerOptions = {
     config: QConfig
@@ -351,7 +352,10 @@ export default class TONQServer {
                 this.config.slowQueriesBlockchain,
                 "slow",
             ),
-            bocStorage: new BocStorage(this.config.blockBocs),
+            blockBocProvider: createBocProvider(this.config.blockBocs),
+            accountProvider: createAccountProvider(
+                this.config.accountProvider,
+            ),
             isTests: false,
             subscriptionsMode: this.config.subscriptionsMode,
             filterConfig: this.config.queries.filter,
@@ -374,9 +378,9 @@ export default class TONQServer {
             this.data,
             this.liteclient,
         )
-        overrideAccountBoc()
+        overrideAccountBocFilter()
         addMasterSeqNoFilters()
-        overrideBocs(this.data.bocStorage)
+        overrideBlockBocFilter(this.data.blockBocProvider)
         const resolvers = createResolvers(this.data) as IResolvers
         ;[
             infoResolvers,
@@ -386,7 +390,7 @@ export default class TONQServer {
             rempResolvers(this.config.remp, this.logs),
             blockchainResolvers,
             masterSeqNoResolvers,
-            bocResolvers(this.data.bocStorage),
+            blockBocResolvers(this.data.blockBocProvider),
         ].forEach(x => assignDeep(resolvers, x))
         this.addEndPoint({
             path: "/graphql",
@@ -435,7 +439,11 @@ export default class TONQServer {
     }
 
     async stop() {
-        await new Promise(resolve => this.server.close(() => resolve()))
+        await Promise.race([
+            new Promise(resolve => this.server.close(() => resolve())),
+            new Promise(resolve => setTimeout(resolve, 50)),
+        ])
+
         this.logs.stop()
 
         for (const collection of this.data.collections) {
diff --git a/src/server/tracing/trace-span.ts b/src/server/tracing/trace-span.ts
index 2222adf..747082c 100644
--- a/src/server/tracing/trace-span.ts
+++ b/src/server/tracing/trace-span.ts
@@ -37,7 +37,7 @@ export class QTraceSpan {
             }
             span.finish()
             return result
-        } catch (error) {
+        } catch (error: any) {
             const cleaned = cleanError(error)
             span.log({ event: "error", "error.object": toLog(error) })
             span.finish()
diff --git a/src/server/tracing/tracer.ts b/src/server/tracing/tracer.ts
index 81bd014..490b1c7 100644
--- a/src/server/tracing/tracer.ts
+++ b/src/server/tracing/tracer.ts
@@ -164,7 +164,7 @@ export class QTracer {
             }
             span.finish()
             return result
-        } catch (error) {
+        } catch (error: any) {
             const cleaned = cleanError(error)
             span.log({ event: "failed", payload: toLog(error) })
             span.finish()
diff --git a/src/server/utils.ts b/src/server/utils.ts
index 4d5b2c7..1e52aa2 100644
--- a/src/server/utils.ts
+++ b/src/server/utils.ts
@@ -261,7 +261,7 @@ export function arraysAreEqual(a: Array, b: Array): boolean {
     return true
 }
 
-export function toU64String(value: number): string {
+export function toU64String(value: number | bigint): string {
     const hex = value.toString(16)
     return `${(hex.length - 1).toString(16)}${hex}`
 }