From 8c8384f79074872198310012da70ea537f492bf8 Mon Sep 17 00:00:00 2001
From: Wil Wade
Date: Wed, 8 Nov 2023 09:02:08 -0500
Subject: [PATCH] E2E Testing fixes and cleanup (#1765)

# Goal
The goal of this PR is to finish the remaining miscellaneous pieces of making the e2e tests less flaky. Does this fix all of the flakiness? No. Most of it? Yes.

Closes #1731

# Discussion
- Correct the batch error assertions
- Ids used as "bad" values should be close to the maximum
- Use immortal eras due to issues with fast block production and [AncientBirthBlock](https://substrate.stackexchange.com/questions/10411/fast-block-production-causes-mortality-checking-failing-with-ancientbirthblock)
- Clean up before funding
- Move `assertAddNewKey` into the shared helpers
- Add a new `assertHasMessage` helper (a usage sketch follows the diff below)
---
 e2e/capacity/transactions.test.ts      | 30 +++++-----
 e2e/messages/addIPFSMessage.test.ts    | 12 ++--
 e2e/miscellaneous/utilityBatch.test.ts | 38 +++++-------
 e2e/msa/msaKeyManagement.test.ts       |  6 +-
 e2e/scaffolding/extrinsicHelpers.ts    |  9 ++-
 e2e/scaffolding/helpers.ts             | 58 ++++++++++++++-----
 e2e/scaffolding/rootHooks.ts           | 13 +++--
 .../handleItemized.test.ts             |  6 +-
 .../handlePaginated.test.ts            | 15 ++---
 .../handleSignatureRequired.test.ts    |  7 ++-
 e2e/sudo/sudo.test.ts                  |  3 +-
 11 files changed, 112 insertions(+), 85 deletions(-)

diff --git a/e2e/capacity/transactions.test.ts b/e2e/capacity/transactions.test.ts
index b5859070fc..a5314b71aa 100644
--- a/e2e/capacity/transactions.test.ts
+++ b/e2e/capacity/transactions.test.ts
@@ -38,7 +38,9 @@ import {
   generatePaginatedUpsertSignaturePayloadV2,
   generatePaginatedDeleteSignaturePayloadV2,
   getCapacity,
-  getTestHandle
+  getTestHandle,
+  assertHasMessage,
+  assertAddNewKey
 } from "../scaffolding/helpers";
 import { FeeDetails } from "@polkadot/types/interfaces";
 import { ipfsCid } from "../messages/ipfs";
@@ -60,16 +62,6 @@ describe("Capacity Transactions", function () {
     assert.notEqual(schemaId, undefined, "setup should populate schemaId");
   });
 
-  async function assertAddNewKey(capacityKeys: KeyringPair, addKeyPayload: AddKeyData, newControlKeypair: KeyringPair) {
-    const addKeyPayloadCodec: Codec = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", addKeyPayload);
-    const ownerSig: Sr25519Signature = signPayloadSr25519(capacityKeys, addKeyPayloadCodec);
-    const newSig: Sr25519Signature = signPayloadSr25519(newControlKeypair, addKeyPayloadCodec);
-    const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(capacityKeys, ownerSig, newSig, addKeyPayload);
-    const { eventMap } = await addPublicKeyOp.signAndSend();
-    assertEvent(eventMap, "system.ExtrinsicSuccess");
-    assertEvent(eventMap, "msa.PublicKeyAdded");
-  }
-
   function getCapacityFee(chainEvents: EventMap): bigint {
     if (chainEvents["capacity.CapacityWithdrawn"] &&
       ExtrinsicHelper.api.events.capacity.CapacityWithdrawn.is(chainEvents["capacity.CapacityWithdrawn"])) {
@@ -118,6 +110,7 @@ describe("Capacity Transactions", function () {
     before(async function () {
       capacityKeys = createKeys("CapacityKeys");
       capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT);
+      // Stake enough for all transactions
      await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, stakedForMsa));
     })
 
@@ -186,11 +179,11 @@ describe("Capacity Transactions", function () {
       assertEvent(eventMap, "capacity.CapacityWithdrawn");
       assertEvent(eventMap, "msa.DelegationGranted");
-      let fee = getCapacityFee(eventMap);
+      const fee = getCapacityFee(eventMap);
       // assuming no other txns charged against capacity (b/c of async tests), this should be the maximum amount left.
const maximumExpectedRemaining = stakedForMsa / TokenPerCapacity - fee - let remaining = capacityStaked.remainingCapacity.toBigInt(); + const remaining = capacityStaked.remainingCapacity.toBigInt(); assert(remaining <= maximumExpectedRemaining, `expected ${remaining} to be <= ${maximumExpectedRemaining}`); assert.equal(capacityStaked.totalTokensStaked.toBigInt(), stakedForMsa); assert.equal(capacityStaked.totalCapacityIssued.toBigInt(), stakedForMsa / TokenPerCapacity); @@ -209,6 +202,7 @@ describe("Capacity Transactions", function () { beforeEach(async function () { starting_block = (await ExtrinsicHelper.apiPromise.rpc.chain.getHeader()).number.toNumber(); + // Stake each time so that we always have enough capacity to do the call await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); }); @@ -240,8 +234,7 @@ describe("Capacity Transactions", function () { page_size: 999 } ); - const response: MessageResponse = get.content[get.content.length - 1]; - assert.equal(response.payload, "0xdeadbeef", "payload should be 0xdeadbeef"); + assertHasMessage(get, x => x.payload.isSome && x.payload.toString() === "0xdeadbeef"); }); }); @@ -254,8 +247,10 @@ describe("Capacity Transactions", function () { before(async function () { capacityKeys = createKeys("CapacityKeys"); capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); - }) + }); + beforeEach(async function () { + // Stake each time so that we always have enough capacity to do the call await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); }); @@ -480,7 +475,8 @@ describe("Capacity Transactions", function () { before(async function () { capacityKeys = createKeys("CapacityKeys"); capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); - }) + }); + it("successfully pays with Capacity for eligible transaction - claimHandle", async function () { await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); diff --git a/e2e/messages/addIPFSMessage.test.ts b/e2e/messages/addIPFSMessage.test.ts index 0a44ee4fae..08b66c4f9a 100644 --- a/e2e/messages/addIPFSMessage.test.ts +++ b/e2e/messages/addIPFSMessage.test.ts @@ -5,7 +5,7 @@ import { base32 } from 'multiformats/bases/base32'; import { CID } from 'multiformats/cid' import { PARQUET_BROADCAST } from "../schemas/fixtures/parquetBroadcastSchemaType"; import assert from "assert"; -import { createAndFundKeypair } from "../scaffolding/helpers"; +import { assertHasMessage, createAndFundKeypair } from "../scaffolding/helpers"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { u16 } from "@polkadot/types"; import { MessageResponse } from "@frequency-chain/api-augment/interfaces"; @@ -102,9 +102,10 @@ describe("Add Offchain Message", function () { it("should successfully retrieve added message and returned CID should have Base32 encoding", async function () { const f = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(schemaId, { from_block: starting_block, from_index: 0, to_block: starting_block + 999, page_size: 999 }); - const response: MessageResponse = f.content[f.content.length - 1]; - const cid = Buffer.from(response.cid.unwrap()).toString(); - assert.equal(cid, ipfs_cid_32, 'returned CID should match base32-encoded CID'); + assertHasMessage(f, x => { + const cid = x.cid.isSome && Buffer.from(x.cid.unwrap()).toString(); 
+ return cid === ipfs_cid_32; + }); }) describe("Add OnChain Message and successfully retrieve it", function () { @@ -124,8 +125,7 @@ describe("Add Offchain Message", function () { page_size: 999 } ); - const response: MessageResponse = get.content[get.content.length - 1]; - assert.equal(response.payload, "0xdeadbeef", "payload should be 0xdeadbeef"); + assertHasMessage(get, x => x.payload.isSome && x.payload.toString() === "0xdeadbeef"); }); }); }); diff --git a/e2e/miscellaneous/utilityBatch.test.ts b/e2e/miscellaneous/utilityBatch.test.ts index a8fe66de2f..d61a6c9e98 100644 --- a/e2e/miscellaneous/utilityBatch.test.ts +++ b/e2e/miscellaneous/utilityBatch.test.ts @@ -1,7 +1,7 @@ import assert from "assert"; import { KeyringPair } from "@polkadot/keyring/types"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { createAndFundKeypair, getNonce } from "../scaffolding/helpers"; +import { DOLLARS, createAndFundKeypair } from "../scaffolding/helpers"; import { ApiTypes, SubmittableExtrinsic } from "@polkadot/api/types"; import { getFundingSource } from "../scaffolding/funding"; @@ -11,10 +11,9 @@ describe("Utility Batch Filtering", function () { const fundingSource = getFundingSource("misc-util-batch"); - before(async function () { - let nonce = await getNonce(fundingSource); - sender = await createAndFundKeypair(fundingSource, 50_000_000n, "utility-sender", nonce++); - recipient = await createAndFundKeypair(fundingSource, 50_000_000n, "utility-recipient", nonce++); + beforeEach(async function () { + sender = await createAndFundKeypair(fundingSource, 5n * DOLLARS, 'utility-sender'); + recipient = await createAndFundKeypair(fundingSource, 5n * DOLLARS, 'utility-recipient'); }); it("should successfully execute ✅ batch with allowed calls", async function () { @@ -44,11 +43,10 @@ describe("Utility Batch Filtering", function () { const batchAll = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); try { await batchAll.fundAndSend(fundingSource); + assert.fail("batchAll should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, " batchAll should return an error"); + assert.notEqual(err, undefined, " batchAll should return an error"); } - assert.notEqual(error, undefined, " batchAll should return an error"); }); it("should fail to execute ❌ batch with disallowed calls", async function () { @@ -88,14 +86,12 @@ describe("Utility Batch Filtering", function () { const badBatch: SubmittableExtrinsic[] = []; badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()) const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - let error: any; try { await batch.fundAndSend(fundingSource); + assert.fail("batch should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, "should return an error"); + assert.notEqual(err, undefined, "should return an error"); } - assert.notEqual(error, undefined, "should return an error"); }); it("should fail to execute ❌ batch with `Pays::Yes` `create_provider`call blocked by Frequency", async function () { @@ -103,14 +99,12 @@ describe("Utility Batch Filtering", function () { const badBatch: SubmittableExtrinsic[] = []; badBatch.push(ExtrinsicHelper.api.tx.msa.createProvider("I am a ba(tch)d provider")) const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - let error: any; try { await batch.fundAndSend(fundingSource); + assert.fail("batch should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, "should 
return an error"); + assert.notEqual(err, undefined, "should return an error"); } - assert.notEqual(error, undefined, "should return an error"); }); it("should fail to execute ❌ batch with `Pays::Yes` `create_schema` call blocked by Frequency", async function () { @@ -118,14 +112,12 @@ describe("Utility Batch Filtering", function () { const badBatch: SubmittableExtrinsic[] = []; badBatch.push(ExtrinsicHelper.api.tx.msa.createProvider("I am a ba(tch)d provider")) const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - let error: any; try { await batch.fundAndSend(fundingSource); + assert.fail("batch should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, "should return an error"); + assert.notEqual(err, undefined, "should return an error"); } - assert.notEqual(error, undefined, "should return an error"); }); it("should fail to execute ❌ batch with nested batch", async function () { @@ -136,13 +128,11 @@ describe("Utility Batch Filtering", function () { innerBatch.push(ExtrinsicHelper.api.tx.system.remark("Hello From Batch")) nestedBatch.push(ExtrinsicHelper.api.tx.utility.batch(innerBatch)) const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, nestedBatch); - let error: any; try { await batch.fundAndSend(fundingSource); + assert.fail("batch should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, "should return an error"); + assert.notEqual(err, undefined, "should return an error"); } - assert.notEqual(error, undefined, "should return an error"); }); }); diff --git a/e2e/msa/msaKeyManagement.test.ts b/e2e/msa/msaKeyManagement.test.ts index 10cad0bb8e..ec22792fa7 100644 --- a/e2e/msa/msaKeyManagement.test.ts +++ b/e2e/msa/msaKeyManagement.test.ts @@ -7,6 +7,8 @@ import { u64 } from "@polkadot/types"; import { Codec } from "@polkadot/types/types"; import { getFundingSource } from "../scaffolding/funding"; +const maxU64 = 18_446_744_073_709_551_615n; + describe("MSA Key management", function () { const fundingSource = getFundingSource("msa-key-management"); @@ -75,7 +77,7 @@ describe("MSA Key management", function () { it("should fail to add public key if origin does not own MSA (NotMsaOwner)", async function () { const newPayload = await generateAddKeyPayload({ ...defaultPayload, - msaId: new u64(ExtrinsicHelper.api.registry, 999), // If we create more than 999 MSAs in our test suites, this will fail + msaId: new u64(ExtrinsicHelper.api.registry, maxU64), }); addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); ownerSig = signPayloadSr25519(keys, addKeyData); @@ -155,7 +157,7 @@ describe("MSA Key management", function () { assert.notEqual(event, undefined, 'should have added public key'); // Cleanup - await assert.doesNotReject(ExtrinsicHelper.deletePublicKey(keys, thirdKey.publicKey).signAndSend()); + await assert.doesNotReject(ExtrinsicHelper.deletePublicKey(keys, thirdKey.publicKey).signAndSend('current')); }); }); diff --git a/e2e/scaffolding/extrinsicHelpers.ts b/e2e/scaffolding/extrinsicHelpers.ts index bbdcb617b2..039d8aac78 100644 --- a/e2e/scaffolding/extrinsicHelpers.ts +++ b/e2e/scaffolding/extrinsicHelpers.ts @@ -130,7 +130,8 @@ export class Extrinsic { // If we learn a transaction has an error status (this does NOT include RPC errors) // Then throw an error @@ -151,7 +152,8 @@ export class Extrinsic status.isInBlock || status.isFinalized), this.parseResult(this.event), )) @@ -159,7 +161,8 @@ export class Extrinsic status.isInBlock || 
status.isFinalized), this.parseResult(this.event), )) diff --git a/e2e/scaffolding/helpers.ts b/e2e/scaffolding/helpers.ts index 64c3887147..89b38f2bd8 100644 --- a/e2e/scaffolding/helpers.ts +++ b/e2e/scaffolding/helpers.ts @@ -1,7 +1,7 @@ import { Keyring } from "@polkadot/api"; import { KeyringPair } from "@polkadot/keyring/types"; import { u16, u32, u64, Option } from "@polkadot/types"; -import type { PalletCapacityCapacityDetails } from "@polkadot/types/lookup"; +import type { FrameSystemAccountInfo, PalletCapacityCapacityDetails } from "@polkadot/types/lookup"; import { Codec } from "@polkadot/types/types"; import { u8aToHex, u8aWrapBytes } from "@polkadot/util"; import { mnemonicGenerate } from '@polkadot/util-crypto'; @@ -14,7 +14,7 @@ import { ItemizedSignaturePayload, ItemizedSignaturePayloadV2, PaginatedDeleteSignaturePayload, PaginatedDeleteSignaturePayloadV2, PaginatedUpsertSignaturePayload, PaginatedUpsertSignaturePayloadV2 } from "./extrinsicHelpers"; -import { HandleResponse, MessageSourceId, PageHash } from "@frequency-chain/api-augment/interfaces"; +import { BlockPaginationResponseMessage, HandleResponse, MessageResponse, MessageSourceId, PageHash } from "@frequency-chain/api-augment/interfaces"; import assert from "assert"; import { AVRO_GRAPH_CHANGE } from "../schemas/fixtures/avroGraphChangeSchemaType"; import { PARQUET_BROADCAST } from "../schemas/fixtures/parquetBroadcastSchemaType"; @@ -165,16 +165,24 @@ export function createKeys(name: string = 'first pair'): KeyringPair { return keypair; } +function canDrainAccount(info: FrameSystemAccountInfo): Boolean { + return !info.isEmpty + && info.data.free.toNumber() > 1_500_000 // ~Cost to do the transfer + && info.data.reserved.toNumber() < 1 + && info.data.frozen.toNumber() < 1; +} + export async function drainKeys(keyPairs: KeyringPair[], dest: string) { try { - await Promise.allSettled(keyPairs.map(async (keypair) => { - const info = await ExtrinsicHelper.getAccountInfo(keypair.address); - if (!info.isEmpty && info.data.free.toNumber() > 0) { - await ExtrinsicHelper.emptyAccount(keypair, dest).signAndSend(); - } - })); + await Promise.all( + keyPairs.map(async (keypair) => { + const info = await ExtrinsicHelper.getAccountInfo(keypair.address); + // Only drain keys that can be + if (canDrainAccount(info)) await ExtrinsicHelper.emptyAccount(keypair, dest).signAndSend(); + }) + ); } catch (e) { - console.log("Error draining accounts: ", e); + console.log('Error draining accounts: ', e); } } @@ -210,8 +218,8 @@ export function log(...args: any[]) { } } -export async function createProviderKeysAndId(source: KeyringPair): Promise<[KeyringPair, u64]> { - const providerKeys = await createAndFundKeypair(source); +export async function createProviderKeysAndId(source: KeyringPair, amount?: bigint): Promise<[KeyringPair, u64]> { + const providerKeys = await createAndFundKeypair(source, amount); await ExtrinsicHelper.createMsa(providerKeys).fundAndSend(source); const createProviderOp = ExtrinsicHelper.createProvider(providerKeys, "PrivateProvider"); const { target: providerEvent } = await createProviderOp.fundAndSend(source); @@ -219,8 +227,8 @@ export async function createProviderKeysAndId(source: KeyringPair): Promise<[Key return [providerKeys, providerId]; } -export async function createDelegator(source: KeyringPair): Promise<[KeyringPair, u64]> { - let keys = await createAndFundKeypair(source); +export async function createDelegator(source: KeyringPair, amount?: bigint): Promise<[KeyringPair, u64]> { + let keys = await 
createAndFundKeypair(source, amount); const createMsa = ExtrinsicHelper.createMsa(keys); const { target: msaCreatedEvent } = await createMsa.fundAndSend(source); const delegatorMsaId = msaCreatedEvent?.data.msaId || new u64(ExtrinsicHelper.api.registry, 0); @@ -440,3 +448,27 @@ export function assertEvent(events: EventMap, eventName: string) { export function assertExtrinsicSuccess(eventMap: EventMap) { assert.notEqual(eventMap["system.ExtrinsicSuccess"], undefined); } + +export function assertHasMessage(response: BlockPaginationResponseMessage, testFn: (x: MessageResponse) => Boolean) { + const messages = response.content; + assert(messages.length > 0, "Expected some messages, but found none."); + + const found = messages.find(testFn); + + if (found) { + assert.notEqual(found, undefined); + } else { + const allPayloads = messages.map(x => x.payload.toString()); + assert.fail(`Unable to find message in response (length: ${messages.length}, Payloads: ${allPayloads.join(", ")})`); + } +} + +export async function assertAddNewKey(capacityKeys: KeyringPair, addKeyPayload: AddKeyData, newControlKeypair: KeyringPair) { + const addKeyPayloadCodec: Codec = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", addKeyPayload); + const ownerSig: Sr25519Signature = signPayloadSr25519(capacityKeys, addKeyPayloadCodec); + const newSig: Sr25519Signature = signPayloadSr25519(newControlKeypair, addKeyPayloadCodec); + const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(capacityKeys, ownerSig, newSig, addKeyPayload); + const { eventMap } = await addPublicKeyOp.signAndSend(); + assertEvent(eventMap, "system.ExtrinsicSuccess"); + assertEvent(eventMap, "msa.PublicKeyAdded"); +} diff --git a/e2e/scaffolding/rootHooks.ts b/e2e/scaffolding/rootHooks.ts index e838a94b41..3bfbc83753 100644 --- a/e2e/scaffolding/rootHooks.ts +++ b/e2e/scaffolding/rootHooks.ts @@ -15,20 +15,21 @@ export const mochaHooks = { try { await cryptoWaitReady(); await ExtrinsicHelper.initialize(); - } catch(e) { - console.error("Failed to run beforeAll root hook: ", e); + } catch (e) { + console.error('Failed to run beforeAll root hook: ', this.test.parent.suites[0].title, e); } }, async afterAll() { + const testSuite = this.test.parent.suites[0].title; + console.log("Starting ROOT hook shutdown", testSuite) try { // Any key created using helpers `createKeys` is kept in the module // then any value remaining is drained here at the end const rootAddress = getRootFundingSource().keys.address; await drainFundedKeys(rootAddress); - await ExtrinsicHelper.api.disconnect(); - await ExtrinsicHelper.apiPromise.disconnect(); - } catch(e) { - console.error("Failed to run afterAll root hook: ", e); + console.log("ENDING ROOT hook shutdown", testSuite) + } catch (e) { + console.error('Failed to run afterAll root hook: ', testSuite, e); } } } diff --git a/e2e/stateful-pallet-storage/handleItemized.test.ts b/e2e/stateful-pallet-storage/handleItemized.test.ts index 41bb45c939..d2873297a0 100644 --- a/e2e/stateful-pallet-storage/handleItemized.test.ts +++ b/e2e/stateful-pallet-storage/handleItemized.test.ts @@ -20,17 +20,17 @@ describe("📗 Stateful Pallet Storage", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await createProviderKeysAndId(fundingSource); + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); assert.notEqual(providerId, undefined, "setup should populate providerId"); 
assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); // Create a schema to allow delete actions const createSchemaDeletable = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized"); - const { target: eventDeletable } = await createSchemaDeletable.fundAndSend(fundingSource); + const { target: eventDeletable } = await createSchemaDeletable.signAndSend(); schemaId_deletable = eventDeletable!.data.schemaId; // Create non supported schema const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "OnChain"); - const { target: event2 } = await createSchema2.fundAndSend(fundingSource); + const { target: event2 } = await createSchema2.signAndSend(); assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); schemaId_unsupported = event2!.data.schemaId; // Create a MSA for the delegator and delegate to the provider diff --git a/e2e/stateful-pallet-storage/handlePaginated.test.ts b/e2e/stateful-pallet-storage/handlePaginated.test.ts index 7b3930b8dd..69cf897a3d 100644 --- a/e2e/stateful-pallet-storage/handlePaginated.test.ts +++ b/e2e/stateful-pallet-storage/handlePaginated.test.ts @@ -1,7 +1,7 @@ // E2E tests for pallets/stateful-pallet-storage/handlePaginated.ts import "@frequency-chain/api-augment"; import assert from "assert"; -import {createProviderKeysAndId, createDelegatorAndDelegation, getCurrentPaginatedHash, createMsa} from "../scaffolding/helpers"; +import {createProviderKeysAndId, createDelegatorAndDelegation, getCurrentPaginatedHash, createMsa, DOLLARS} from "../scaffolding/helpers"; import { KeyringPair } from "@polkadot/keyring/types"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { AVRO_CHAT_MESSAGE } from "./fixtures/itemizedSchemaType"; @@ -9,6 +9,8 @@ import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfac import { Bytes, u16, u64 } from "@polkadot/types"; import { getFundingSource } from "../scaffolding/funding"; +const badSchemaId = 65_534; + describe("📗 Stateful Pallet Storage", function () { const fundingSource = getFundingSource("stateful-storage-handle-paginated"); @@ -21,18 +23,18 @@ describe("📗 Stateful Pallet Storage", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await createProviderKeysAndId(fundingSource); + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); assert.notEqual(providerId, undefined, "setup should populate providerId"); assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); // Create a schema for Paginated PayloadLocation const createSchema = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Paginated"); - const { target: event } = await createSchema.fundAndSend(fundingSource); + const { target: event } = await createSchema.signAndSend(); schemaId = event!.data.schemaId; // Create non supported schema const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "OnChain"); - const { target: event2 } = await createSchema2.fundAndSend(fundingSource); + const { target: event2 } = await createSchema2.signAndSend(); assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); schemaId_unsupported = event2!.data.schemaId; @@ -81,7 +83,7 @@ describe("📗 Stateful Pallet Storage", function () { let page_id = 0; let target_hash = await 
getCurrentPaginatedHash(msa_id, schemaId, page_id) let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - let fake_schema_id = new u16(ExtrinsicHelper.api.registry, 999); + let fake_schema_id = new u16(ExtrinsicHelper.api.registry, badSchemaId); let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, fake_schema_id, msa_id, page_id, payload_1, target_hash); await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { name: 'InvalidSchemaId', @@ -130,9 +132,8 @@ describe("📗 Stateful Pallet Storage", function () { describe("Paginated Storage Removal Negative Tests 😊/😥", function () { it("🛑 should fail call to remove page with invalid schemaId", async function () { - let fake_schema_id = 999; let page_id = 0; - let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, fake_schema_id, msa_id, page_id, 0); + let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, badSchemaId, msa_id, page_id, 0); await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { name: 'InvalidSchemaId', section: 'statefulStorage', diff --git a/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts b/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts index 5006dd155a..1bb759bbb4 100644 --- a/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts +++ b/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts @@ -2,6 +2,7 @@ import "@frequency-chain/api-augment"; import assert from "assert"; import { + DOLLARS, createDelegator, createProviderKeysAndId, generateItemizedSignaturePayload, @@ -33,18 +34,18 @@ describe("📗 Stateful Pallet Storage Signature Required", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await createProviderKeysAndId(fundingSource); + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); assert.notEqual(providerId, undefined, "setup should populate providerId"); assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); // Create a schema for Itemized PayloadLocation const createSchema = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized"); - const { target: event } = await createSchema.fundAndSend(fundingSource); + const { target: event } = await createSchema.signAndSend(); itemizedSchemaId = event!.data.schemaId; // Create a schema for Paginated PayloadLocation const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Paginated"); - const { target: event2 } = await createSchema2.fundAndSend(fundingSource); + const { target: event2 } = await createSchema2.signAndSend(); assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); paginatedSchemaId = event2!.data.schemaId; diff --git a/e2e/sudo/sudo.test.ts b/e2e/sudo/sudo.test.ts index 28b5503dfe..55cc2b0dde 100644 --- a/e2e/sudo/sudo.test.ts +++ b/e2e/sudo/sudo.test.ts @@ -10,6 +10,7 @@ import { getSudo, getFundingSource } from "../scaffolding/funding"; import { AVRO_GRAPH_CHANGE } from "../schemas/fixtures/avroGraphChangeSchemaType"; import { Bytes, u16 } from "@polkadot/types"; import { + DOLLARS, createDelegatorAndDelegation, createProviderKeysAndId, getCurrentItemizedHash, @@ -85,7 +86,7 @@ describe("Sudo required", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await 
createProviderKeysAndId(fundingSource);
+      [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS);
       assert.notEqual(providerId, undefined, "setup should populate providerId");
       assert.notEqual(providerKeys, undefined, "setup should populate providerKeys");
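
For reference, here is a minimal sketch of how a test uses the new `assertHasMessage` helper instead of indexing into `response.content[response.content.length - 1]`. The `schemaId` and `starting_block` values are placeholders for whatever the surrounding suite's `before()` hook provides, and the test name is illustrative; the RPC call and predicate mirror the usage shown in the diff above.

```typescript
import "@frequency-chain/api-augment";
import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers";
import { assertHasMessage } from "../scaffolding/helpers";

it("retrieves the added message without depending on its position", async function () {
  // Placeholders: in a real test these come from the suite's setup.
  const schemaId = 1;
  const starting_block = 0;

  const get = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(schemaId, {
    from_block: starting_block,
    from_index: 0,
    to_block: starting_block + 999,
    page_size: 999,
  });

  // Passes if any message on the page matches, so messages added concurrently by
  // other tests against the same schema no longer make this assertion flaky.
  assertHasMessage(get, (x) => x.payload.isSome && x.payload.toString() === "0xdeadbeef");
});
```

On the immortal-era point, the usual polkadot.js approach is to pass `{ era: 0 }` in the signing options, e.g. `extrinsic.signAndSend(keys, { era: 0 }, callback)`; the truncated `extrinsicHelpers.ts` hunks above do not show the exact change made there.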