diff --git a/.github/workflows/zxcron-extended-test-suite.yaml b/.github/workflows/zxcron-extended-test-suite.yaml index c75b51a70ff3..20057643f658 100644 --- a/.github/workflows/zxcron-extended-test-suite.yaml +++ b/.github/workflows/zxcron-extended-test-suite.yaml @@ -64,18 +64,33 @@ jobs: - name: Check for tags id: check-tags-exist run: | - XTS_COMMIT=$(git rev-list -n 1 ${XTS_CANDIDATE_TAG}) + # Check if the tag exists and if so grab its commit id set +e - git branch --contains ${XTS_COMMIT} | grep --quiet develop >/dev/null 2>&1 + XTS_COMMIT=$(git rev-list -n 1 "${XTS_CANDIDATE_TAG}") >/dev/null 2>&1 + XTS_COMMIT_FOUND="${?}" + set -e + + # Cancel out if the tag does not exist + if [[ "${XTS_COMMIT_FOUND}" -ne 0 ]]; then + gh run cancel ${{ github.run_id }} + fi + + # Check if the tag exists on the develop branch + set +e + git branch --contains "${XTS_COMMIT}" | grep --quiet develop >/dev/null 2>&1 BRANCH_ON_DEVELOP="${?}" set -e - if [[ -n "${XTS_COMMIT}" && "${BRANCH_ON_DEVELOP}" -eq 0 ]]; then + + # If the tag exists on the Develop Branch set the output variables as appropriate + # Otherwise cancel out + if [[ "${BRANCH_ON_DEVELOP}" -eq 0 ]]; then echo "xts-tag-exists=true" >> $GITHUB_OUTPUT echo "xts-tag-commit=${XTS_COMMIT}" >> $GITHUB_OUTPUT echo "### Commit has been tagged as an XTS-Candidate" >> $GITHUB_STEP_SUMMARY echo "xts-tag-commit=${XTS_COMMIT}" >> $GITHUB_STEP_SUMMARY - git push --delete origin ${XTS_CANDIDATE_TAG} - git tag -d ${XTS_CANDIDATE_TAG} + + git push --delete origin "${XTS_CANDIDATE_TAG}" + git tag -d "${XTS_CANDIDATE_TAG}" else gh run cancel ${{ github.run_id }} fi @@ -152,14 +167,13 @@ jobs: name: Tag as XTS-Passing runs-on: network-node-linux-medium needs: -# - abbreviated-panel + - abbreviated-panel - extended-test-suite - fetch-xts-candidate -# - hedera-node-jrs-panel -# if: ${{ needs.abbreviated-panel.result == 'success' || -# needs.extended-test-suite.result == 'success' || -# needs.hedera-node-jrs-panel.result == 'success' }} - if: ${{ needs.extended-test-suite.result == 'success' }} + - hedera-node-jrs-panel + if: ${{ needs.abbreviated-panel.result == 'success' || + needs.extended-test-suite.result == 'success' || + needs.hedera-node-jrs-panel.result == 'success' }} steps: - name: Harden Runner uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 # v2.8.0 diff --git a/.github/workflows/zxcron-promote-build-candidate.yaml b/.github/workflows/zxcron-promote-build-candidate.yaml index 52b81c7dc04f..47543f5ce29e 100644 --- a/.github/workflows/zxcron-promote-build-candidate.yaml +++ b/.github/workflows/zxcron-promote-build-candidate.yaml @@ -85,7 +85,7 @@ jobs: - name: Checkout Tagged Code id: checkout-tagged-code - if: ${{ needs.determine-build-candidate.build-candidate-exists == 'true' }} + if: ${{ needs.determine-build-candidate.outputs.build-candidate-exists == 'true' }} uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1 with: fetch-depth: '0' diff --git a/.github/workflows/zxf-prepare-extended-test-suite.yaml b/.github/workflows/zxf-prepare-extended-test-suite.yaml index abfb0dad3d6b..e3be9509ca48 100644 --- a/.github/workflows/zxf-prepare-extended-test-suite.yaml +++ b/.github/workflows/zxf-prepare-extended-test-suite.yaml @@ -82,8 +82,19 @@ jobs: # move the tag if successful - name: Tag Code and push run: | - git push --delete origin "${XTS_CANDIDATE_TAG}" - git tag --delete "${XTS_CANDIDATE_TAG}" + # Check if the tag exists + set +e + git rev-list -n 1 "${XTS_CANDIDATE_TAG}" >/dev/null 2>&1 + 
XTS_COMMIT_FOUND="${?}" + set -e + + # Delete the tag if it does exist + if [[ "${XTS_COMMIT_FOUND}" -eq 0 ]]; then + git push --delete origin "${XTS_CANDIDATE_TAG}" + git tag -d "${XTS_CANDIDATE_TAG}" + fi + + # Create the new tag git tag --annotate "${XTS_CANDIDATE_TAG}" --message "chore: tagging commit for XTS promotion" git push --set-upstream origin --tags diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/DispatchType.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/DispatchType.java index f4dbfa52ec46..95f207776e67 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/DispatchType.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/gas/DispatchType.java @@ -61,6 +61,7 @@ public enum DispatchType { TOKEN_INFO(HederaFunctionality.TOKEN_GET_INFO, DEFAULT), UPDATE_TOKEN_CUSTOM_FEES(HederaFunctionality.TOKEN_FEE_SCHEDULE_UPDATE, DEFAULT), TOKEN_AIRDROP(HederaFunctionality.TOKEN_AIRDROP, DEFAULT), + TOKEN_CLAIM_AIRDROP(HederaFunctionality.TOKEN_CLAIM_AIRDROP, DEFAULT), TOKEN_REJECT(HederaFunctionality.TOKEN_REJECT, TOKEN_FUNGIBLE_COMMON); private final HederaFunctionality functionality; diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/processors/HtsTranslatorsModule.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/processors/HtsTranslatorsModule.java index 3816ab4da09e..2c869032a381 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/processors/HtsTranslatorsModule.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/processors/HtsTranslatorsModule.java @@ -23,6 +23,7 @@ import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.associations.AssociationsTranslator; import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.balanceof.BalanceOfTranslator; import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.burn.BurnTranslator; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops.TokenClaimAirdropTranslator; import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.create.CreateTranslator; import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.customfees.TokenCustomFeesTranslator; import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.decimals.DecimalsTranslator; @@ -450,6 +451,15 @@ static CallTranslator provideTokenAirdropTranslator( return translator; } + @Provides + @Singleton + @IntoSet + @Named("HtsTranslators") + static CallTranslator provideTokenClaimAirdropDecoder( + @NonNull final TokenClaimAirdropTranslator translator) { + return translator; + } + @Provides @Singleton @IntoSet diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropDecoder.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropDecoder.java new file mode 100644 index 000000000000..433dafee8d5c --- /dev/null 
+++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropDecoder.java @@ -0,0 +1,146 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.PENDING_AIRDROP_ID_LIST_TOO_LONG; +import static com.hedera.node.app.service.contract.impl.utils.ConversionUtils.asTokenId; +import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; + +import com.esaulpaugh.headlong.abi.Address; +import com.esaulpaugh.headlong.abi.Tuple; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.NftID; +import com.hedera.hapi.node.base.PendingAirdropId; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.token.TokenClaimAirdropTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.HtsCallAttempt; +import com.hedera.node.config.data.TokensConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.Arrays; +import javax.inject.Inject; +import javax.inject.Singleton; + +@Singleton +public class TokenClaimAirdropDecoder { + + // Tuple indexes + private static final int TRANSFER_LIST = 0; + private static final int SENDER = 0; + private static final int RECEIVER = 1; + private static final int TOKEN = 2; + private static final int SERIAL = 3; + private static final int HRC_SENDER = 0; + private static final int HRC_SERIAL = 1; + + @Inject + public TokenClaimAirdropDecoder() { + // Dagger2 + } + + public TransactionBody decodeTokenClaimAirdrop(@NonNull final HtsCallAttempt attempt) { + final var call = TokenClaimAirdropTranslator.CLAIM_AIRDROP.decodeCall(attempt.inputBytes()); + final var maxPendingAirdropsToClaim = + attempt.configuration().getConfigData(TokensConfig.class).maxAllowedPendingAirdropsToClaim(); + validateFalse(((Tuple[]) call.get(0)).length > maxPendingAirdropsToClaim, PENDING_AIRDROP_ID_LIST_TOO_LONG); + + final var transferList = (Tuple[]) call.get(TRANSFER_LIST); + final var pendingAirdrops = new ArrayList(); + Arrays.stream(transferList).forEach(transfer -> { + final var senderAddress = (Address) transfer.get(SENDER); + final var receiverAddress = (Address) transfer.get(RECEIVER); + final var tokenAddress = (Address) transfer.get(TOKEN); + final var serial = (long) transfer.get(SERIAL); + + final var senderId = attempt.addressIdConverter().convert(senderAddress); + final var receiverId = attempt.addressIdConverter().convert(receiverAddress); + final var tokenId = asTokenId(tokenAddress); + + final 
var token = attempt.enhancement().nativeOperations().getToken(tokenId.tokenNum()); + validateTrue(token != null, INVALID_TOKEN_ID); + if (token.tokenType().equals(TokenType.FUNGIBLE_COMMON)) { + pendingAirdrops.add(pendingFTAirdrop(senderId, receiverId, tokenId)); + } else { + pendingAirdrops.add(pendingNFTAirdrop(senderId, receiverId, tokenId, serial)); + } + }); + + return TransactionBody.newBuilder() + .tokenClaimAirdrop(TokenClaimAirdropTransactionBody.newBuilder().pendingAirdrops(pendingAirdrops)) + .build(); + } + + public TransactionBody decodeHrcClaimAirdropFt(@NonNull final HtsCallAttempt attempt) { + final var call = TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_FT.decodeCall(attempt.inputBytes()); + + // As the Token Claim is an operation for the receiver of an Airdrop, + // hence the `transaction sender` in the HRC scenario is in reality the `Airdrop receiver`. + final var receiverId = attempt.senderId(); + final var senderAddress = (Address) call.get(HRC_SENDER); + final var token = attempt.redirectTokenId(); + validateTrue(token != null, INVALID_TOKEN_ID); + final var senderId = attempt.addressIdConverter().convert(senderAddress); + + return TransactionBody.newBuilder() + .tokenClaimAirdrop(TokenClaimAirdropTransactionBody.newBuilder() + .pendingAirdrops(pendingFTAirdrop(senderId, receiverId, token))) + .build(); + } + + public TransactionBody decodeHrcClaimAirdropNft(@NonNull final HtsCallAttempt attempt) { + final var call = TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_NFT.decodeCall(attempt.inputBytes()); + + // As the Token Claim is an operation for the receiver of an Airdrop, + // hence the `transaction sender` in the HRC scenario is in reality the `Airdrop receiver`. + final var receiverId = attempt.senderId(); + final var senderAddress = (Address) call.get(HRC_SENDER); + final var serial = (long) call.get(HRC_SERIAL); + final var token = attempt.redirectTokenId(); + validateTrue(token != null, INVALID_TOKEN_ID); + final var senderId = attempt.addressIdConverter().convert(senderAddress); + + return TransactionBody.newBuilder() + .tokenClaimAirdrop(TokenClaimAirdropTransactionBody.newBuilder() + .pendingAirdrops(pendingNFTAirdrop(senderId, receiverId, token, serial))) + .build(); + } + + private PendingAirdropId pendingFTAirdrop( + @NonNull final AccountID senderId, @NonNull final AccountID receiverId, @NonNull final TokenID tokenId) { + return PendingAirdropId.newBuilder() + .senderId(senderId) + .receiverId(receiverId) + .fungibleTokenType(tokenId) + .build(); + } + + private PendingAirdropId pendingNFTAirdrop( + @NonNull final AccountID senderId, + @NonNull final AccountID receiverId, + @NonNull final TokenID tokenId, + final long serial) { + return PendingAirdropId.newBuilder() + .senderId(senderId) + .receiverId(receiverId) + .nonFungibleToken(NftID.newBuilder().tokenId(tokenId).serialNumber(serial)) + .build(); + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropTranslator.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropTranslator.java new file mode 100644 index 000000000000..325b7ec4a20d --- /dev/null +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/com/hedera/node/app/service/contract/impl/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropTranslator.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2024 
Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops; + +import com.esaulpaugh.headlong.abi.Function; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.contract.impl.exec.gas.DispatchType; +import com.hedera.node.app.service.contract.impl.exec.gas.SystemContractGasCalculator; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.common.AbstractCallTranslator; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.common.Call; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.DispatchForResponseCodeHtsCall; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.HtsCallAttempt; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.ReturnTypes; +import com.hedera.node.app.service.contract.impl.hevm.HederaWorldUpdater; +import com.hedera.node.config.data.ContractsConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Inject; + +public class TokenClaimAirdropTranslator extends AbstractCallTranslator<HtsCallAttempt> { + public static final Function CLAIM_AIRDROP = + new Function("claimAirdrops((address,address,address,int64)[])", ReturnTypes.INT_64); + public static final Function HRC_CLAIM_AIRDROP_FT = new Function("claimAirdropFT(address)", ReturnTypes.INT_64); + public static final Function HRC_CLAIM_AIRDROP_NFT = + new Function("claimAirdropNFT(address,int64)", ReturnTypes.INT_64); + + private final TokenClaimAirdropDecoder decoder; + + @Inject + public TokenClaimAirdropTranslator(@NonNull final TokenClaimAirdropDecoder decoder) { + this.decoder = decoder; + } + + @Override + public boolean matches(@NonNull final HtsCallAttempt attempt) { + final var claimAirdropEnabled = + attempt.configuration().getConfigData(ContractsConfig.class).systemContractClaimAirdropsEnabled(); + return attempt.isTokenRedirect() + ? attempt.isSelectorIfConfigEnabled(HRC_CLAIM_AIRDROP_FT, claimAirdropEnabled) + || attempt.isSelectorIfConfigEnabled(HRC_CLAIM_AIRDROP_NFT, claimAirdropEnabled) + : attempt.isSelectorIfConfigEnabled(CLAIM_AIRDROP, claimAirdropEnabled); + } + + @Override + public Call callFrom(@NonNull final HtsCallAttempt attempt) { + return new DispatchForResponseCodeHtsCall( + attempt, + attempt.isSelector(CLAIM_AIRDROP) ?
bodyForClassic(attempt) : bodyForHRC(attempt), + TokenClaimAirdropTranslator::gasRequirement); + } + + public static long gasRequirement( + @NonNull final TransactionBody body, + @NonNull final SystemContractGasCalculator systemContractGasCalculator, + @NonNull final HederaWorldUpdater.Enhancement enhancement, + @NonNull final AccountID payerId) { + return systemContractGasCalculator.gasRequirement(body, DispatchType.TOKEN_CLAIM_AIRDROP, payerId); + } + + private TransactionBody bodyForClassic(@NonNull final HtsCallAttempt attempt) { + return decoder.decodeTokenClaimAirdrop(attempt); + } + + private TransactionBody bodyForHRC(@NonNull final HtsCallAttempt attempt) { + if (attempt.isSelector(HRC_CLAIM_AIRDROP_FT)) { + return decoder.decodeHrcClaimAirdropFt(attempt); + } else { + return decoder.decodeHrcClaimAirdropNft(attempt); + } + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropDecoderTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropDecoderTest.java new file mode 100644 index 000000000000..5b08ccfa2231 --- /dev/null +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropDecoderTest.java @@ -0,0 +1,330 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.contract.impl.test.exec.systemcontracts.hts.claimairdrops; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.PENDING_AIRDROP_ID_LIST_TOO_LONG; +import static com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops.TokenClaimAirdropTranslator.CLAIM_AIRDROP; +import static com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops.TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_FT; +import static com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops.TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_NFT; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.FUNGIBLE_TOKEN; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.FUNGIBLE_TOKEN_HEADLONG_ADDRESS; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.FUNGIBLE_TOKEN_ID; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.NON_FUNGIBLE_TOKEN; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.NON_FUNGIBLE_TOKEN_HEADLONG_ADDRESS; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.NON_FUNGIBLE_TOKEN_ID; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.OWNER_ACCOUNT_AS_ADDRESS; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.OWNER_ID; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.SENDER_ID; +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.asHeadlongAddress; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.lenient; + +import com.esaulpaugh.headlong.abi.Tuple; +import com.hedera.hapi.node.base.NftID; +import com.hedera.hapi.node.base.PendingAirdropId; +import com.hedera.node.app.service.contract.impl.exec.scope.HederaNativeOperations; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.AddressIdConverter; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.HtsCallAttempt; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops.TokenClaimAirdropDecoder; +import com.hedera.node.app.service.contract.impl.hevm.HederaWorldUpdater.Enhancement; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.config.data.TokensConfig; +import com.swirlds.config.api.Configuration; +import java.util.ArrayList; +import org.apache.tuweni.bytes.Bytes; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class TokenClaimAirdropDecoderTest { + @Mock + private HtsCallAttempt attempt; + + @Mock + private AddressIdConverter addressIdConverter; + + @Mock + private Enhancement enhancement; + + @Mock + private HederaNativeOperations nativeOperations; + + @Mock + private Configuration configuration; + + @Mock + private TokensConfig tokensConfig; + + private TokenClaimAirdropDecoder subject; + + @BeforeEach + void setup() { + subject = new TokenClaimAirdropDecoder(); + + lenient().when(attempt.addressIdConverter()).thenReturn(addressIdConverter); + } + + @Test + void claimAirdropDecoder1FTTest() { + // 
given: + given(attempt.configuration()).willReturn(configuration); + given(attempt.enhancement()).willReturn(enhancement); + given(enhancement.nativeOperations()).willReturn(nativeOperations); + given(configuration.getConfigData(TokensConfig.class)).willReturn(tokensConfig); + given(tokensConfig.maxAllowedPendingAirdropsToClaim()).willReturn(10); + given(nativeOperations.getToken(FUNGIBLE_TOKEN_ID.tokenNum())).willReturn(FUNGIBLE_TOKEN); + given(addressIdConverter.convert(asHeadlongAddress(OWNER_ID.accountNum()))) + .willReturn(OWNER_ID); + given(addressIdConverter.convert(asHeadlongAddress(SENDER_ID.accountNum()))) + .willReturn(SENDER_ID); + + final var encoded = Bytes.wrapByteBuffer(CLAIM_AIRDROP.encodeCall(Tuple.singleton(new Tuple[] { + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L) + }))); + given(attempt.inputBytes()).willReturn(encoded.toArrayUnsafe()); + + var expected = new ArrayList(); + expected.add(PendingAirdropId.newBuilder() + .senderId(SENDER_ID) + .receiverId(OWNER_ID) + .fungibleTokenType(FUNGIBLE_TOKEN_ID) + .build()); + // when: + final var decoded = subject.decodeTokenClaimAirdrop(attempt); + + // then: + assertNotNull(decoded.tokenClaimAirdrop()); + assertEquals(expected, decoded.tokenClaimAirdrop().pendingAirdrops()); + } + + @Test + void failsIfPendingAirdropsAboveLimit() { + // given: + given(attempt.configuration()).willReturn(configuration); + given(configuration.getConfigData(TokensConfig.class)).willReturn(tokensConfig); + given(tokensConfig.maxAllowedPendingAirdropsToClaim()).willReturn(10); + + final var encoded = Bytes.wrapByteBuffer(CLAIM_AIRDROP.encodeCall(Tuple.singleton(new Tuple[] { + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L), + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L) + }))); + given(attempt.inputBytes()).willReturn(encoded.toArrayUnsafe()); + + assertThrows(HandleException.class, () -> subject.decodeTokenClaimAirdrop(attempt)); + } + + @Test + void failsIfTokenIsNull() { + // given: + given(attempt.configuration()).willReturn(configuration); + given(attempt.enhancement()).willReturn(enhancement); + given(enhancement.nativeOperations()).willReturn(nativeOperations); + 
given(configuration.getConfigData(TokensConfig.class)).willReturn(tokensConfig); + given(tokensConfig.maxAllowedPendingAirdropsToClaim()).willReturn(10); + given(nativeOperations.getToken(FUNGIBLE_TOKEN_ID.tokenNum())).willReturn(null); + given(addressIdConverter.convert(asHeadlongAddress(SENDER_ID.accountNum()))) + .willReturn(SENDER_ID); + given(addressIdConverter.convert(OWNER_ACCOUNT_AS_ADDRESS)).willReturn(OWNER_ID); + + final var encoded = Bytes.wrapByteBuffer(CLAIM_AIRDROP.encodeCall(Tuple.singleton(new Tuple[] { + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 0L) + }))); + given(attempt.inputBytes()).willReturn(encoded.toArrayUnsafe()); + + assertThrows( + HandleException.class, + () -> subject.decodeTokenClaimAirdrop(attempt), + PENDING_AIRDROP_ID_LIST_TOO_LONG.protoName()); + } + + @Test + void failsIfTokenIsNullHRCFT() { + final var encoded = Bytes.wrapByteBuffer(HRC_CLAIM_AIRDROP_FT.encodeCallWithArgs(OWNER_ACCOUNT_AS_ADDRESS)); + given(attempt.inputBytes()).willReturn(encoded.toArrayUnsafe()); + + assertThrows( + HandleException.class, + () -> subject.decodeHrcClaimAirdropFt(attempt), + PENDING_AIRDROP_ID_LIST_TOO_LONG.protoName()); + } + + @Test + void failsIfTokenIsNullHRCNFT() { + final var encoded = + Bytes.wrapByteBuffer(HRC_CLAIM_AIRDROP_NFT.encodeCallWithArgs(OWNER_ACCOUNT_AS_ADDRESS, 1L)); + given(attempt.inputBytes()).willReturn(encoded.toArrayUnsafe()); + + assertThrows( + HandleException.class, + () -> subject.decodeHrcClaimAirdropNft(attempt), + PENDING_AIRDROP_ID_LIST_TOO_LONG.protoName()); + } + + @Test + void claimAirdropDecoder1NFTTest() { + // given: + given(attempt.configuration()).willReturn(configuration); + given(attempt.enhancement()).willReturn(enhancement); + given(enhancement.nativeOperations()).willReturn(nativeOperations); + given(configuration.getConfigData(TokensConfig.class)).willReturn(tokensConfig); + given(tokensConfig.maxAllowedPendingAirdropsToClaim()).willReturn(10); + given(nativeOperations.getToken(NON_FUNGIBLE_TOKEN_ID.tokenNum())).willReturn(NON_FUNGIBLE_TOKEN); + given(addressIdConverter.convert(asHeadlongAddress(SENDER_ID.accountNum()))) + .willReturn(SENDER_ID); + given(addressIdConverter.convert(OWNER_ACCOUNT_AS_ADDRESS)).willReturn(OWNER_ID); + + final var encoded = Bytes.wrapByteBuffer(CLAIM_AIRDROP.encodeCall(Tuple.singleton(new Tuple[] { + Tuple.of( + asHeadlongAddress(SENDER_ID.accountNum()), + OWNER_ACCOUNT_AS_ADDRESS, + NON_FUNGIBLE_TOKEN_HEADLONG_ADDRESS, + 1L) + }))); + given(attempt.inputBytes()).willReturn(encoded.toArrayUnsafe()); + var expected = new ArrayList(); + expected.add(PendingAirdropId.newBuilder() + .senderId(SENDER_ID) + .receiverId(OWNER_ID) + .nonFungibleToken( + NftID.newBuilder().tokenId(NON_FUNGIBLE_TOKEN_ID).serialNumber(1L)) + .build()); + + // when: + final var decoded = subject.decodeTokenClaimAirdrop(attempt); + + // then: + assertNotNull(decoded.tokenClaimAirdrop()); + assertEquals(expected, decoded.tokenClaimAirdrop().pendingAirdrops()); + } + + @Test + void claimTAirdropHRC() { + // given: + given(attempt.redirectTokenId()).willReturn(FUNGIBLE_TOKEN_ID); + given(attempt.senderId()).willReturn(OWNER_ID); + + final var encoded = Bytes.wrapByteBuffer( + HRC_CLAIM_AIRDROP_FT.encodeCallWithArgs(asHeadlongAddress(SENDER_ID.accountNum()))); + given(attempt.inputBytes()).willReturn(encoded.toArrayUnsafe()); + given(addressIdConverter.convert(asHeadlongAddress(SENDER_ID.accountNum()))) + .willReturn(SENDER_ID); + + final var decoded 
= subject.decodeHrcClaimAirdropFt(attempt); + var expected = new ArrayList(); + expected.add(PendingAirdropId.newBuilder() + .senderId(SENDER_ID) + .receiverId(OWNER_ID) + .fungibleTokenType(FUNGIBLE_TOKEN_ID) + .build()); + + // then: + assertNotNull(decoded.tokenClaimAirdrop()); + assertEquals(expected, decoded.tokenClaimAirdrop().pendingAirdrops()); + } + + @Test + void claimNFTAirdropHRC() { + // given: + given(attempt.redirectTokenId()).willReturn(NON_FUNGIBLE_TOKEN_ID); + given(attempt.senderId()).willReturn(OWNER_ID); + + final var encoded = Bytes.wrapByteBuffer( + HRC_CLAIM_AIRDROP_NFT.encodeCallWithArgs(asHeadlongAddress(SENDER_ID.accountNum()), 1L)); + given(attempt.inputBytes()).willReturn(encoded.toArrayUnsafe()); + given(addressIdConverter.convert(asHeadlongAddress(SENDER_ID.accountNum()))) + .willReturn(SENDER_ID); + + final var decoded = subject.decodeHrcClaimAirdropNft(attempt); + var expected = new ArrayList(); + expected.add(PendingAirdropId.newBuilder() + .senderId(SENDER_ID) + .receiverId(OWNER_ID) + .nonFungibleToken( + NftID.newBuilder().tokenId(NON_FUNGIBLE_TOKEN_ID).serialNumber(1L)) + .build()); + // then: + assertNotNull(decoded.tokenClaimAirdrop()); + assertEquals(expected, decoded.tokenClaimAirdrop().pendingAirdrops()); + } +} diff --git a/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropTranslatorTest.java b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropTranslatorTest.java new file mode 100644 index 000000000000..909542db94c2 --- /dev/null +++ b/hedera-node/hedera-smart-contract-service-impl/src/test/java/com/hedera/node/app/service/contract/impl/test/exec/systemcontracts/hts/claimairdrops/TokenClaimAirdropTranslatorTest.java @@ -0,0 +1,304 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.contract.impl.test.exec.systemcontracts.hts.claimairdrops; + +import static com.hedera.node.app.service.contract.impl.test.TestHelpers.SENDER_ID; +import static com.hedera.node.app.service.contract.impl.test.exec.systemcontracts.CallAttemptHelpers.prepareHtsAttemptWithSelectorAndCustomConfig; +import static com.hedera.node.app.service.contract.impl.test.exec.systemcontracts.CallAttemptHelpers.prepareHtsAttemptWithSelectorForRedirectWithConfig; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.when; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.contract.impl.exec.gas.DispatchType; +import com.hedera.node.app.service.contract.impl.exec.gas.SystemContractGasCalculator; +import com.hedera.node.app.service.contract.impl.exec.scope.HederaNativeOperations; +import com.hedera.node.app.service.contract.impl.exec.scope.VerificationStrategies; +import com.hedera.node.app.service.contract.impl.exec.scope.VerificationStrategy; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.AddressIdConverter; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.DispatchForResponseCodeHtsCall; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.HtsCallAttempt; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops.TokenClaimAirdropDecoder; +import com.hedera.node.app.service.contract.impl.exec.systemcontracts.hts.claimairdrops.TokenClaimAirdropTranslator; +import com.hedera.node.app.service.contract.impl.hevm.HederaWorldUpdater; +import com.hedera.node.config.data.ContractsConfig; +import com.swirlds.config.api.Configuration; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenClaimAirdropTranslatorTest { + + @Mock + private TokenClaimAirdropDecoder decoder; + + @Mock + private HtsCallAttempt attempt; + + @Mock + private Configuration configuration; + + @Mock + private ContractsConfig contractsConfig; + + @Mock + private HederaWorldUpdater.Enhancement enhancement; + + @Mock + private AddressIdConverter addressIdConverter; + + @Mock + private VerificationStrategies verificationStrategies; + + @Mock + private VerificationStrategy verificationStrategy; + + @Mock + private SystemContractGasCalculator gasCalculator; + + @Mock + private HederaNativeOperations nativeOperations; + + @Mock + private TransactionBody transactionBody; + + @Mock + private AccountID payerId; + + private TokenClaimAirdropTranslator subject; + + @BeforeEach + void setUp() { + subject = new TokenClaimAirdropTranslator(decoder); + } + + @Test + void testMatchesWhenClaimAirdropEnabled() { + // given: + given(configuration.getConfigData(ContractsConfig.class)).willReturn(contractsConfig); + given(contractsConfig.systemContractClaimAirdropsEnabled()).willReturn(true); + attempt = prepareHtsAttemptWithSelectorAndCustomConfig( + TokenClaimAirdropTranslator.CLAIM_AIRDROP, + subject, + enhancement, + 
addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + boolean matches = subject.matches(attempt); + + // then: + assertTrue(matches); + } + + @Test + void testMatchesWhenClaimAirdropDisabled() { + // given: + + given(configuration.getConfigData(ContractsConfig.class)).willReturn(contractsConfig); + given(contractsConfig.systemContractClaimAirdropsEnabled()).willReturn(false); + attempt = prepareHtsAttemptWithSelectorAndCustomConfig( + TokenClaimAirdropTranslator.CLAIM_AIRDROP, + subject, + enhancement, + addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + boolean matches = subject.matches(attempt); + + // then: + assertFalse(matches); + } + + @Test + void testMatchesHRCClaimFT() { + // given: + given(configuration.getConfigData(ContractsConfig.class)).willReturn(contractsConfig); + given(contractsConfig.systemContractClaimAirdropsEnabled()).willReturn(true); + given(enhancement.nativeOperations()).willReturn(nativeOperations); + attempt = prepareHtsAttemptWithSelectorForRedirectWithConfig( + TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_FT, + subject, + enhancement, + addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + boolean matches = subject.matches(attempt); + + // then: + assertTrue(matches); + } + + @Test + void testMatchesHRCClaimNFT() { + // given: + given(configuration.getConfigData(ContractsConfig.class)).willReturn(contractsConfig); + given(contractsConfig.systemContractClaimAirdropsEnabled()).willReturn(true); + given(enhancement.nativeOperations()).willReturn(nativeOperations); + attempt = prepareHtsAttemptWithSelectorForRedirectWithConfig( + TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_NFT, + subject, + enhancement, + addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + boolean matches = subject.matches(attempt); + + // then: + assertTrue(matches); + } + + @Test + void testMatchesHRCClaimFTDisabled() { + // given: + given(configuration.getConfigData(ContractsConfig.class)).willReturn(contractsConfig); + given(contractsConfig.systemContractClaimAirdropsEnabled()).willReturn(false); + given(enhancement.nativeOperations()).willReturn(nativeOperations); + attempt = prepareHtsAttemptWithSelectorForRedirectWithConfig( + TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_FT, + subject, + enhancement, + addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + boolean matches = subject.matches(attempt); + + // then: + assertFalse(matches); + } + + @Test + void testMatchesHRCClaimNFTDisabled() { + // given: + given(configuration.getConfigData(ContractsConfig.class)).willReturn(contractsConfig); + given(contractsConfig.systemContractClaimAirdropsEnabled()).willReturn(false); + given(enhancement.nativeOperations()).willReturn(nativeOperations); + attempt = prepareHtsAttemptWithSelectorForRedirectWithConfig( + TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_NFT, + subject, + enhancement, + addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + boolean matches = subject.matches(attempt); + + // then: + assertFalse(matches); + } + + @Test + void testCallFromForClassic() { + // given: + given(addressIdConverter.convertSender(any())).willReturn(SENDER_ID); + given(verificationStrategies.activatingOnlyContractKeysFor(any(), anyBoolean(), any())) + .willReturn(verificationStrategy); + attempt = prepareHtsAttemptWithSelectorAndCustomConfig( + 
TokenClaimAirdropTranslator.CLAIM_AIRDROP, + subject, + enhancement, + addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + var call = subject.callFrom(attempt); + + // then: + assertEquals(DispatchForResponseCodeHtsCall.class, call.getClass()); + } + + @Test + void callFromHRCClaimFTAirdrop() { + // given: + given(addressIdConverter.convertSender(any())).willReturn(SENDER_ID); + given(verificationStrategies.activatingOnlyContractKeysFor(any(), anyBoolean(), any())) + .willReturn(verificationStrategy); + attempt = prepareHtsAttemptWithSelectorAndCustomConfig( + TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_FT, + subject, + enhancement, + addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + var call = subject.callFrom(attempt); + + // then: + assertEquals(DispatchForResponseCodeHtsCall.class, call.getClass()); + } + + @Test + void callFromHRCCancelNFTAirdrop() { + // given: + given(addressIdConverter.convertSender(any())).willReturn(SENDER_ID); + given(verificationStrategies.activatingOnlyContractKeysFor(any(), anyBoolean(), any())) + .willReturn(verificationStrategy); + attempt = prepareHtsAttemptWithSelectorAndCustomConfig( + TokenClaimAirdropTranslator.HRC_CLAIM_AIRDROP_NFT, + subject, + enhancement, + addressIdConverter, + verificationStrategies, + gasCalculator, + configuration); + + // when: + var call = subject.callFrom(attempt); + + // then: + assertEquals(DispatchForResponseCodeHtsCall.class, call.getClass()); + } + + @Test + void testGasRequirement() { + long expectedGas = 1000L; + when(gasCalculator.gasRequirement(transactionBody, DispatchType.TOKEN_CLAIM_AIRDROP, payerId)) + .thenReturn(expectedGas); + + long gas = TokenClaimAirdropTranslator.gasRequirement(transactionBody, gasCalculator, enhancement, payerId); + + assertEquals(expectedGas, gas); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/translators/impl/TokenClaimAirdropSystemContractTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/translators/impl/TokenClaimAirdropSystemContractTest.java new file mode 100644 index 000000000000..11a8a14abc56 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/translators/impl/TokenClaimAirdropSystemContractTest.java @@ -0,0 +1,403 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.junit.support.translators.impl; + +import static com.hedera.services.bdd.junit.TestTags.SMART_CONTRACT; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.includingFungiblePendingAirdrop; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.includingNftPendingAirdrop; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAirdrop; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; + +import com.esaulpaugh.headlong.abi.Address; +import com.hedera.services.bdd.junit.HapiTest; +import com.hedera.services.bdd.junit.HapiTestLifecycle; +import com.hedera.services.bdd.junit.OrderedInIsolation; +import com.hedera.services.bdd.junit.support.TestLifecycle; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.SpecOperation; +import com.hedera.services.bdd.spec.dsl.annotations.Account; +import com.hedera.services.bdd.spec.dsl.annotations.Contract; +import com.hedera.services.bdd.spec.dsl.annotations.FungibleToken; +import com.hedera.services.bdd.spec.dsl.annotations.NonFungibleToken; +import com.hedera.services.bdd.spec.dsl.entities.SpecAccount; +import com.hedera.services.bdd.spec.dsl.entities.SpecContract; +import com.hedera.services.bdd.spec.dsl.entities.SpecFungibleToken; +import com.hedera.services.bdd.spec.dsl.entities.SpecNonFungibleToken; +import com.hedera.services.bdd.spec.dsl.operations.queries.GetBalanceOperation; +import com.hedera.services.bdd.spec.transactions.token.TokenMovement; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.stream.Stream; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.DynamicTest; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Tag; + +@Tag(SMART_CONTRACT) +@HapiTestLifecycle +@OrderedInIsolation +class TokenClaimAirdropSystemContractTest { + + @Contract(contract = "ClaimAirdrop", creationGas = 1_000_000L) + static SpecContract claimAirdrop; + + @Account(name = "sender", tinybarBalance = 100_000_000_000L) + static SpecAccount sender; + + @Account(name = "receiver", maxAutoAssociations = 0) + static SpecAccount receiver; + + @FungibleToken(name = "token", initialSupply = 1000) + static SpecFungibleToken token; + + @BeforeAll + public static void setUp(final @NonNull TestLifecycle lifecycle) { + lifecycle.doAdhoc( + sender.authorizeContract(claimAirdrop), + receiver.authorizeContract(claimAirdrop), + sender.associateTokens(token), + token.treasury().transferUnitsTo(sender, 1000, token)); + } + + @Order(0) + @HapiTest + @DisplayName("Can claim 1 fungible airdrop") + public Stream claimAirdrop() { + return hapiTest( + receiver.getBalance().andAssert(balance -> balance.hasTokenBalance(token.name(), 0)), + 
tokenAirdrop(moving(10, token.name()).between(sender.name(), receiver.name())) + .payingWith(sender.name()) + .via("tokenAirdrop"), + getTxnRecord("tokenAirdrop") + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( + moving(10, token.name()).between(sender.name(), receiver.name())))), + claimAirdrop + .call("claim", sender, receiver, token) + .payingWith(receiver) + .via("claimAirdrop"), + getTxnRecord("claimAirdrop").hasPriority(recordWith().pendingAirdropsCount(0)), + receiver.getBalance().andAssert(balance -> balance.hasTokenBalance(token.name(), 10))); + } + + @Order(1) + @HapiTest + @DisplayName("Can claim 1 nft airdrop") + public Stream claimNftAirdrop(@NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft) { + return hapiTest( + sender.associateTokens(nft), + nft.treasury().transferNFTsTo(sender, nft, 1L), + receiver.getBalance().andAssert(balance -> balance.hasTokenBalance(nft.name(), 0)), + tokenAirdrop(movingUnique(nft.name(), 1L).between(sender.name(), receiver.name())) + .payingWith(sender.name()) + .via("tokenAirdrop"), + getTxnRecord("tokenAirdrop") + .hasPriority(recordWith() + .pendingAirdrops(includingNftPendingAirdrop( + movingUnique(nft.name(), 1L).between(sender.name(), receiver.name())))), + claimAirdrop + .call("claimNFTAirdrop", sender, receiver, nft, 1L) + .payingWith(receiver) + .via("claimNFTAirdrop"), + getTxnRecord("claimNFTAirdrop").hasPriority(recordWith().pendingAirdropsCount(0)), + receiver.getBalance().andAssert(balance -> balance.hasTokenBalance(nft.name(), 1))); + } + + @Order(2) + @HapiTest + @DisplayName("Can claim 10 fungible airdrops") + public Stream claim10Airdrops( + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token1, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token2, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token3, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token4, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token5, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft1, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft2, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft3, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft4, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft5) { + final var tokenList = List.of(token1, token2, token3, token4, token5); + final var nftList = List.of(nft1, nft2, nft3, nft4, nft5); + return hapiTest(withOpContext((spec, opLog) -> { + allRunFor(spec, prepareTokensAndBalances(sender, receiver, tokenList, nftList)); + prepareAirdrops(tokenList, nftList, spec); + final var senders = prepareSenderAddresses( + spec, sender, sender, sender, sender, sender, sender, sender, sender, sender, sender); + final var receivers = prepareReceiverAddresses( + spec, receiver, receiver, receiver, receiver, receiver, receiver, receiver, receiver, receiver, + receiver); + final var tokens = prepareTokenAddresses(spec, token1, token2, token3, token4, token5); + final var nfts = prepareNftAddresses(spec, nft1, nft2, nft3, nft4, nft5); + final var combined = + Stream.concat(Arrays.stream(tokens), Arrays.stream(nfts)).toArray(Address[]::new); + final var serials = new long[] {0L, 0L, 0L, 0L, 0L, 1L, 1L, 1L, 1L, 1L}; + allRunFor( + spec, + claimAirdrop + .call("claimAirdrops", senders, receivers, combined, 
serials) + .via("claimAirdrops"), + getTxnRecord("claimAirdrops").hasPriority(recordWith().pendingAirdropsCount(0)), + checkForBalances(receiver, tokenList, nftList)); + })); + } + + @Order(3) + @HapiTest + @DisplayName("Can claim 3 fungible airdrops") + public Stream claim3Airdrops( + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token1, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token2, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token3) { + final var tokenList = List.of(token1, token2, token3); + return hapiTest(withOpContext((spec, opLog) -> { + allRunFor(spec, prepareTokensAndBalances(sender, receiver, tokenList, List.of())); + prepareAirdrops(tokenList, List.of(), spec); + final var senders = prepareSenderAddresses(spec, sender, sender, sender); + final var receivers = prepareReceiverAddresses(spec, receiver, receiver, receiver); + final var tokens = prepareTokenAddresses(spec, token1, token2, token3); + final var serials = new long[] {0L, 0L, 0L}; + allRunFor( + spec, + claimAirdrop + .call("claimAirdrops", senders, receivers, tokens, serials) + .via("claimAirdrops"), + getTxnRecord("claimAirdrops").hasPriority(recordWith().pendingAirdropsCount(0)), + checkForBalances(receiver, tokenList, List.of())); + })); + } + + @Order(4) + @HapiTest + @DisplayName("Fails to claim 11 pending airdrops") + public Stream failToClaim11Airdrops( + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token1, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token2, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token3, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token4, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token5, + @NonNull @FungibleToken(initialSupply = 1_000_000L) final SpecFungibleToken token6, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft1, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft2, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft3, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft4, + @NonNull @NonFungibleToken(numPreMints = 1) final SpecNonFungibleToken nft5) { + final var tokenList = List.of(token1, token2, token3, token4, token5, token6); + final var nftList = List.of(nft1, nft2, nft3, nft4, nft5); + return hapiTest(withOpContext((spec, opLog) -> { + allRunFor(spec, prepareTokensAndBalances(sender, receiver, tokenList, nftList)); + // Spread transactions to avoid hitting the max airdrops limit + prepareAirdrops(List.of(token1, token2, token3), List.of(), spec); + prepareAirdrops(List.of(token4, token5, token6), List.of(), spec); + prepareAirdrops(List.of(), nftList, spec); + final var senders = prepareSenderAddresses( + spec, sender, sender, sender, sender, sender, sender, sender, sender, sender, sender, sender); + final var receivers = prepareReceiverAddresses( + spec, receiver, receiver, receiver, receiver, receiver, receiver, receiver, receiver, receiver, + receiver, receiver); + final var tokens = prepareTokenAddresses(spec, token1, token2, token3, token4, token5); + final var nfts = prepareNftAddresses(spec, nft1, nft2, nft3, nft4, nft5); + final var combined = + Stream.concat(Arrays.stream(tokens), Arrays.stream(nfts)).toArray(Address[]::new); + final var serials = new long[] {0L, 0L, 0L, 0L, 0L, 0L, 1L, 1L, 1L, 1L, 
1L}; + allRunFor( + spec, + claimAirdrop + .call("claimAirdrops", senders, receivers, combined, serials) + .via("claimAirdrops") + .andAssert(txn -> txn.hasKnownStatus(CONTRACT_REVERT_EXECUTED))); + })); + } + + @Order(5) + @HapiTest + @DisplayName("Fails to claim pending airdrop with invalid token") + public Stream failToClaim1AirdropWithInvalidToken() { + return hapiTest(claimAirdrop + .call("claim", sender, receiver, receiver) + .payingWith(sender) + .via("claimAirdrop") + .andAssert(txn -> txn.hasKnownStatus(CONTRACT_REVERT_EXECUTED))); + } + + @Order(6) + @HapiTest + @DisplayName("Fails to claim pending airdrop with invalid sender") + public Stream failToClaim1AirdropWithInvalidSender() { + return hapiTest(claimAirdrop + .call("claim", token, receiver, token) + .payingWith(sender) + .via("claimAirdrop") + .andAssert(txn -> txn.hasKnownStatus(CONTRACT_REVERT_EXECUTED))); + } + + @Order(7) + @HapiTest + @DisplayName("Fails to claim airdrop having no pending airdrops") + public Stream failToClaimAirdropWhenThereAreNoPending() { + return hapiTest(claimAirdrop + .call("claim", sender, receiver, token) + .payingWith(sender) + .via("claimAirdrop") + .andAssert(txn -> txn.hasKnownStatus(CONTRACT_REVERT_EXECUTED))); + } + + @Order(8) + @HapiTest + @DisplayName("Fails to claim pending airdrop with invalid receiver") + public Stream failToClaim1AirdropWithInvalidReceiver() { + return hapiTest(claimAirdrop + .call("claim", sender, token, token) + .payingWith(sender) + .via("claimAirdrop") + .andAssert(txn -> txn.hasKnownStatus(CONTRACT_REVERT_EXECUTED))); + } + + @Order(9) + @HapiTest + @DisplayName("Fails to claim nft airdrop with invalid nft") + public Stream failToClaim1AirdropWithInvalidNft() { + return hapiTest(claimAirdrop + .call("claimNFTAirdrop", sender, receiver, receiver, 1L) + .payingWith(sender) + .via("claimAirdrop") + .andAssert(txn -> txn.hasKnownStatus(CONTRACT_REVERT_EXECUTED))); + } + + @Order(10) + @HapiTest + @DisplayName("Fails to claim nft airdrop with invalid nft serial") + public Stream failToClaim1AirdropWithInvalidSerial(@NonFungibleToken final SpecNonFungibleToken nft) { + return hapiTest( + sender.associateTokens(nft), + claimAirdrop + .call("claimNFTAirdrop", sender, receiver, nft, 1L) + .payingWith(sender) + .via("claimAirdrop") + .andAssert(txn -> txn.hasKnownStatus(CONTRACT_REVERT_EXECUTED))); + } + + @Order(11) + @HapiTest + private void prepareAirdrops( + @NonNull List tokens, @NonNull List nfts, @NonNull HapiSpec spec) { + var tokenMovements = prepareFTAirdrops(sender, receiver, tokens); + var nftMovements = prepareNFTAirdrops(sender, receiver, nfts); + allRunFor( + spec, + tokenAirdrop(Stream.of(tokenMovements, nftMovements) + .flatMap(Collection::stream) + .toArray(TokenMovement[]::new)) + .payingWith(sender.name()) + .via("tokenAirdrop"), + getTxnRecord("tokenAirdrop") + .hasPriority(recordWith() + .pendingAirdrops( + includingFungiblePendingAirdrop(tokenMovements.toArray(TokenMovement[]::new))) + .pendingAirdrops( + includingNftPendingAirdrop(nftMovements.toArray(TokenMovement[]::new))))); + } + + private SpecOperation[] prepareTokensAndBalances( + final SpecAccount sender, + final SpecAccount receiver, + final List tokens, + final List nfts) { + ArrayList specOperations = new ArrayList<>(); + specOperations.addAll(List.of( + sender.associateTokens(tokens.toArray(SpecFungibleToken[]::new)), + sender.associateTokens(nfts.toArray(SpecNonFungibleToken[]::new)), + checkForEmptyBalance(receiver, tokens, nfts))); + specOperations.addAll(tokens.stream() + 
.map(token -> token.treasury().transferUnitsTo(sender, 1_000L, token)) + .toList()); + specOperations.addAll(nfts.stream() + .map(nft -> nft.treasury().transferNFTsTo(sender, nft, 1L)) + .toList()); + + return specOperations.toArray(SpecOperation[]::new); + } + + private GetBalanceOperation checkForEmptyBalance( + final SpecAccount receiver, final List tokens, final List nfts) { + return receiver.getBalance().andAssert(balance -> { + tokens.forEach(token -> balance.hasTokenBalance(token.name(), 0L)); + nfts.forEach(nft -> balance.hasTokenBalance(nft.name(), 0L)); + }); + } + + private GetBalanceOperation checkForBalances( + final SpecAccount receiver, final List tokens, final List nfts) { + return receiver.getBalance().andAssert(balance -> { + tokens.forEach(token -> balance.hasTokenBalance(token.name(), 10L)); + nfts.forEach(nft -> balance.hasTokenBalance(nft.name(), 1L)); + }); + } + + private Address[] prepareSenderAddresses(@NonNull HapiSpec spec, @NonNull SpecAccount... senders) { + return Arrays.stream(senders) + .map(sender -> sender.addressOn(spec.targetNetworkOrThrow())) + .toArray(Address[]::new); + } + + private Address[] prepareReceiverAddresses(@NonNull HapiSpec spec, @NonNull SpecAccount... receivers) { + return Arrays.stream(receivers) + .map(receiver -> receiver.addressOn(spec.targetNetworkOrThrow())) + .toArray(Address[]::new); + } + + private Address[] prepareTokenAddresses(@NonNull HapiSpec spec, @NonNull SpecFungibleToken... tokens) { + return Arrays.stream(tokens) + .map(token -> token.addressOn(spec.targetNetworkOrThrow())) + .toArray(Address[]::new); + } + + private Address[] prepareNftAddresses(@NonNull HapiSpec spec, @NonNull SpecNonFungibleToken... nfts) { + return Arrays.stream(nfts) + .map(nft -> nft.addressOn(spec.targetNetworkOrThrow())) + .toArray(Address[]::new); + } + + private List prepareFTAirdrops( + @NonNull final SpecAccount sender, + @NonNull final SpecAccount receiver, + @NonNull final List tokens) { + return tokens.stream() + .map(token -> moving(10, token.name()).between(sender.name(), receiver.name())) + .toList(); + } + + private List prepareNFTAirdrops( + @NonNull final SpecAccount sender, + @NonNull final SpecAccount receiver, + @NonNull final List nfts) { + return nfts.stream() + .map(nft -> movingUnique(nft.name(), 1L).between(sender.name(), receiver.name())) + .toList(); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/dsl/contracts/TokenRedirectContract.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/dsl/contracts/TokenRedirectContract.java index 0bd0d195c450..59b13e11743c 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/dsl/contracts/TokenRedirectContract.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/dsl/contracts/TokenRedirectContract.java @@ -22,6 +22,8 @@ */ public enum TokenRedirectContract { HRC("HRC"), + // TODO: Update this to HRC904 once all tests are merged + HRC904CLAIM("HRC904TokenClaim"), HRC904REJECT("HRC904Reject"), ERC20("ERC20ABI"), ERC721("ERC721ABI"); diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/HRCTokenClaimTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/HRCTokenClaimTest.java new file mode 100644 index 000000000000..07b2a4ac9277 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/HRCTokenClaimTest.java @@ -0,0 +1,138 @@ +/* 
+ * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.junit.TestTags.SMART_CONTRACT; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; +import static com.hedera.services.bdd.spec.dsl.contracts.TokenRedirectContract.HRC904CLAIM; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAirdrop; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_PENDING_AIRDROP_ID; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; + +import com.hedera.services.bdd.junit.HapiTest; +import com.hedera.services.bdd.junit.HapiTestLifecycle; +import com.hedera.services.bdd.junit.OrderedInIsolation; +import com.hedera.services.bdd.junit.support.TestLifecycle; +import com.hedera.services.bdd.spec.dsl.annotations.Account; +import com.hedera.services.bdd.spec.dsl.annotations.FungibleToken; +import com.hedera.services.bdd.spec.dsl.annotations.NonFungibleToken; +import com.hedera.services.bdd.spec.dsl.entities.SpecAccount; +import com.hedera.services.bdd.spec.dsl.entities.SpecFungibleToken; +import com.hedera.services.bdd.spec.dsl.entities.SpecNonFungibleToken; +import com.hedera.services.bdd.spec.transactions.token.TokenMovement; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.stream.Stream; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.DynamicTest; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Tag; + +@Tag(SMART_CONTRACT) +@HapiTestLifecycle +@OrderedInIsolation +public class HRCTokenClaimTest { + + @Account(name = "sender", tinybarBalance = 100_000_000_000L) + static SpecAccount sender; + + @Account(name = "receiver", tinybarBalance = 100_000_000_000L, maxAutoAssociations = 0) + static SpecAccount receiver; + + @FungibleToken(name = "token", initialSupply = 1_000_000L) + static SpecFungibleToken token; + + @NonFungibleToken(name = "nft", numPreMints = 1) + static SpecNonFungibleToken nft; + + @BeforeAll + public static void setup(@NonNull final TestLifecycle lifecycle) { + lifecycle.doAdhoc( + sender.associateTokens(token, nft), + token.treasury().transferUnitsTo(sender, 10L, token), + nft.treasury().transferNFTsTo(sender, nft, 1L)); + } + + @Order(0) + @HapiTest + @DisplayName("Can claim airdrop of fungible token") + public Stream canClaimAirdropOfFungibleToken() { + return hapiTest( + receiver.getBalance().andAssert(balance -> balance.hasTokenBalance(token.name(), 0L)), + tokenAirdrop(moving(10L, token.name()).between(sender.name(), receiver.name())) + .payingWith(sender.name()), + token.call(HRC904CLAIM, "claimAirdropFT", sender) + .payingWith(receiver) + .with(call -> call.signingWith(receiver.name())), + receiver.getBalance().andAssert(balance -> balance.hasTokenBalance(token.name(), 10L))); + } 
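The HRC-904 claim tests in this file exercise claimAirdropFT(address) and claimAirdropNFT(address,int64) through the token redirect proxy. As a minimal, illustrative sketch (not part of this change set) of how those two calls are identified at the ABI level, the snippet below derives their 4-byte function selectors with BouncyCastle's Keccak-256 digest; the class name Hrc904SelectorSketch is invented for the example, and BouncyCastle is assumed to be available on the classpath.

    import java.nio.charset.StandardCharsets;
    import org.bouncycastle.jcajce.provider.digest.Keccak;

    // Hypothetical helper, not part of this change set: derives the 4-byte ABI selectors
    // that the HRC904TokenClaim redirect calls above are dispatched on.
    public final class Hrc904SelectorSketch {

        private Hrc904SelectorSketch() {}

        // selector = first 4 bytes of keccak256(canonical function signature)
        static String selectorOf(final String signature) {
            final byte[] hash = new Keccak.Digest256().digest(signature.getBytes(StandardCharsets.US_ASCII));
            final StringBuilder hex = new StringBuilder("0x");
            for (int i = 0; i < 4; i++) {
                hex.append(String.format("%02x", hash[i] & 0xff));
            }
            return hex.toString();
        }

        public static void main(final String[] args) {
            System.out.println("claimAirdropFT(address)        -> " + selectorOf("claimAirdropFT(address)"));
            System.out.println("claimAirdropNFT(address,int64) -> " + selectorOf("claimAirdropNFT(address,int64)"));
        }
    }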
+ + @Order(1) + @HapiTest + @DisplayName("Can claim airdrop of nft token") + public Stream canClaimAirdropOfNftToken() { + return hapiTest( + receiver.getBalance().andAssert(balance -> balance.hasTokenBalance(nft.name(), 0L)), + tokenAirdrop(TokenMovement.movingUnique(nft.name(), 1L).between(sender.name(), receiver.name())) + .payingWith(sender.name()), + nft.call(HRC904CLAIM, "claimAirdropNFT", sender, 1L) + .payingWith(receiver) + .with(call -> call.signingWith(receiver.name())), + receiver.getBalance().andAssert(balance -> balance.hasTokenBalance(nft.name(), 1L))); + } + + @Order(2) + @HapiTest + @DisplayName("Cannot claim airdrop if not existing") + public Stream cannotClaimAirdropWhenNotExisting() { + return hapiTest(token.call(HRC904CLAIM, "claimAirdropFT", sender) + .payingWith(receiver) + .with(call -> call.signingWith(receiver.name())) + .andAssert(txn -> txn.hasKnownStatuses(SUCCESS, INVALID_PENDING_AIRDROP_ID))); + } + + @Order(3) + @HapiTest + @DisplayName("Cannot claim airdrop if sender not existing") + public Stream cannotClaimAirdropWhenSenderNotExisting() { + return hapiTest(token.call(HRC904CLAIM, "claimAirdropFT", token) + .payingWith(receiver) + .with(call -> call.signingWith(receiver.name())) + .andAssert(txn -> txn.hasKnownStatuses(SUCCESS, INVALID_PENDING_AIRDROP_ID))); + } + + @Order(4) + @HapiTest + @DisplayName("Cannot claim nft airdrop if not existing") + public Stream cannotClaimNftAirdropWhenNotExisting() { + return hapiTest(nft.call(HRC904CLAIM, "claimAirdropNFT", sender, 1L) + .payingWith(receiver) + .with(call -> call.signingWith(receiver.name())) + .andAssert(txn -> txn.hasKnownStatuses(SUCCESS, INVALID_PENDING_AIRDROP_ID))); + } + + @Order(5) + @HapiTest + @DisplayName("Cannot claim nft airdrop if sender not existing") + public Stream cannotClaimNftAirdropWhenSenderNotExisting() { + return hapiTest(nft.call(HRC904CLAIM, "claimAirdropNFT", nft, 1L) + .payingWith(receiver) + .with(call -> call.signingWith(receiver.name())) + .andAssert(txn -> txn.hasKnownStatuses(SUCCESS, INVALID_PENDING_AIRDROP_ID))); + } +} diff --git a/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.bin b/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.bin new file mode 100644 index 000000000000..abdf3b1677a1 --- /dev/null +++ b/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.bin @@ -0,0 +1 @@ 
+608060405234801561001057600080fd5b506115f6806100206000396000f3fe608060405234801561001057600080fd5b50600436106100625760003560e01c806315dacbea14610067578063482bf6a4146100975780635e1c75e1146100c7578063618dc65e146100f757806370b25e5c146101285780639b23d3d914610158575b600080fd5b610081600480360381019061007c9190610bef565b610188565b60405161008e9190610c72565b60405180910390f35b6100b160048036038101906100ac9190610cb9565b6102a6565b6040516100be9190610c72565b60405180910390f35b6100e160048036038101906100dc9190610d20565b61040a565b6040516100ee9190610c72565b60405180910390f35b610111600480360381019061010c9190610eb9565b61055b565b60405161011f929190610fad565b60405180910390f35b610142600480360381019061013d9190611168565b6106b7565b60405161014f9190610c72565b60405180910390f35b610172600480360381019061016d9190610bef565b6108a7565b60405161017f9190610c72565b60405180910390f35b600080600061016773ffffffffffffffffffffffffffffffffffffffff166315dacbea60e01b888888886040516024016101c5949392919061125d565b604051602081830303815290604052907bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505060405161022f91906112de565b6000604051808303816000865af19150503d806000811461026c576040519150601f19603f3d011682016040523d82523d6000602084013e610271565b606091505b509150915081610282576015610297565b80806020019051810190610296919061132e565b5b60030b92505050949350505050565b600080600167ffffffffffffffff8111156102c4576102c3610d8e565b5b6040519080825280602002602001820160405280156102fd57816020015b6102ea610ada565b8152602001906001900390816102e25790505b509050610308610ada565b86816000019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff168152505085816020019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff168152505084816040019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff168152505083816060019060070b908160070b8152505080826000815181106103d7576103d661135b565b5b60200260200101819052506103eb826109c5565b9250601660030b8360070b1461040057600080fd5b5050949350505050565b600080600167ffffffffffffffff81111561042857610427610d8e565b5b60405190808252806020026020018201604052801561046157816020015b61044e610ada565b8152602001906001900390816104465790505b50905061046c610ada565b85816000019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff168152505084816020019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff168152505083816040019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff168152505080826000815181106105295761052861135b565b5b602002602001018190525061053d826109c5565b9250601660030b8360070b1461055257600080fd5b50509392505050565b6000606060008061016773ffffffffffffffffffffffffffffffffffffffff1663618dc65e60e01b878760405160240161059692919061138a565b604051602081830303815290604052907bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505060405161060091906112de565b6000604051808303816000865af19150503d806000811461063d576040519150601f19603f3d011682016040523d82523d6000602084013e610642565b606091505b50915091507f4af4780e06fe8cb9df64b0794fa6f01399af979175bb988e35e0e57e594567bc82826040516106789291906113d5565b60405180910390a18161069c576015604051806020016040528060008152506106a0565b6016815b8160030b9150809450819550505050509250929050565b6000808551905060008167ffffffffffffffff8111156106da
576106d9610d8e565b5b60405190808252806020026020018201604052801561071357816020015b610700610ada565b8152602001906001900390816106f85790505b50905060005b8281101561087e57610729610ada565b88828151811061073c5761073b61135b565b5b6020026020010151816000019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff168152505087828151811061078e5761078d61135b565b5b6020026020010151816020019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff16815250508682815181106107e0576107df61135b565b5b6020026020010151816040019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff16815250508582815181106108325761083161135b565b5b6020026020010151816060019060070b908160070b815250508083838151811061085f5761085e61135b565b5b602002602001018190525050808061087690611434565b915050610719565b50610888816109c5565b9250601660030b8360070b1461089d57600080fd5b5050949350505050565b600080600061016773ffffffffffffffffffffffffffffffffffffffff16639b23d3d960e01b888888886040516024016108e4949392919061125d565b604051602081830303815290604052907bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505060405161094e91906112de565b6000604051808303816000865af19150503d806000811461098b576040519150601f19603f3d011682016040523d82523d6000602084013e610990565b606091505b5091509150816109a15760156109b6565b808060200190518101906109b5919061132e565b5b60030b92505050949350505050565b600080600061016773ffffffffffffffffffffffffffffffffffffffff16630596164160e01b856040516024016109fc919061159e565b604051602081830303815290604052907bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff8381831617835250505050604051610a6691906112de565b6000604051808303816000865af19150503d8060008114610aa3576040519150601f19603f3d011682016040523d82523d6000602084013e610aa8565b606091505b509150915081610ab9576015610ace565b80806020019051810190610acd919061132e565b5b60030b92505050919050565b6040518060800160405280600073ffffffffffffffffffffffffffffffffffffffff168152602001600073ffffffffffffffffffffffffffffffffffffffff168152602001600073ffffffffffffffffffffffffffffffffffffffff168152602001600060070b81525090565b6000604051905090565b600080fd5b600080fd5b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b6000610b8682610b5b565b9050919050565b610b9681610b7b565b8114610ba157600080fd5b50565b600081359050610bb381610b8d565b92915050565b6000819050919050565b610bcc81610bb9565b8114610bd757600080fd5b50565b600081359050610be981610bc3565b92915050565b60008060008060808587031215610c0957610c08610b51565b5b6000610c1787828801610ba4565b9450506020610c2887828801610ba4565b9350506040610c3987828801610ba4565b9250506060610c4a87828801610bda565b91505092959194509250565b60008160070b9050919050565b610c6c81610c56565b82525050565b6000602082019050610c876000830184610c63565b92915050565b610c9681610c56565b8114610ca157600080fd5b50565b600081359050610cb381610c8d565b92915050565b60008060008060808587031215610cd357610cd2610b51565b5b6000610ce187828801610ba4565b9450506020610cf287828801610ba4565b9350506040610d0387828801610ba4565b9250506060610d1487828801610ca4565b91505092959194509250565b600080600060608486031215610d3957610d38610b51565b5b6000610d4786828701610ba4565b9350506020610d5886828701610ba4565b9250506040610d6986828701610ba4565b9150509250925092565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b610dc6826
10d7d565b810181811067ffffffffffffffff82111715610de557610de4610d8e565b5b80604052505050565b6000610df8610b47565b9050610e048282610dbd565b919050565b600067ffffffffffffffff821115610e2457610e23610d8e565b5b610e2d82610d7d565b9050602081019050919050565b82818337600083830152505050565b6000610e5c610e5784610e09565b610dee565b905082815260208101848484011115610e7857610e77610d78565b5b610e83848285610e3a565b509392505050565b600082601f830112610ea057610e9f610d73565b5b8135610eb0848260208601610e49565b91505092915050565b60008060408385031215610ed057610ecf610b51565b5b6000610ede85828601610ba4565b925050602083013567ffffffffffffffff811115610eff57610efe610b56565b5b610f0b85828601610e8b565b9150509250929050565b6000819050919050565b610f2881610f15565b82525050565b600081519050919050565b600082825260208201905092915050565b60005b83811015610f68578082015181840152602081019050610f4d565b60008484015250505050565b6000610f7f82610f2e565b610f898185610f39565b9350610f99818560208601610f4a565b610fa281610d7d565b840191505092915050565b6000604082019050610fc26000830185610f1f565b8181036020830152610fd48184610f74565b90509392505050565b600067ffffffffffffffff821115610ff857610ff7610d8e565b5b602082029050602081019050919050565b600080fd5b600061102161101c84610fdd565b610dee565b9050808382526020820190506020840283018581111561104457611043611009565b5b835b8181101561106d57806110598882610ba4565b845260208401935050602081019050611046565b5050509392505050565b600082601f83011261108c5761108b610d73565b5b813561109c84826020860161100e565b91505092915050565b600067ffffffffffffffff8211156110c0576110bf610d8e565b5b602082029050602081019050919050565b60006110e46110df846110a5565b610dee565b9050808382526020820190506020840283018581111561110757611106611009565b5b835b81811015611130578061111c8882610ca4565b845260208401935050602081019050611109565b5050509392505050565b600082601f83011261114f5761114e610d73565b5b813561115f8482602086016110d1565b91505092915050565b6000806000806080858703121561118257611181610b51565b5b600085013567ffffffffffffffff8111156111a05761119f610b56565b5b6111ac87828801611077565b945050602085013567ffffffffffffffff8111156111cd576111cc610b56565b5b6111d987828801611077565b935050604085013567ffffffffffffffff8111156111fa576111f9610b56565b5b61120687828801611077565b925050606085013567ffffffffffffffff81111561122757611226610b56565b5b6112338782880161113a565b91505092959194509250565b61124881610b7b565b82525050565b61125781610bb9565b82525050565b6000608082019050611272600083018761123f565b61127f602083018661123f565b61128c604083018561123f565b611299606083018461124e565b95945050505050565b600081905092915050565b60006112b882610f2e565b6112c281856112a2565b93506112d2818560208601610f4a565b80840191505092915050565b60006112ea82846112ad565b915081905092915050565b60008160030b9050919050565b61130b816112f5565b811461131657600080fd5b50565b60008151905061132881611302565b92915050565b60006020828403121561134457611343610b51565b5b600061135284828501611319565b91505092915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b600060408201905061139f600083018561123f565b81810360208301526113b18184610f74565b90509392505050565b60008115159050919050565b6113cf816113ba565b82525050565b60006040820190506113ea60008301856113c6565b81810360208301526113fc8184610f74565b90509392505050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b600061143f82610bb9565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff820361147157611470611405565b5b600182019050919050565b600081519050919050565b600082825260208201905092915050565b6000819050602082019050919050565b6114b181610b7b565b8252
5050565b6114c081610c56565b82525050565b6080820160008201516114dc60008501826114a8565b5060208201516114ef60208501826114a8565b50604082015161150260408501826114a8565b50606082015161151560608501826114b7565b50505050565b600061152783836114c6565b60808301905092915050565b6000602082019050919050565b600061154b8261147c565b6115558185611487565b935061156083611498565b8060005b83811015611591578151611578888261151b565b975061158383611533565b925050600181019050611564565b5085935050505092915050565b600060208201905081810360008301526115b88184611540565b90509291505056fea2646970667358221220f12e32af50d51ffc5be75444f1e0048a484234296fc93277f8077bf3be18fcc364736f6c63430008120033 \ No newline at end of file diff --git a/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.json b/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.json new file mode 100644 index 000000000000..ae2ebb5c53b4 --- /dev/null +++ b/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.json @@ -0,0 +1,215 @@ +[ + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "bool", + "name": "", + "type": "bool" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "", + "type": "bytes" + } + ], + "name": "CallResponseEvent", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "token", + "type": "address" + } + ], + "name": "claim", + "outputs": [ + { + "internalType": "int64", + "name": "responseCode", + "type": "int64" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "senders", + "type": "address[]" + }, + { + "internalType": "address[]", + "name": "receivers", + "type": "address[]" + }, + { + "internalType": "address[]", + "name": "tokens", + "type": "address[]" + }, + { + "internalType": "int64[]", + "name": "serials", + "type": "int64[]" + } + ], + "name": "claimAirdrops", + "outputs": [ + { + "internalType": "int64", + "name": "responseCode", + "type": "int64" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "token", + "type": "address" + }, + { + "internalType": "int64", + "name": "serial", + "type": "int64" + } + ], + "name": "claimNFTAirdrop", + "outputs": [ + { + "internalType": "int64", + "name": "responseCode", + "type": "int64" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "token", + "type": "address" + }, + { + "internalType": "bytes", + "name": "encodedFunctionSelector", + "type": "bytes" + } + ], + "name": "redirectForToken", + "outputs": [ + { + "internalType": "int256", + "name": "responseCode", + "type": "int256" + }, + { + "internalType": "bytes", + "name": "response", + "type": "bytes" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "token", + "type": "address" + }, + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + 
"internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "internalType": "int64", + "name": "responseCode", + "type": "int64" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "token", + "type": "address" + }, + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "serialNumber", + "type": "uint256" + } + ], + "name": "transferFromNFT", + "outputs": [ + { + "internalType": "int64", + "name": "responseCode", + "type": "int64" + } + ], + "stateMutability": "nonpayable", + "type": "function" + } +] \ No newline at end of file diff --git a/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.sol b/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.sol new file mode 100644 index 000000000000..779a30a9b37e --- /dev/null +++ b/hedera-node/test-clients/src/main/resources/contract/contracts/ClaimAirdrop/ClaimAirdrop.sol @@ -0,0 +1,63 @@ +// SPDX-License-Identifier: Apache-2.0 +pragma solidity >=0.5.0 <0.9.0; +pragma experimental ABIEncoderV2; + +import "./HederaTokenService.sol"; + +contract ClaimAirdrop is HederaTokenService { + + function claim(address sender, address receiver, address token) public returns(int64 responseCode){ + IHederaTokenService.PendingAirdrop[] memory pendingAirdrops = new IHederaTokenService.PendingAirdrop[](1); + + IHederaTokenService.PendingAirdrop memory pendingAirdrop; + pendingAirdrop.sender = sender; + pendingAirdrop.receiver = receiver; + pendingAirdrop.token = token; + + pendingAirdrops[0] = pendingAirdrop; + + responseCode = claimAirdrops(pendingAirdrops); + if (responseCode != HederaResponseCodes.SUCCESS) { + revert(); + } + return responseCode; + } + + function claimNFTAirdrop(address sender, address receiver, address token, int64 serial) public returns(int64 responseCode){ + IHederaTokenService.PendingAirdrop[] memory pendingAirdrops = new IHederaTokenService.PendingAirdrop[](1); + + IHederaTokenService.PendingAirdrop memory pendingAirdrop; + pendingAirdrop.sender = sender; + pendingAirdrop.receiver = receiver; + pendingAirdrop.token = token; + pendingAirdrop.serial = serial; + + pendingAirdrops[0] = pendingAirdrop; + + responseCode = claimAirdrops(pendingAirdrops); + if (responseCode != HederaResponseCodes.SUCCESS) { + revert(); + } + return responseCode; + } + + function claimAirdrops(address[] memory senders, address[] memory receivers, address[] memory tokens, int64[] memory serials) public returns (int64 responseCode) { + uint length = senders.length; + IHederaTokenService.PendingAirdrop[] memory pendingAirdrops = new IHederaTokenService.PendingAirdrop[](length); + for (uint i = 0; i < length; i++) { + IHederaTokenService.PendingAirdrop memory pendingAirdrop; + pendingAirdrop.sender = senders[i]; + pendingAirdrop.receiver = receivers[i]; + pendingAirdrop.token = tokens[i]; + pendingAirdrop.serial = serials[i]; + + pendingAirdrops[i] = pendingAirdrop; + } + + responseCode = claimAirdrops(pendingAirdrops); + if (responseCode != HederaResponseCodes.SUCCESS) { + revert(); + } + return responseCode; + } +} \ No newline at end of file diff --git a/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.bin 
b/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.bin new file mode 100644 index 000000000000..2b13ea934ede --- /dev/null +++ b/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.bin @@ -0,0 +1 @@ +608060405234801561001057600080fd5b506103a6806100206000396000f3fe608060405234801561001057600080fd5b50600436106100365760003560e01c806363ada5d71461003b578063a83bc5b21461006b575b600080fd5b61005560048036038101906100509190610244565b61009b565b6040516100629190610293565b60405180910390f35b610085600480360381019061008091906102ae565b610123565b6040516100929190610293565b60405180910390f35b60003073ffffffffffffffffffffffffffffffffffffffff166363ada5d784846040518363ffffffff1660e01b81526004016100d89291906102ea565b6020604051808303816000875af11580156100f7573d6000803e3d6000fd5b505050506040513d601f19601f8201168201806040525081019061011b9190610328565b905092915050565b60003073ffffffffffffffffffffffffffffffffffffffff1663a83bc5b2836040518263ffffffff1660e01b815260040161015e9190610355565b6020604051808303816000875af115801561017d573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906101a19190610328565b9050919050565b600080fd5b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006101d8826101ad565b9050919050565b6101e8816101cd565b81146101f357600080fd5b50565b600081359050610205816101df565b92915050565b60008160070b9050919050565b6102218161020b565b811461022c57600080fd5b50565b60008135905061023e81610218565b92915050565b6000806040838503121561025b5761025a6101a8565b5b6000610269858286016101f6565b925050602061027a8582860161022f565b9150509250929050565b61028d8161020b565b82525050565b60006020820190506102a86000830184610284565b92915050565b6000602082840312156102c4576102c36101a8565b5b60006102d2848285016101f6565b91505092915050565b6102e4816101cd565b82525050565b60006040820190506102ff60008301856102db565b61030c6020830184610284565b9392505050565b60008151905061032281610218565b92915050565b60006020828403121561033e5761033d6101a8565b5b600061034c84828501610313565b91505092915050565b600060208201905061036a60008301846102db565b9291505056fea26469706673582212202abfda89e671ab879c88bdd36665614ef3e65b7dee575a5336c7c7152df56dda64736f6c63430008120033 \ No newline at end of file diff --git a/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.json b/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.json new file mode 100644 index 000000000000..be014264239d --- /dev/null +++ b/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.json @@ -0,0 +1,45 @@ +[ + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + } + ], + "name": "claimAirdropFT", + "outputs": [ + { + "internalType": "int64", + "name": "responseCode", + "type": "int64" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "int64", + "name": "serial", + "type": "int64" + } + ], + "name": "claimAirdropNFT", + "outputs": [ + { + "internalType": "int64", + "name": "responseCode", + "type": "int64" + } + ], + "stateMutability": "nonpayable", + "type": "function" + } +] \ No newline at end of file diff --git a/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.sol 
b/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.sol new file mode 100644 index 000000000000..1099bbbc4294 --- /dev/null +++ b/hedera-node/test-clients/src/main/resources/contract/contracts/HRC904TokenClaim/HRC904TokenClaim.sol @@ -0,0 +1,17 @@ +// SPDX-License-Identifier: Apache-2.0 +pragma solidity ^0.8.0; + +interface IHRC904TokenClaim { + function claimAirdropFT(address senderAddress) external returns (int64 responseCode); + function claimAirdropNFT(address senderAddress, int64 serialNumber) external returns (int64 responseCode); +} + +contract HRC904TokenClaim is IHRC904TokenClaim{ + function claimAirdropFT(address sender) public returns (int64 responseCode) { + return IHRC904TokenClaim(this).claimAirdropFT(sender); + } + + function claimAirdropNFT(address sender, int64 serial) public returns (int64 responseCode) { + return IHRC904TokenClaim(this).claimAirdropNFT(sender, serial); + } +} \ No newline at end of file diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoStatic.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoStatic.java index 47e83641f986..425b443f82e5 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoStatic.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoStatic.java @@ -530,6 +530,7 @@ public static Map initNodeSecurity( if (cryptoConfig.enableNewKeyStoreModel()) { logger.debug(STARTUP.getMarker(), "Reading keys using the enhanced key loader"); keysAndCerts = EnhancedKeyStoreLoader.using(addressBook, configuration) + .migrate() .scan() .generateIfNecessary() .verify() diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/EnhancedKeyStoreLoader.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/EnhancedKeyStoreLoader.java index 8d5c668dd46b..cec5cab6f185 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/EnhancedKeyStoreLoader.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/EnhancedKeyStoreLoader.java @@ -17,6 +17,7 @@ package com.swirlds.platform.crypto; import static com.swirlds.common.utility.CommonUtils.nameToAlias; +import static com.swirlds.logging.legacy.LogMarker.ERROR; import static com.swirlds.logging.legacy.LogMarker.STARTUP; import static com.swirlds.platform.crypto.CryptoConstants.PUBLIC_KEYS_FILE; import static com.swirlds.platform.crypto.CryptoStatic.copyPublicKeys; @@ -32,8 +33,11 @@ import com.swirlds.platform.system.address.AddressBook; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; +import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -49,14 +53,18 @@ import java.security.Security; import java.security.UnrecoverableKeyException; import java.security.cert.Certificate; +import java.security.cert.CertificateEncodingException; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; +import 
java.util.concurrent.atomic.AtomicLong; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; @@ -75,6 +83,8 @@ import org.bouncycastle.pkcs.PKCS8EncryptedPrivateKeyInfo; import org.bouncycastle.pkcs.PKCSException; import org.bouncycastle.util.encoders.DecoderException; +import org.bouncycastle.util.io.pem.PemObject; +import org.bouncycastle.util.io.pem.PemWriter; /** * This class is responsible for loading the key stores for all nodes in the address book. @@ -1165,4 +1175,344 @@ private interface AddressBookCallback { void apply(int index, NodeId nodeId, Address address, String nodeAlias) throws KeyStoreException, KeyLoadingException; } + + ///////////////////////////////////////////////////////////////////////////////////////////////// + //////////////////////////////////// MIGRATION METHODS ////////////////////////////////////////// + ///////////////////////////////////////////////////////////////////////////////////////////////// + + /** + * Performs any necessary migration steps to ensure the key storage is up-to-date. + *

+ * As of release 0.56 the on-disk cryptography should reflect the following structure:
+ * <ul>
+ *     <li>s-private-alias.pem - the private signing key</li>
+ *     <li>s-public-alias.pem - the public signing certificates of each node</li>
+ *     <li>all *.pfx files moved to OLD_PFX_KEYS subdirectory and no longer used.</li>
+ *     <li>all agreement key material is deleted from disk.</li>
+ * </ul>
+ * + * @return this {@link EnhancedKeyStoreLoader} instance. + */ + @NonNull + public EnhancedKeyStoreLoader migrate() throws KeyLoadingException, KeyStoreException { + logger.info(STARTUP.getMarker(), "Starting key store migration"); + final Map pfxPrivateKeys = new HashMap<>(); + final Map pfxCertificates = new HashMap<>(); + + // delete agreement keys permanently. They are being created at startup by generateIfNecessary() after scan(). + deleteAgreementKeys(); + + // create PEM files for signing keys and certs. + long errorCount = extractPrivateKeysAndCertsFromPfxFiles(pfxPrivateKeys, pfxCertificates); + + if (errorCount == 0) { + // validate only when there are no errors extracting pem files. + errorCount = validateKeysAndCertsAreLoadableFromPemFiles(pfxPrivateKeys, pfxCertificates); + } + + if (errorCount > 0) { + // roll back due to errors. + // this deletes any pem files created, but leaves the agreement keys deleted. + logger.error(STARTUP.getMarker(), "Due to {} errors, reverting pem file creation.", errorCount); + rollBackSigningKeysAndCertsChanges(pfxPrivateKeys, pfxCertificates); + } else { + // cleanup pfx files by moving them to subdirectory + cleanupByMovingPfxFilesToSubDirectory(); + logger.info(STARTUP.getMarker(), "Finished key store migration."); + } + + return this; + } + + /** + * Delete any agreement keys from the key store directory. + */ + private void deleteAgreementKeys() { + // delete any agreement keys of the form a-* + final File[] agreementKeyFiles = keyStoreDirectory.toFile().listFiles((dir, name) -> name.startsWith("a-")); + if (agreementKeyFiles != null) { + for (final File agreementKeyFile : agreementKeyFiles) { + if (agreementKeyFile.isFile()) { + try { + Files.delete(agreementKeyFile.toPath()); + logger.debug(STARTUP.getMarker(), "Deleted agreement key file {}", agreementKeyFile.getName()); + } catch (final IOException e) { + logger.error( + ERROR.getMarker(), + "Failed to delete agreement key file {}", + agreementKeyFile.getName()); + } + } + } + } + } + + /** + * Extracts the private keys and certificates from the PFX files and writes them to PEM files. + * + * @param pfxPrivateKeys the map of private keys being extracted (Updated By Method Call) + * @param pfxCertificates the map of certificates being extracted (Updated By Method Call) + * @return the number of errors encountered during the extraction process. + * @throws KeyStoreException if the underlying method calls throw this exception. + * @throws KeyLoadingException if the underlying method calls throw this exception. 
+ */ + private long extractPrivateKeysAndCertsFromPfxFiles( + final Map pfxPrivateKeys, final Map pfxCertificates) + throws KeyStoreException, KeyLoadingException { + final KeyStore legacyPublicStore = resolveLegacyPublicStore(); + final AtomicLong errorCount = new AtomicLong(0); + + iterateAddressBook(addressBook, (i, nodeId, address, nodeAlias) -> { + if (isLocal(address)) { + // extract private keys for local nodes + final Path sPrivateKeyLocation = keyStoreDirectory.resolve("s-private-" + nodeAlias + ".pem"); + final Path ksLocation = legacyPrivateKeyStore(nodeAlias); + if (!Files.exists(sPrivateKeyLocation) && Files.exists(ksLocation)) { + logger.trace( + STARTUP.getMarker(), + "Extracting private signing key for node {} from file {}", + nodeId, + ksLocation.getFileName()); + final PrivateKey privateKey = + readLegacyPrivateKey(nodeId, ksLocation, KeyCertPurpose.SIGNING.storeName(nodeAlias)); + pfxPrivateKeys.put(nodeId, privateKey); + if (privateKey == null) { + logger.error( + ERROR.getMarker(), + "Failed to extract private signing key for node {} from file {}", + nodeId, + ksLocation.getFileName()); + errorCount.incrementAndGet(); + } else { + logger.trace( + STARTUP.getMarker(), + "Writing private signing key for node {} to PEM file {}", + nodeId, + sPrivateKeyLocation.getFileName()); + try { + writePemFile(true, sPrivateKeyLocation, privateKey.getEncoded()); + } catch (final IOException e) { + logger.error( + ERROR.getMarker(), + "Failed to write private key for node {} to PEM file {}", + nodeId, + sPrivateKeyLocation.getFileName()); + errorCount.incrementAndGet(); + } + } + } + } + + // extract certificates for all nodes + final Path sCertificateLocation = keyStoreDirectory.resolve("s-public-" + nodeAlias + ".pem"); + final Path ksLocation = legacyCertificateStore(); + if (!Files.exists(sCertificateLocation) && Files.exists(ksLocation)) { + logger.trace( + STARTUP.getMarker(), + "Extracting signing certificate for node {} from file {} ", + nodeId, + ksLocation.getFileName()); + final Certificate certificate = + readLegacyCertificate(nodeId, nodeAlias, KeyCertPurpose.SIGNING, legacyPublicStore); + pfxCertificates.put(nodeId, certificate); + if (certificate == null) { + logger.error( + ERROR.getMarker(), + "Failed to extract signing certificate for node {} from file {}", + nodeId, + ksLocation.getFileName()); + errorCount.incrementAndGet(); + } else { + logger.trace( + STARTUP.getMarker(), + "Writing signing certificate for node {} to PEM file {}", + nodeId, + sCertificateLocation.getFileName()); + try { + writePemFile(false, sCertificateLocation, certificate.getEncoded()); + } catch (final CertificateEncodingException | IOException e) { + logger.error( + ERROR.getMarker(), + "Failed to write signing certificate for node {} to PEM file {}", + nodeId, + sCertificateLocation.getFileName()); + errorCount.incrementAndGet(); + } + } + } + }); + return errorCount.get(); + } + + /** + * Validates that the private keys and certs in PEM files are loadable and match the PFX loaded keys and certs. + * + * @param pfxPrivateKeys the map of private keys being extracted. + * @param pfxCertificates the map of certificates being extracted. + * @return the number of errors encountered during the validation process. + * @throws KeyStoreException if the underlying method calls throw this exception. + * @throws KeyLoadingException if the underlying method calls throw this exception. 
+ */ + private long validateKeysAndCertsAreLoadableFromPemFiles( + final Map pfxPrivateKeys, final Map pfxCertificates) + throws KeyStoreException, KeyLoadingException { + final AtomicLong errorCount = new AtomicLong(0); + iterateAddressBook(addressBook, (i, nodeId, address, nodeAlias) -> { + if (isLocal(address) && pfxCertificates.containsKey(nodeId)) { + // validate private keys for local nodes + final Path ksLocation = privateKeyStore(nodeAlias, KeyCertPurpose.SIGNING); + final PrivateKey pemPrivateKey = readPrivateKey(nodeId, ksLocation); + if (pemPrivateKey == null + || !Arrays.equals( + pemPrivateKey.getEncoded(), + pfxPrivateKeys.get(nodeId).getEncoded())) { + logger.error(ERROR.getMarker(), "Private key for node {} does not match the migrated key", nodeId); + errorCount.incrementAndGet(); + } + } + + // validate certificates for all nodes PEM files were created for. + if (pfxCertificates.containsKey(nodeId)) { + final Path ksLocation = certificateStore(nodeAlias, KeyCertPurpose.SIGNING); + final Certificate pemCertificate = readCertificate(nodeId, ksLocation); + try { + if (pemCertificate == null + || !Arrays.equals( + pemCertificate.getEncoded(), + pfxCertificates.get(nodeId).getEncoded())) { + logger.error( + ERROR.getMarker(), + "Certificate for node {} does not match the migrated certificate", + nodeId); + errorCount.incrementAndGet(); + } + } catch (final CertificateEncodingException e) { + logger.error(ERROR.getMarker(), "Encoding error while validating certificate for node {}.", nodeId); + errorCount.incrementAndGet(); + } + } + }); + return errorCount.get(); + } + + /** + * Rollback the creation of PEM files for signing keys and certificates. + * + * @param pfxPrivateKeys the map of private keys being extracted. + * @param pfxCertificates the map of certificates being extracted. + * @throws KeyStoreException if the underlying method calls throw this exception. + * @throws KeyLoadingException if the underlying method calls throw this exception. + */ + private void rollBackSigningKeysAndCertsChanges( + final Map pfxPrivateKeys, final Map pfxCertificates) + throws KeyStoreException, KeyLoadingException { + + final AtomicLong cleanupErrorCount = new AtomicLong(0); + iterateAddressBook(addressBook, (i, nodeId, address, nodeAlias) -> { + // private key rollback + if (isLocal(address) && pfxPrivateKeys.containsKey(address.getNodeId())) { + try { + Files.deleteIfExists(privateKeyStore(nodeAlias, KeyCertPurpose.SIGNING)); + } catch (final IOException e) { + cleanupErrorCount.incrementAndGet(); + } + } + // certificate rollback + if (pfxCertificates.containsKey(address.getNodeId())) { + try { + Files.deleteIfExists(certificateStore(nodeAlias, KeyCertPurpose.SIGNING)); + } catch (final IOException e) { + cleanupErrorCount.incrementAndGet(); + } + } + }); + if (cleanupErrorCount.get() > 0) { + logger.error( + ERROR.getMarker(), + "Failed to rollback {} pem files created. Manual cleanup required.", + cleanupErrorCount.get()); + throw new IllegalStateException("Cryptography Migration failed to generate or validate PEM files."); + } + } + + /** + * Move the PFX files to the OLD_PFX_KEYS subdirectory. + * + * @throws KeyStoreException if the underlying method calls throw this exception. + * @throws KeyLoadingException if the underlying method calls throw this exception. 
+ */ + private void cleanupByMovingPfxFilesToSubDirectory() throws KeyStoreException, KeyLoadingException { + final AtomicLong cleanupErrorCount = new AtomicLong(0); + + final String archiveDirectory = ".archive"; + final String now = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss").format(LocalDateTime.now()); + final String newDirectory = archiveDirectory + File.pathSeparator + now; + final Path pfxArchiveDirectory = keyStoreDirectory.resolve(archiveDirectory); + final Path pfxDateDirectory = pfxArchiveDirectory.resolve(now); + + logger.info(STARTUP.getMarker(), "Cryptography Migration Cleanup: Moving PFX files to {}", pfxDateDirectory); + + if (!Files.exists(pfxDateDirectory)) { + try { + if (!Files.exists(pfxArchiveDirectory)) { + Files.createDirectory(pfxArchiveDirectory); + } + Files.createDirectory(pfxDateDirectory); + } catch (final IOException e) { + logger.error( + ERROR.getMarker(), + "Failed to create [{}] subdirectory. Manual cleanup required.", + newDirectory); + return; + } + } + iterateAddressBook(addressBook, (i, nodeId, address, nodeAlias) -> { + if (isLocal(address)) { + // move private key PFX files per local node + final File sPrivatePfx = legacyPrivateKeyStore(nodeAlias).toFile(); + if (sPrivatePfx.exists() + && sPrivatePfx.isFile() + && !sPrivatePfx.renameTo( + pfxDateDirectory.resolve(sPrivatePfx.getName()).toFile())) { + cleanupErrorCount.incrementAndGet(); + } + } + }); + final File sPublicPfx = legacyCertificateStore().toFile(); + if (sPublicPfx.exists() + && sPublicPfx.isFile() + && !sPublicPfx.renameTo( + pfxArchiveDirectory.resolve(sPublicPfx.getName()).toFile())) { + cleanupErrorCount.incrementAndGet(); + } + if (cleanupErrorCount.get() > 0) { + logger.error( + ERROR.getMarker(), + "Failed to move {} PFX files to [{}] subdirectory. Manual cleanup required.", + cleanupErrorCount.get(), + newDirectory); + throw new IllegalStateException( + "Cryptography Migration failed to move PFX files to [" + newDirectory + "] subdirectory."); + } + } + + /** + * Write the provided encoded key or certificate as a base64 DER encoded PEM file to the provided location. + * + * @param isPrivateKey true if the encoded data is a private key; false if it is a certificate. + * @param location the location to write the PEM file. + * @param encoded the byte encoded data to write to the PEM file. + * @throws IOException if an error occurred while writing the PEM file. + */ + private static void writePemFile( + final boolean isPrivateKey, @NonNull final Path location, @NonNull final byte[] encoded) + throws IOException { + final PemObject pemObj = new PemObject(isPrivateKey ? 
"PRIVATE KEY" : "CERTIFICATE", encoded); + try (final FileOutputStream file = new FileOutputStream(location.toFile(), false); + final var out = new OutputStreamWriter(file); + final PemWriter writer = new PemWriter(out)) { + writer.writeObject(pemObj); + file.getFD().sync(); + } + } } diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/crypto/EnhancedKeyStoreLoaderTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/crypto/EnhancedKeyStoreLoaderTest.java index 4cbb9d556c71..71425965e30a 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/crypto/EnhancedKeyStoreLoaderTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/crypto/EnhancedKeyStoreLoaderTest.java @@ -33,7 +33,9 @@ import java.nio.file.Files; import java.nio.file.Path; import java.security.KeyStoreException; +import java.util.HashMap; import java.util.Map; +import java.util.stream.Stream; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; @@ -113,6 +115,7 @@ void keyStoreLoaderPositiveTest(final String directoryName) assertThat(keyDirectory).exists().isDirectory().isReadable().isNotEmptyDirectory(); assertThat(loader).isNotNull(); + assertThatCode(loader::migrate).doesNotThrowAnyException(); assertThatCode(loader::scan).doesNotThrowAnyException(); assertThatCode(loader::generateIfNecessary).doesNotThrowAnyException(); assertThatCode(loader::verify).doesNotThrowAnyException(); @@ -159,6 +162,7 @@ void keyStoreLoaderNegativeCase1Test(final String directoryName) throws IOExcept assertThat(keyDirectory).exists().isDirectory().isReadable().isNotEmptyDirectory(); assertThat(loader).isNotNull(); + assertThatCode(loader::migrate).doesNotThrowAnyException(); assertThatCode(loader::scan).doesNotThrowAnyException(); assertThatCode(loader::verify).isInstanceOf(KeyLoadingException.class); assertThatCode(loader::injectInAddressBook).isInstanceOf(KeyLoadingException.class); @@ -183,9 +187,15 @@ void keyStoreLoaderNegativeCase2Test(final String directoryName) throws IOExcept assertThat(keyDirectory).exists().isDirectory().isReadable().isNotEmptyDirectory(); assertThat(loader).isNotNull(); + assertThatCode(loader::migrate).doesNotThrowAnyException(); assertThatCode(loader::scan).doesNotThrowAnyException(); + assertThatCode(loader::generateIfNecessary).isInstanceOf(KeyGeneratingException.class); assertThatCode(loader::verify).isInstanceOf(KeyLoadingException.class); - assertThatCode(loader::injectInAddressBook).doesNotThrowAnyException(); + if (directoryName.equals("hybrid-invalid-case-2") || directoryName.equals("enhanced-invalid-case-2")) { + assertThatCode(loader::injectInAddressBook).isInstanceOf(KeyLoadingException.class); + } else { + assertThatCode(loader::injectInAddressBook).doesNotThrowAnyException(); + } assertThatCode(loader::keysAndCerts).isInstanceOf(KeyLoadingException.class); } @@ -214,4 +224,53 @@ private Configuration configure(final Path keyDirectory) throws IOException { private AddressBook addressBook() { return loadConfigFile(testDataDirectory.resolve("config.txt")).getAddressBook(); } + + ///////////////////////////////////////////////////////////////////////////// + //////////////////////// MIGRATION SPECIFIC UNIT TESTS ////////////////////// + ///////////////////////////////////////////////////////////////////////////// + + /** + * The Negative Type 2 tests are designed to test the case where the key store loader is able to scan the key 
+ * directory, but one or more of the legacy private keys or public certificates is corrupt or missing; the migration must then roll back any PEM files it created and leave the key directory unchanged. + * + * @param directoryName the directory name containing the test data being used to cover a given test case. + * @throws IOException if an I/O error occurs during test setup. + */ + @ParameterizedTest + @DisplayName("Migration Negative Cases Test") + @ValueSource(strings = {"migration-invalid-missing-private-key", "migration-invalid-missing-public-key"}) + void migrationNegativeCaseTest(final String directoryName) throws IOException { + final Path keyDirectory = testDataDirectory.resolve(directoryName); + final AddressBook addressBook = addressBook(); + final EnhancedKeyStoreLoader loader = EnhancedKeyStoreLoader.using(addressBook, configure(keyDirectory)); + + assertThat(keyDirectory).exists().isDirectory().isReadable().isNotEmptyDirectory(); + + // read all files into memory for later comparison. + final Map<String, byte[]> fileContents = new HashMap<>(); + try (Stream<Path> paths = Files.list(keyDirectory)) { + paths.forEach(path -> { + try { + fileContents.put(path.getFileName().toString(), Files.readAllBytes(path)); + } catch (IOException e) { + throw new java.io.UncheckedIOException(e); + } + }); + } + + assertThat(loader).isNotNull(); + assertThatCode(loader::migrate).doesNotThrowAnyException(); + + // check that the migration rolled back the changes and that the files are identical. + try (Stream<Path> paths = Files.list(keyDirectory)) { + paths.forEach(path -> { + try { + assertThat(Files.readAllBytes(path)) + .isEqualTo(fileContents.get(path.getFileName().toString())); + } catch (IOException e) { + throw new java.io.UncheckedIOException(e); + } + }); + } + } } diff --git a/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-private-key/private-alice.pfx b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-private-key/private-alice.pfx new file mode 100644 index 000000000000..c7af10445233 Binary files /dev/null and b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-private-key/private-alice.pfx differ diff --git a/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-private-key/public.pfx b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-private-key/public.pfx new file mode 100644 index 000000000000..d6631eeb7fda Binary files /dev/null and b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-private-key/public.pfx differ diff --git a/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-alice.pfx b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-alice.pfx new file mode 100644 index 000000000000..c7af10445233 Binary files /dev/null and b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-alice.pfx differ diff --git a/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-bob.pfx
b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-bob.pfx new file mode 100644 index 000000000000..e243077f29a5 Binary files /dev/null and b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-bob.pfx differ diff --git a/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-carol.pfx b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-carol.pfx new file mode 100644 index 000000000000..c7f588884d4e Binary files /dev/null and b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/private-carol.pfx differ diff --git a/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/public.pfx b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/public.pfx new file mode 100644 index 000000000000..dc83c2899a0a Binary files /dev/null and b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/crypto/EnhancedKeyStoreLoader/migration-invalid-missing-public-key/public.pfx differ
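The EnhancedKeyStoreLoader migration above extracts each signing key and certificate from the legacy PKCS#12 (.pfx) stores, writes them out as PEM files, validates that the PEM copies load back with identical encodings, and only then archives the .pfx files, rolling the PEM files back on any error (which is what the negative test cases verify). As a rough, standalone illustration of that round-trip check, and not part of the change itself, the sketch below uses the JDK KeyStore API and BouncyCastle's PEMParser; the class name, alias, and password parameters are placeholders, since the real loader resolves these from the address book and its configured key directory.

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.security.KeyStore;
    import java.security.PrivateKey;
    import java.util.Arrays;
    import org.bouncycastle.asn1.pkcs.PrivateKeyInfo;
    import org.bouncycastle.openssl.PEMParser;
    import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter;

    // Hypothetical sketch of the PFX-to-PEM round-trip check performed during migration:
    // the key loaded from the legacy PKCS#12 store must have the same DER encoding as the
    // key re-read from the freshly written s-private-<alias>.pem file.
    final class PemMigrationRoundTripSketch {

        static boolean privateKeyMatches(
                final Path legacyPfx, final char[] password, final String alias, final Path migratedPem)
                throws Exception {
            // Load the original key from the legacy .pfx (PKCS#12) store.
            final KeyStore store = KeyStore.getInstance("PKCS12");
            try (var in = Files.newInputStream(legacyPfx)) {
                store.load(in, password);
            }
            final PrivateKey fromPfx = (PrivateKey) store.getKey(alias, password);

            // Re-read the migrated PEM file and compare the PKCS#8 encodings byte for byte.
            try (PEMParser parser = new PEMParser(Files.newBufferedReader(migratedPem))) {
                final PrivateKeyInfo info = (PrivateKeyInfo) parser.readObject();
                final PrivateKey fromPem = new JcaPEMKeyConverter().getPrivateKey(info);
                return fromPfx != null && Arrays.equals(fromPfx.getEncoded(), fromPem.getEncoded());
            }
        }
    }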